2 new changesets in galaxy-central: http://bitbucket.org/galaxy/galaxy-central/changeset/ce8cc8feb6e0/ changeset: ce8cc8feb6e0 user: dannon date: 2011-07-19 19:40:24 summary: Code cleanup. affected #: 1 file (886 bytes) --- a/lib/galaxy/web/controllers/history.py Tue Jul 19 11:27:16 2011 -0400 +++ b/lib/galaxy/web/controllers/history.py Tue Jul 19 13:40:24 2011 -0400 @@ -58,13 +58,12 @@ # Columns that are valid for filtering but are not visible. grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ) ] - columns.append( - grids.MulticolFilterColumn( - "search history names and tags", - cols_to_filter=[ columns[0], columns[2] ], + columns.append( + grids.MulticolFilterColumn( + "search history names and tags", + cols_to_filter=[ columns[0], columns[2] ], key="free-text-search", visible=False, filterable="standard" ) ) - operations = [ grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ), grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ), @@ -125,11 +124,11 @@ return trans.sa_session.query( self.model_class ).join( 'users_shared_with' ) def apply_query_filter( self, trans, query, **kwargs ): return query.filter( model.HistoryUserShareAssociation.user == trans.user ) - + class HistoryAllPublishedGrid( grids.Grid ): class NameURLColumn( grids.PublicURLColumn, NameColumn ): pass - + title = "Published Histories" model_class = model.History default_sort_key = "update_time" @@ -138,15 +137,15 @@ columns = [ NameURLColumn( "Name", key="name", filterable="advanced" ), grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.HistoryAnnotationAssociation, filterable="advanced" ), - grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ), + grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ), grids.CommunityRatingColumn( "Community Rating", key="rating" ), grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced", grid_name="PublicHistoryListGrid" ), grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago ) ] - columns.append( - grids.MulticolFilterColumn( - "Search name, annotation, owner, and tags", - cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ], + columns.append( + grids.MulticolFilterColumn( + "Search name, annotation, owner, and tags", + cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ], key="free-text-search", visible=False, filterable="standard" ) ) operations = [] @@ -156,7 +155,7 @@ def apply_query_filter( self, trans, query, **kwargs ): # A public history is published, has a slug, and is not deleted. 
return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False ) - + class HistoryController( BaseController, Sharable, UsesAnnotations, UsesItemRatings, UsesHistory ): @web.expose def index( self, trans ): @@ -166,11 +165,11 @@ """XML history list for functional tests""" trans.response.set_content_type( 'text/xml' ) return trans.fill_template( "/history/list_as_xml.mako" ) - + stored_list_grid = HistoryListGrid() shared_list_grid = SharedHistoryListGrid() published_list_grid = HistoryAllPublishedGrid() - + @web.expose def list_published( self, trans, **kwargs ): grid = self.published_list_grid( trans, **kwargs ) @@ -179,7 +178,7 @@ else: # Render grid wrapped in panels return trans.fill_template( "history/list_published.mako", grid=grid ) - + @web.expose @web.require_login( "work with multiple histories" ) def list( self, trans, **kwargs ): @@ -200,7 +199,7 @@ refresh_history = False # Load the histories and ensure they all belong to the current user histories = [] - for history_id in history_ids: + for history_id in history_ids: history = self.get_history( trans, history_id ) if history: # Ensure history is owned by current user @@ -209,18 +208,18 @@ histories.append( history ) else: log.warn( "Invalid history id '%r' passed to list", history_id ) - if histories: + if histories: if operation == "switch": status, message = self._list_switch( trans, histories ) - # Take action to update UI to reflect history switch. If + # Take action to update UI to reflect history switch. If # grid is using panels, it is standalone and hence a redirect # to root is needed; if grid is not using panels, it is nested - # in the main Galaxy UI and refreshing the history frame + # in the main Galaxy UI and refreshing the history frame # is sufficient. 
use_panels = kwargs.get('use_panels', False) == 'True' if use_panels: return trans.response.send_redirect( url_for( "/" ) ) - else: + else: trans.template_context['refresh_frames'] = ['history'] elif operation in ( "delete", "delete and remove datasets from disk" ): if operation == "delete and remove datasets from disk": @@ -338,7 +337,7 @@ trans.set_history( new_history ) # No message return None, None - + @web.expose @web.require_login( "work with shared histories" ) def list_shared( self, trans, **kwargs ): @@ -373,7 +372,7 @@ status = 'done' # Render the list view return self.shared_list_grid( trans, status=status, message=message, **kwargs ) - + @web.expose def display_structured( self, trans, id=None ): """ @@ -444,7 +443,7 @@ items.sort( key=( lambda x: x[0].create_time ), reverse=True ) # return trans.fill_template( "history/display_structured.mako", items=items ) - + @web.expose def delete_current( self, trans ): """Delete just the active history -- this does not require a logged in user.""" @@ -456,25 +455,22 @@ trans.sa_session.add( history ) trans.sa_session.flush() trans.log_event( "History id %d marked as deleted" % history.id ) - # Regardless of whether it was previously deleted, we make a new history active + # Regardless of whether it was previously deleted, we make a new history active trans.new_history() - return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] ) - + return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] ) + @web.expose @web.require_login( "rate items" ) @web.json def rate_async( self, trans, id, rating ): """ Rate a history asynchronously and return updated community data. """ - history = self.get_history( trans, id, check_ownership=False, check_accessible=True ) if not history: return trans.show_error_message( "The specified history does not exist." ) - # Rate history. history_rating = self.rate_item( trans.sa_session, trans.get_user(), history, rating ) - return self.get_ave_item_rating_data( trans.sa_session, history ) - + @web.expose def rename_async( self, trans, id=None, new_name=None ): history = self.get_history( trans, id ) @@ -490,7 +486,7 @@ trans.sa_session.add( history ) trans.sa_session.flush() return history.name - + @web.expose @web.require_login( "use Galaxy histories" ) def annotate_async( self, trans, id, new_annotation=None, **kwargs ): @@ -503,12 +499,11 @@ return new_annotation @web.expose - # TODO: Remove require_login when users are warned that, if they are not + # TODO: Remove require_login when users are warned that, if they are not # logged in, this will remove their current history. @web.require_login( "use Galaxy histories" ) def import_archive( self, trans, **kwargs ): """ Import a history from a file archive. """ - # Set archive source and type. archive_file = kwargs.get( 'archive_file', None ) archive_url = kwargs.get( 'archive_url', None ) @@ -519,37 +514,34 @@ elif archive_url: archive_source = archive_url archive_type = 'url' - # If no source to create archive from, show form to upload archive or specify URL. if not archive_source: - return trans.show_form( + return trans.show_form( web.FormBuilder( web.url_for(), "Import a History from an Archive", submit_text="Submit" ) \ .add_input( "text", "Archived History URL", "archive_url", value="", error=None ) # TODO: add support for importing via a file. 
- #.add_input( "file", "Archived History File", "archive_file", value=None, error=None ) + #.add_input( "file", "Archived History File", "archive_file", value=None, error=None ) ) - # Run job to do import. history_imp_tool = trans.app.toolbox.tools_by_id[ '__IMPORT_HISTORY__' ] incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type } history_imp_tool.execute( trans, incoming=incoming ) return trans.show_message( "Importing history from '%s'. \ This history will be visible when the import is complete" % archive_source ) - - @web.expose + + @web.expose def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ): """ Export a history to an archive. """ - - # + # # Convert options to booleans. # if isinstance( gzip, basestring ): - gzip = ( gzip in [ 'True', 'true', 'T', 't' ] ) + gzip = ( gzip in [ 'True', 'true', 'T', 't' ] ) if isinstance( include_hidden, basestring ): include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] ) if isinstance( include_deleted, basestring ): - include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] ) - + include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] ) + # # Get history to export. # @@ -559,10 +551,10 @@ # Use current history. history = trans.history id = trans.security.encode_id( history.id ) - + if not history: return trans.show_error_message( "This history does not exist or you cannot export this history." ) - + # # If history has already been exported and it has not changed since export, stream it. # @@ -585,40 +577,38 @@ elif jeha.job.state in [ model.Job.states.RUNNING, model.Job.states.QUEUED, model.Job.states.WAITING ]: return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \ % ( { 'n' : history.name, 's' : url_for( action="export_archive", id=id, qualified=True ) } ) ) - + # Run job to do export. history_exp_tool = trans.app.toolbox.tools_by_id[ '__EXPORT_HISTORY__' ] - params = { - 'history_to_export' : history, - 'compress' : gzip, - 'include_hidden' : include_hidden, + params = { + 'history_to_export' : history, + 'compress' : gzip, + 'include_hidden' : include_hidden, 'include_deleted' : include_deleted } history_exp_tool.execute( trans, incoming = params, set_output_hid = True ) return trans.show_message( "Exporting History '%(n)s'. Use this link to download \ the archive or import it to another Galaxy server: \ <a href='%(u)s'>%(u)s</a>" \ % ( { 'n' : history.name, 'u' : url_for( action="export_archive", id=id, qualified=True ) } ) ) - + @web.expose @web.json @web.require_login( "get history name and link" ) def get_name_and_link_async( self, trans, id=None ): """ Returns history's name and link. """ history = self.get_history( trans, id, False ) - if self.create_item_slug( trans.sa_session, history ): trans.sa_session.flush() - return_dict = { - "name" : history.name, + return_dict = { + "name" : history.name, "link" : url_for( action="display_by_username_and_slug", username=history.user.username, slug=history.slug ) } return return_dict - + @web.expose @web.require_login( "set history's accessible flag" ) def set_accessible_async( self, trans, id=None, accessible=False ): """ Set history's importable attribute and slug. """ history = self.get_history( trans, id, True ) - # Only set if importable value would change; this prevents a change in the update_time unless attribute really changed. 
importable = accessible in ['True', 'true', 't', 'T']; if history and history.importable != importable: @@ -627,7 +617,6 @@ else: history.importable = importable trans.sa_session.flush() - return @web.expose @@ -638,7 +627,7 @@ history.slug = new_slug trans.sa_session.flush() return history.slug - + @web.expose def get_item_content_async( self, trans, id ): """ Returns item content in HTML format. """ @@ -646,7 +635,7 @@ history = self.get_history( trans, id, False, True ) if history is None: raise web.httpexceptions.HTTPNotFound() - + # Get datasets. datasets = self.get_history_datasets( trans, history ) # Get annotations. @@ -654,7 +643,7 @@ for dataset in datasets: dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset ) return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets ) - + @web.expose def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ): """Return autocomplete data for history names""" @@ -666,7 +655,7 @@ for history in trans.sa_session.query( model.History ).filter_by( user=user ).filter( func.lower( model.History.name ) .like(q.lower() + "%") ): ac_data = ac_data + history.name + "\n" return ac_data - + @web.expose def imp( self, trans, id=None, confirm=False, **kwd ): """Import another user's history via a shared URL""" @@ -682,7 +671,7 @@ referer_message = "<a href='%s'>return to the previous page</a>" % referer else: referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' ) - + # Do import. if not id: return trans.show_error_message( "You must specify a history you want to import.<br>You can %s." % referer_message, use_panels=True ) @@ -712,7 +701,7 @@ # Set imported history to be user's current history. trans.set_history( new_history ) return trans.show_ok_message( - message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s.""" + message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s.""" % ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True ) elif not user_history or not user_history.datasets or confirm: new_history = import_history.copy() @@ -730,13 +719,13 @@ trans.sa_session.flush() trans.set_history( new_history ) return trans.show_ok_message( - message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s.""" + message="""History "%s" has been imported. <br>You can <a href="%s">start using this history</a> or %s.""" % ( new_history.name, web.url_for( '/' ), referer_message ), use_panels=True ) return trans.show_warn_message( """ Warning! If you import this history, you will lose your current history. <br>You can <a href="%s">continue and import this history</a> or %s. """ % ( web.url_for( id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True ) - + @web.expose def view( self, trans, id=None, show_deleted=False ): """View a history. If a history is importable, then it is viewable by any user.""" @@ -757,11 +746,11 @@ history = history_to_view, datasets = datasets, show_deleted = show_deleted ) - + @web.expose def display_by_username_and_slug( self, trans, username, slug ): - """ Display history based on a username and slug. """ - + """ Display history based on a username and slug. """ + # Get history. 
session = trans.sa_session user = session.query( model.User ).filter_by( username=username ).first() @@ -770,14 +759,14 @@ raise web.httpexceptions.HTTPNotFound() # Security check raises error if user cannot access history. self.security_check( trans.get_user(), history, False, True) - + # Get datasets. datasets = self.get_history_datasets( trans, history ) # Get annotations. history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history ) for dataset in datasets: dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset ) - + # Get rating data. user_item_rating = 0 if trans.get_user(): @@ -787,9 +776,9 @@ else: user_item_rating = 0 ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, history ) - return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets, + return trans.stream_template_mako( "history/display.mako", item = history, item_data = datasets, user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings ) - + @web.expose @web.require_login( "share Galaxy histories" ) def sharing( self, trans, id=None, histories=[], **kwargs ): @@ -804,7 +793,7 @@ histories = [ self.get_history( trans, history_id ) for history_id in ids ] elif not histories: histories = [ trans.history ] - + # Do operation on histories. for history in histories: if 'make_accessible_via_link' in kwargs: @@ -837,17 +826,17 @@ message = "History '%s' does not seem to be shared with user '%s'" % ( history.name, user.email ) return trans.fill_template( '/sharing_base.mako', item=history, message=message, status='error' ) - - + + # Legacy issue: histories made accessible before recent updates may not have a slug. Create slug for any histories that need them. for history in histories: if history.importable and not history.slug: self._make_item_accessible( trans.sa_session, history ) - + session.flush() - + return trans.fill_template( "/sharing_base.mako", item=history ) - + @web.expose @web.require_login( "share histories with other users" ) def share( self, trans, id=None, email="", **kwd ): @@ -890,11 +879,11 @@ send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing." return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err ) if can_change or cannot_change: - return trans.fill_template( "/history/share.mako", - histories=histories, - email=email, - send_to_err=send_to_err, - can_change=can_change, + return trans.fill_template( "/history/share.mako", + histories=histories, + email=email, + send_to_err=send_to_err, + can_change=can_change, cannot_change=cannot_change, no_change_needed=unique_no_change_needed ) if no_change_needed: @@ -903,11 +892,11 @@ # User seems to be sharing an empty history send_to_err = "You cannot share an empty history. " return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err ) - + @web.expose @web.require_login( "share restricted histories with other users" ) def share_restricted( self, trans, id=None, email="", **kwd ): - if 'action' in kwd: + if 'action' in kwd: action = kwd[ 'action' ] else: err_msg = "Select an action. " @@ -938,10 +927,10 @@ # The action here is either 'public' or 'private', so we'll continue to populate the # histories_for_sharing dictionary from the can_change dictionary. 
for send_to_user, history_dict in can_change.items(): - for history in history_dict: + for history in history_dict: # Make sure the current history has not already been shared with the current send_to_user if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ - .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, + .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ .count() > 0: send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email ) @@ -954,7 +943,7 @@ # The user with which we are sharing the history does not have access permission on the current dataset if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations: # The current user has authority to change permissions on the current dataset because - # they have permission to manage permissions on the dataset and the dataset is not associated + # they have permission to manage permissions on the dataset and the dataset is not associated # with a library. if action == "private": trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] ) @@ -986,7 +975,7 @@ send_to_user = trans.sa_session.query( trans.app.model.User ) \ .filter( and_( trans.app.model.User.table.c.email==email_address, trans.app.model.User.table.c.deleted==False ) ) \ - .first() + .first() if send_to_user: send_to_users.append( send_to_user ) else: @@ -1004,7 +993,7 @@ for history in history_dict: # Make sure the current history has not already been shared with the current send_to_user if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ - .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, + .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ .count() > 0: send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email ) @@ -1019,7 +1008,7 @@ # The user may be attempting to share histories whose datasets cannot all be accessed by other users. 
# If this is the case, the user sharing the histories can: # 1) action=='public': choose to make the datasets public if he is permitted to do so - # 2) action=='private': automatically create a new "sharing role" allowing protected + # 2) action=='private': automatically create a new "sharing role" allowing protected # datasets to be accessed only by the desired users # This method will populate the can_change, cannot_change and no_change_needed dictionaries, which # are used for either displaying to the user, letting them make 1 of the choices above, or sharing @@ -1036,7 +1025,7 @@ for send_to_user in send_to_users: # Make sure the current history has not already been shared with the current send_to_user if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ - .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, + .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ .count() > 0: send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email ) @@ -1125,7 +1114,7 @@ if send_to_err: msg += send_to_err return self.sharing( trans, histories=shared_histories, msg=msg ) - + @web.expose @web.require_login( "rename histories" ) def rename( self, trans, id=None, name=None, **kwd ): @@ -1164,7 +1153,7 @@ else: change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>" return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] ) - + @web.expose @web.require_login( "clone shared Galaxy history" ) def clone( self, trans, id=None, **kwd ): @@ -1207,13 +1196,11 @@ else: msg = '%d cloned histories are now included in your previously stored histories.' % len( histories ) return trans.show_ok_message( msg ) - + @web.expose @web.require_login( "switch to a history" ) def switch_to_history( self, trans, hist_id=None ): decoded_id = trans.security.decode_id(hist_id) hist = trans.sa_session.query( trans.app.model.History ).get( decoded_id ) - trans.set_history( hist ) return trans.response.send_redirect( url_for( "/" ) ) - http://bitbucket.org/galaxy/galaxy-central/changeset/c875100ea5ed/ changeset: c875100ea5ed user: dannon date: 2011-07-20 15:01:37 summary: Merge affected #: 30 files (3.4 KB) --- a/README.txt Tue Jul 19 13:40:24 2011 -0400 +++ b/README.txt Wed Jul 20 09:01:37 2011 -0400 @@ -28,4 +28,4 @@ Not all dependencies are included for the tools provided in the sample tool_conf.xml. A full list of external dependencies is available at: -http://bitbucket.org/galaxy/galaxy-central/wiki/ToolDependencies +http://wiki.g2.bx.psu.edu/Admin/Tools/Tool%20Dependencies --- a/dist-eggs.ini Tue Jul 19 13:40:24 2011 -0400 +++ b/dist-eggs.ini Wed Jul 20 09:01:37 2011 -0400 @@ -3,7 +3,7 @@ ; eggs.g2.bx.psu.edu). Probably only useful to Galaxy developers at ; Penn State. This file is used by scripts/dist-scramble.py ; -; More information: http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs +; More information: http://wiki.g2.bx.psu.edu/Admin/Config/Eggs ; [hosts] --- a/eggs.ini Tue Jul 19 13:40:24 2011 -0400 +++ b/eggs.ini Wed Jul 20 09:01:37 2011 -0400 @@ -3,7 +3,7 @@ ; ; This file is version controlled and should not be edited by hand! 
; For more information, see: -; http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Eggs +; http://wiki.g2.bx.psu.edu/Admin/Config/Eggs ; [general] --- a/lib/galaxy/jobs/runners/pbs.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/jobs/runners/pbs.py Wed Jul 20 09:01:37 2011 -0400 @@ -17,7 +17,7 @@ configured properly. Galaxy's "scramble" system should make this installation simple, please follow the instructions found at: - http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster + http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster Additional errors may follow: %s --- a/lib/galaxy/jobs/runners/sge.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/jobs/runners/sge.py Wed Jul 20 09:01:37 2011 -0400 @@ -14,7 +14,7 @@ "scramble" system should make this installation simple, please follow the instructions found at: - http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster + http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster Additional errors may follow: %s --- a/lib/galaxy/web/form_builder.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/web/form_builder.py Wed Jul 20 09:01:37 2011 -0400 @@ -658,7 +658,7 @@ self.name = name self.ldda = value self.trans = trans - def get_html( self, disabled=False ): + def get_html( self, prefix="", disabled=False ): if not self.ldda: ldda = "" text = "Choose a library dataset" @@ -666,7 +666,7 @@ ldda = self.trans.security.encode_id(self.ldda.id) text = self.ldda.name return '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \ - <input type="hidden" name="%s" value="%s">' % ( text, self.name, escape( str(ldda), quote=True ) ) + <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda), quote=True ) ) def get_display_text(self): if self.ldda: --- a/lib/galaxy/webapps/community/config.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/webapps/community/config.py Wed Jul 20 09:01:37 2011 -0400 @@ -64,7 +64,7 @@ self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False ) self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) ) self.brand = kwargs.get( 'brand', None ) - self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' ) + self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' ) self.bugs_email = kwargs.get( 'bugs_email', None ) self.blog_url = kwargs.get( 'blog_url', None ) self.screencasts_url = kwargs.get( 'screencasts_url', None ) --- a/lib/galaxy/webapps/community/controllers/common.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/webapps/community/controllers/common.py Wed Jul 20 09:01:37 2011 -0400 @@ -230,6 +230,8 @@ correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_loc_file else: correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % missing_file + else: + correction_msg = exception_msg message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg ) status = 'error' elif flush_needed: @@ -297,7 +299,7 @@ util.send_mail( frm, to, subject, body, trans.app.config ) except Exception, e: log.exception( "An error occurred sending a tool shed repository update alert by email." ) -def update_for_browsing( repository, current_working_dir ): +def update_for_browsing( repository, current_working_dir, commit_message='' ): # Make a copy of a repository's files for browsing. repo_dir = repository.repo_path repo = hg.repository( ui.ui(), repo_dir ) @@ -316,12 +318,15 @@ # ! = deleted, but still tracked # ? 
= not tracked
     # I = ignored
-    # We'll remove all files that are not tracked or ignored.
     files_to_remove_from_disk = []
+    files_to_commit = []
     for status_and_file_name in status_and_file_names:
         if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
             files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+        elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+            files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
     for full_path in files_to_remove_from_disk:
+        # We'll remove all files that are not tracked or ignored.
         if os.path.isdir( full_path ):
             try:
                 os.rmdir( full_path )
@@ -336,6 +341,11 @@
             except OSError, e:
                 # The directory is not empty
                 pass
+    if files_to_commit:
+        if not commit_message:
+            commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
+        repo.dirstate.write()
+        repo.commit( text=commit_message )
     os.chdir( repo_dir )
     os.system( 'hg update > /dev/null 2>&1' )
     os.chdir( current_working_dir )

--- a/lib/galaxy/webapps/community/controllers/repository.py	Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py	Wed Jul 20 09:01:37 2011 -0400
@@ -425,7 +425,7 @@
         repository = get_repository( trans, id )
         repo = hg.repository( ui.ui(), repository.repo_path )
         current_working_dir = os.getcwd()
-        update_for_browsing( repository, current_working_dir )
+        update_for_browsing( repository, current_working_dir, commit_message=commit_message )
         return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
                                     repo=repo,
                                     repository=repository,
@@ -454,11 +454,17 @@
             # Commit the change set.
             if not commit_message:
                 commit_message = 'Deleted selected files'
-            # Commit the changes.
-            commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+            try:
+                commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+            except Exception, e:
+                # I never have a problem with commands.commit on a Mac, but in the test/production
+                # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+                # exception. If this happens, we'll try the following.
+                repo.dirstate.write()
+                repo.commit( text=commit_message )
             handle_email_alerts( trans, repository )
             # Update the repository files for browsing.
-            update_for_browsing( repository, current_working_dir )
+            update_for_browsing( repository, current_working_dir, commit_message=commit_message )
             # Get the new repository tip.
             repo = hg.repository( ui.ui(), repo_dir )
             if tip != repository.tip:

--- a/lib/galaxy/webapps/community/controllers/upload.py	Tue Jul 19 13:40:24 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/upload.py	Wed Jul 20 09:01:37 2011 -0400
@@ -87,7 +87,16 @@
             # Move the uploaded file to the load_point within the repository hierarchy.
             shutil.move( uploaded_file_name, full_path )
             commands.add( repo.ui, repo, full_path )
-            commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+            """
+            try:
+                commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+            except Exception, e:
+                # I never have a problem with commands.commit on a Mac, but in the test/production
+                # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+                # exception. If this happens, we'll try the following.
+ repo.dirstate.write() + repo.commit( text=commit_message ) + """ if full_path.endswith( '.loc.sample' ): # Handle the special case where a xxx.loc.sample file is # being uploaded by copying it to ~/tool-data/xxx.loc. @@ -96,7 +105,7 @@ if ok: # Update the repository files for browsing, a by-product of doing this # is eliminating unwanted files from the repository directory. - update_for_browsing( repository, current_working_dir ) + update_for_browsing( repository, current_working_dir, commit_message=commit_message ) # Get the new repository tip. if tip != repository.tip: if ( isgzip or isbz2 ) and uncompress_file: @@ -183,8 +192,14 @@ # Handle the special case where a xxx.loc.sample file is # being uploaded by copying it to ~/tool-data/xxx.loc. copy_sample_loc_file( trans, filename_in_archive ) - # Commit the changes. - commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) + try: + commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message ) + except Exception, e: + # I never have a problem with commands.commit on a Mac, but in the test/production + # tool shed environment, it occasionally throws a "TypeError: array item must be char" + # exception. If this happens, we'll try the following. + repo.dirstate.write() + repo.commit( text=commit_message ) handle_email_alerts( trans, repository ) return True, '', files_to_remove def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ): --- a/lib/galaxy/webapps/demo_sequencer/config.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/webapps/demo_sequencer/config.py Wed Jul 20 09:01:37 2011 -0400 @@ -49,7 +49,7 @@ self.smtp_server = kwargs.get( 'smtp_server', None ) self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) ) self.brand = kwargs.get( 'brand', None ) - self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' ) + self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' ) self.bugs_email = kwargs.get( 'bugs_email', None ) self.blog_url = kwargs.get( 'blog_url', None ) self.screencasts_url = kwargs.get( 'screencasts_url', None ) --- a/lib/galaxy/webapps/reports/config.py Tue Jul 19 13:40:24 2011 -0400 +++ b/lib/galaxy/webapps/reports/config.py Wed Jul 20 09:01:37 2011 -0400 @@ -33,7 +33,7 @@ self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail") self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) ) self.brand = kwargs.get( 'brand', None ) - self.wiki_url = kwargs.get( 'wiki_url', 'http://bitbucket.org/galaxy/galaxy-central/wiki/Home' ) + self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' ) self.bugs_email = kwargs.get( 'bugs_email', None ) self.blog_url = kwargs.get( 'blog_url', None ) self.screencasts_url = kwargs.get( 'screencasts_url', None ) --- a/static/welcome.rst Tue Jul 19 13:40:24 2011 -0400 +++ b/static/welcome.rst Wed Jul 20 09:01:37 2011 -0400 @@ -30,7 +30,7 @@ .. __: /static/help.html .. __: http://www.bx.psu.edu/cgi-bin/trac.cgi -.. __: http://bitbucket.org/galaxy/galaxy-central/wiki/GalaxyTeam +.. __: http://wiki.g2.bx.psu.edu/Galaxy%20Team .. 
__: mailto:galaxy@bx.psu.edu Version: <b>2.1</b> Revision: <b>$Rev$</b> --- a/templates/webapps/community/base_panels.mako Tue Jul 19 13:40:24 2011 -0400 +++ b/templates/webapps/community/base_panels.mako Wed Jul 20 09:01:37 2011 -0400 @@ -34,9 +34,9 @@ <div class="submenu"><ul><li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li> - <li><a target="_blank" href="${app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" )}">Galaxy Wiki</a></li> + <li><a target="_blank" href="${app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" )}">Galaxy Wiki</a></li><li><a target="_blank" href="${app.config.get( "screencasts_url", "http://galaxycast.org" )}">Video tutorials (screencasts)</a></li> - <li><a target="_blank" href="${app.config.get( "citation_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" )}">How to Cite Galaxy</a></li> + <li><a target="_blank" href="${app.config.get( "citation_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" )}">How to Cite Galaxy</a></li></ul></div></td> --- a/templates/webapps/galaxy/base_panels.mako Tue Jul 19 13:40:24 2011 -0400 +++ b/templates/webapps/galaxy/base_panels.mako Wed Jul 20 09:01:37 2011 -0400 @@ -107,9 +107,9 @@ <% menu_options = [ ['Email comments, bug reports, or suggestions', app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" ) ], - ['Galaxy Wiki', app.config.get( "wiki_url", "http://bitbucket.org/galaxy/galaxy-central/wiki" ), "_blank" ], + ['Galaxy Wiki', app.config.get( "wiki_url", "http://wiki.g2.bx.psu.edu/" ), "_blank" ], ['Video tutorials (screencasts)', app.config.get( "screencasts_url", "http://galaxycast.org" ), "_blank" ], - ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://bitbucket.org/galaxy/galaxy-central/wiki/Citations" ), "_blank" ] + ['How to Cite Galaxy', app.config.get( "screencasts_url", "http://wiki.g2.bx.psu.edu/Citing%20Galaxy" ), "_blank" ] ] tab( "help", "Help", None, menu_options=menu_options) %> --- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Tue Jul 19 13:40:24 2011 -0400 +++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Jul 20 09:01:37 2011 -0400 @@ -13,8 +13,8 @@ <h1>rgManQQtest1</h1><table> -<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" alt="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr> -<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" alt="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr> +<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr> +<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr><tr><td><a href="rgManQQtest1.R">rgManQQtest1.R</a></td></tr><tr><td><a href="rgManQQtest1.R.log">rgManQQtest1.R.log</a></td></tr></table> @@ -35,7 +35,7 @@ - round_any + rename, round_any @@ -43,11 +43,11 @@ Loading required package: proto -[1] "### 101 values read from /tmp/rgManQQtemplYC5wa read - now running plots" +[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots" [1] "## qqplot on Allelep done" -[1] "## manhattan on Allelep starting 1 2 3" +[1] "## manhattan on Allelep starting 2 3 
8" [1] "## manhattan plot on Allelep done" @@ -62,7 +62,7 @@ # http://StephenTurner.us/ # http://GettingGeneticsDone.blogspot.com/ -# Last updated: Tuesday, December 22, 2009 +# Last updated: 19 July 2011 by Ross Lazarus # R code for making manhattan plots and QQ plots from plink output files. # With GWAS data this can take a lot of memory. Recommended for use on # 64bit machines only, for now. @@ -72,30 +72,30 @@ library(ggplot2) coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen') -# not too fugly but need a colour expert please... +# not too ugly but need a colour expert please... -manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max", - suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) { - +DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9, + size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) { if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!") genomewideline=NULL # was genomewideline=-log10(5e-8) if (genomewide) { # use bonferroni since might be only a small region? genomewideline = -log10(0.05/length(pvals)) } - d=data.frame(CHR=chrom,BP=offset,P=pvals) - - #limit to only chrs 1-23? - d=d[d$CHR %in% 1:23, ] - + offset = as.integer(offset) + pvals = as.double(pvals) + chro = as.integer(chrom) # already dealt with X and friends? + d=data.frame(CHR=chro,BP=offset,P=pvals) + #limit to only chrs 1-22, x=23,y=24,Mt=25? + d=d[d$CHR %in% 1:25, ] if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) { - d=na.omit(d) + #d=na.omit(d) d=d[d$P>0 & d$P<=1, ] - d$logp = -log10(d$P) - + d$logp = as.double(-log10(d$P)) d$pos=NA ticks=NULL lastbase=0 chrlist = unique(d$CHR) + chrlist = sort(chrlist) # returns lexical ordering nchr = length(chrlist) # may be any number? if (nchr >= 2) { for (x in c(1:nchr)) { @@ -107,7 +107,11 @@ lastchr = chrlist[x-1] # previous whatever the list lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1) d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase + if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) { + cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos)) + } tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]) + } ticklim=c(min(d$pos),max(d$pos)) xlabs = chrlist @@ -129,8 +133,6 @@ if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y maxy = max(maxy,1.1*genomewideline) - # if (maxy<8) maxy=8 - # only makes sense if genome wide is assumed - we could have a fine mapping region? 
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ] if (nchr >= 2) { manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR)) @@ -149,9 +151,6 @@ axis.text.y=theme_text(size=size.y.labels, colour="grey50"), axis.ticks=theme_segment(colour=NA) ) - #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank()) - #manplot = manplot + opts(panel.grid.major=theme_blank()) - if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3)) if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red") manplot @@ -178,16 +177,24 @@ if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank()) qq } -rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3), + +rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8), title="rgManQQtest1",grey=0) { rawd = read.table(infile,head=T,sep='\t') dn = names(rawd) cc = dn[chromcolumn] oc = dn[offsetcolumn] -nams = c(cc,oc) +rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case +rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh +rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T) +rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T) +rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T) +nams = c(cc,oc) # for sorting plen = length(rawd[,1]) -doreorder=1 print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' ')) +rawd = rawd[do.call(order,rawd[nams]),] +# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according.... +# in case not yet ordered if (plen > 0) { for (pvalscolumn in pvalscolumns) { if (pvalscolumn > 0) @@ -199,14 +206,8 @@ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100) print(paste('## qqplot on',cname,'done')) if ((chromcolumn > 0) & (offsetcolumn > 0)) { - if (doreorder) { - rawd = rawd[do.call(order,rawd[nams]),] - # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according.... - # in case not yet ordered - doreorder = 0 - } print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn)) - mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey) + mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey) print(paste('## manhattan plot on',cname,'done')) ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100) } @@ -227,6 +228,6 @@ </pre> -<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 07/11/2010 20:04:20</h3> +<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/></div></body></html> --- a/tools/data_source/microbial_import.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/data_source/microbial_import.xml Wed Jul 20 09:01:37 2011 -0400 @@ -109,7 +109,7 @@ **Note:** Having trouble locating your organism? Click here_ for a list of available species and their location. -.. _here: http://bitbucket.org/galaxy/galaxy-central/wiki/Microbes +.. 
_here: http://wiki.g2.bx.psu.edu/Main/Data%20Libraries/Microbes </help></tool> --- a/tools/new_operations/basecoverage.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/basecoverage.xml Wed Jul 20 09:01:37 2011 -0400 @@ -34,7 +34,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations </help> --- a/tools/new_operations/cluster.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/cluster.xml Wed Jul 20 09:01:37 2011 -0400 @@ -67,7 +67,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/complement.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/complement.xml Wed Jul 20 09:01:37 2011 -0400 @@ -43,7 +43,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/concat.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/concat.xml Wed Jul 20 09:01:37 2011 -0400 @@ -41,7 +41,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/coverage.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/coverage.xml Wed Jul 20 09:01:37 2011 -0400 @@ -44,7 +44,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/intersect.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/intersect.xml Wed Jul 20 09:01:37 2011 -0400 @@ -117,7 +117,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/join.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/join.xml Wed Jul 20 09:01:37 2011 -0400 @@ -78,7 +78,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/merge.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/merge.xml Wed Jul 20 09:01:37 2011 -0400 @@ -44,7 +44,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. 
_Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/new_operations/subtract.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/new_operations/subtract.xml Wed Jul 20 09:01:37 2011 -0400 @@ -98,7 +98,7 @@ See Galaxy Interval Operation Screencasts_ (right click to open this link in another window). -.. _Screencasts: http://bitbucket.org/galaxy/galaxy-central/wiki/GopsDesc +.. _Screencasts: http://wiki.g2.bx.psu.edu/Learn/Interval%20Operations ----- --- a/tools/next_gen_conversion/fastq_gen_conv.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/next_gen_conversion/fastq_gen_conv.xml Wed Jul 20 09:01:37 2011 -0400 @@ -75,7 +75,7 @@ A good description of fastq datasets can be found `here`__, while a description of Galaxy's fastq "logic" can be found `here`__. Because ranges of quality values within different types of fastq datasets overlap it very difficult to detect them automatically. This tool supports conversion of two commonly found types (Solexa/Illumina 1.0 and Illumina 1.3+) into fastq Sanger. .. __: http://en.wikipedia.org/wiki/FASTQ_format - .. __: http://bitbucket.org/galaxy/galaxy-central/wiki/NGS + .. __: http://wiki.g2.bx.psu.edu/Admin/NGS%20Local%20Setup .. class:: warningmark --- a/tools/rgenetics/rgManQQ.py Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/rgenetics/rgManQQ.py Wed Jul 20 09:01:37 2011 -0400 @@ -1,5 +1,9 @@ #!/usr/local/bin/python - +# updated july 20 to fix sort order - R unique() sorts into strict collating order +# so need to sort after unique to revert to lexicographic order for x axis on Manhattan +# rgmanqq updated july 19 to deal with x,y and mt +# lots of fixes +# ross lazarus import sys,math,shutil,subprocess,os,time,tempfile,string from os.path import abspath from rgutils import timenow, RRun, galhtmlprefix, galhtmlpostfix, galhtmlattr @@ -18,7 +22,7 @@ # http://StephenTurner.us/ # http://GettingGeneticsDone.blogspot.com/ -# Last updated: Tuesday, December 22, 2009 +# Last updated: 19 July 2011 by Ross Lazarus # R code for making manhattan plots and QQ plots from plink output files. # With GWAS data this can take a lot of memory. Recommended for use on # 64bit machines only, for now. @@ -28,30 +32,30 @@ library(ggplot2) coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen') -# not too fugly but need a colour expert please... +# not too ugly but need a colour expert please... -manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max", - suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) { - +DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9, + size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) { if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!") genomewideline=NULL # was genomewideline=-log10(5e-8) if (genomewide) { # use bonferroni since might be only a small region? genomewideline = -log10(0.05/length(pvals)) } - d=data.frame(CHR=chrom,BP=offset,P=pvals) - - #limit to only chrs 1-23? - d=d[d$CHR %in% 1:23, ] - + offset = as.integer(offset) + pvals = as.double(pvals) + chro = as.integer(chrom) # already dealt with X and friends? + d=data.frame(CHR=chro,BP=offset,P=pvals) + #limit to only chrs 1-22, x=23,y=24,Mt=25? 
+ d=d[d$CHR %in% 1:25, ] if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) { - d=na.omit(d) + #d=na.omit(d) d=d[d$P>0 & d$P<=1, ] - d$logp = -log10(d$P) - + d$logp = as.double(-log10(d$P)) d$pos=NA ticks=NULL lastbase=0 chrlist = unique(d$CHR) + chrlist = sort(chrlist) # returns lexical ordering nchr = length(chrlist) # may be any number? if (nchr >= 2) { for (x in c(1:nchr)) { @@ -63,7 +67,11 @@ lastchr = chrlist[x-1] # previous whatever the list lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1) d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase + if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) { + cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos)) + } tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]) + } ticklim=c(min(d$pos),max(d$pos)) xlabs = chrlist @@ -85,8 +93,6 @@ if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y maxy = max(maxy,1.1*genomewideline) - # if (maxy<8) maxy=8 - # only makes sense if genome wide is assumed - we could have a fine mapping region? if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ] if (nchr >= 2) { manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR)) @@ -105,9 +111,6 @@ axis.text.y=theme_text(size=size.y.labels, colour="grey50"), axis.ticks=theme_segment(colour=NA) ) - #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank()) - #manplot = manplot + opts(panel.grid.major=theme_blank()) - if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3)) if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red") manplot @@ -134,21 +137,29 @@ if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank()) qq } + """ # we need another string to avoid confusion over string substitutions with %in% # instantiate rcode2 string with infile,chromcol,offsetcol,pvalscols,title before saving and running -rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%s, offsetcolumn=%s, pvalscolumns=%s, +rcode2 = """rgqqMan = function(infile="%s",chromcolumn=%d, offsetcolumn=%d, pvalscolumns=c(%s), title="%s",grey=%d) { rawd = read.table(infile,head=T,sep='\\t') dn = names(rawd) cc = dn[chromcolumn] oc = dn[offsetcolumn] -nams = c(cc,oc) +rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case +rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh +rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T) +rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T) +rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T) +nams = c(cc,oc) # for sorting plen = length(rawd[,1]) -doreorder=1 print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' ')) +rawd = rawd[do.call(order,rawd[nams]),] +# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according.... +# in case not yet ordered if (plen > 0) { for (pvalscolumn in pvalscolumns) { if (pvalscolumn > 0) @@ -160,14 +171,8 @@ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100) print(paste('## qqplot on',cname,'done')) if ((chromcolumn > 0) & (offsetcolumn > 0)) { - if (doreorder) { - rawd = rawd[do.call(order,rawd[nams]),] - # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according.... 
- # in case not yet ordered - doreorder = 0 - } print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn)) - mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey) + mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey) print(paste('## manhattan plot on',cname,'done')) ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100) } @@ -198,50 +203,13 @@ this can be called externally, I guess...for QC eg? """ if debug: - print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir - ffd,filtered_fname = tempfile.mkstemp(prefix='rgManQQtemp') - f = open(filtered_fname,'w') - inf = open(input_fname,'r') - ohead = inf.readline().strip().split('\t') # see if we have a header - inf.seek(0) # rewind - newhead = ['pval%d' % (x+1) for x in pval_cols] - newhead.insert(0,'Offset') - newhead.insert(0,'Chrom') - havehead = 0 - wewant = [chrom_col,offset_col] - wewant += pval_cols - try: - allnums = ['%d' % x for x in ohead] # this should barf if non numerics == header row? - f.write('\t'.join(newhead)) # for R to read - f.write('\n') - except: - havehead = 1 - newhead = [ohead[chrom_col],ohead[offset_col]] - newhead += [ohead[x] for x in pval_cols] - f.write('\t'.join(newhead)) # use the original head - f.write('\n') - for i,row in enumerate(inf): - if i == 0 and havehead: - continue # ignore header - sr = row.strip().split('\t') - if len(sr) > 1: - if sr[chrom_col].lower().find('chr') <> -1: - sr[chrom_col] = sr[chrom_col][3:] - newr = [sr[x] for x in wewant] # grab cols we need - s = '\t'.join(newr) - f.write(s) - f.write('\n') - f.close() - pvc = [x+3 for x in range(len(pval_cols))] # 2 for offset and chrom, 1 for r offset start - pvc = 'c(%s)' % (','.join(map(str,pvc))) - rcmd = '%s%s' % (rcode,rcode2 % (filtered_fname,'1','2',pvc,title,grey)) + print 'doManQQ',input_fname,chrom_col,offset_col,pval_cols,title,grey,ctitle,outdir + rcmd = '%s%s' % (rcode,rcode2 % (input_fname,chrom_col,offset_col,pval_cols,title,grey)) if debug: - print 'running\n%s\n' % rcmd + print 'running\n%s\n' % rcmd rlog,flist = RRun(rcmd=rcmd,title=ctitle,outdir=outdir) rlog.append('## R script=') rlog.append(rcmd) - if beTidy: - os.unlink(filtered_fname) return rlog,flist @@ -272,19 +240,20 @@ offset_col = -1 p = sys.argv[7].strip().split(',') try: - p = [int(x) for x in p] + q = [int(x) for x in p] except: - p = [-1] + p = -1 if chrom_col == -1 or offset_col == -1: # was passed as zero - do not do manhattan plots chrom_col = -1 offset_col = -1 grey = 0 if (sys.argv[8].lower() in ['1','true']): grey = 1 - if p == [-1]: + if p == -1: print >> sys.stderr,'## Cannot run rgManQQ - missing pval column' sys.exit(1) - rlog,flist = doManQQ(input_fname,chrom_col,offset_col,p,title,grey,ctitle,outdir) + p = ['%d' % (int(x) + 1) for x in p] + rlog,flist = doManQQ(input_fname,chrom_col+1,offset_col+1,','.join(p),title,grey,ctitle,outdir) flist.sort() html = [galhtmlprefix % progname,] html.append('<h1>%s</h1>' % title) @@ -294,7 +263,7 @@ fname,expl = row # RRun returns pairs of filenames fiddled for the log and R script e = os.path.splitext(fname)[-1] if e in ['.png','.jpg']: - s= '<tr><td><a href="%s"><img src="%s" alt="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \ + s= '<tr><td><a href="%s"><img src="%s" title="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \ 
% (fname,fname,expl,expl ) html.append(s) else: @@ -317,3 +286,4 @@ if __name__ == "__main__": main() + --- a/tools/rgenetics/rgManQQ.xml Tue Jul 19 13:40:24 2011 -0400 +++ b/tools/rgenetics/rgManQQ.xml Wed Jul 20 09:01:37 2011 -0400 @@ -1,4 +1,4 @@ -<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.1"> +<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.2"><code file="rgManQQ_code.py"/><description>Plots for WGA P values</description> --- a/universe_wsgi.ini.sample Tue Jul 19 13:40:24 2011 -0400 +++ b/universe_wsgi.ini.sample Wed Jul 20 09:01:37 2011 -0400 @@ -3,7 +3,7 @@ # environment. To tune the application for a multi-user production # environment, see the documentation at: # -# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ProductionServer +# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Production%20Server # # Throughout this sample configuration file, except where stated otherwise, @@ -129,7 +129,7 @@ # Directory where data used by tools is located, see the samples in that # directory and the wiki for help: -# http://bitbucket.org/galaxy/galaxy-central/wiki/DataIntegration +# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration #tool_data_path = tool-data # Directory where chrom len files are kept, currently mainly used by trackster @@ -208,13 +208,13 @@ #logo_url = / # The URL linked by the "Galaxy Wiki" link in the "Help" menu. -#wiki_url = http://bitbucket.org/galaxy/galaxy-central/wiki +#wiki_url = http://wiki.g2.bx.psu.edu/ # The URL linked by the "Email comments..." link in the "Help" menu. #bugs_email = mailto:galaxy-bugs@bx.psu.edu # The URL linked by the "How to Cite..." link in the "Help" menu. -#citation_url = http://bitbucket.org/galaxy/galaxy-central/wiki/Citations +#citation_url = http://wiki.g2.bx.psu.edu/Citing%20Galaxy # Serve static content, which must be enabled if you're not serving it via a # proxy server. These options should be self explanatory and so are not @@ -314,7 +314,7 @@ # -- Data Libraries # These library upload options are described in much more detail in the wiki: -# http://bitbucket.org/galaxy/galaxy-central/wiki/DataLibraries/UploadingFiles +# http://wiki.g2.bx.psu.edu/Admin/Data%20Libraries/Uploading%20Library%20Files # Add an option to the library upload form which allows administrators to # upload a directory of files. @@ -372,7 +372,7 @@ # User authentication can be delegated to an upstream proxy server (usually # Apache). The upstream proxy should set a REMOTE_USER header in the request. # Enabling remote user disables regular logins. For more information, see: -# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ApacheProxy +# http://wiki.g2.bx.psu.edu/Admin/Config/Apache%20Proxy #use_remote_user = False # If use_remote_user is enabled and your external authentication @@ -388,7 +388,7 @@ # users (email addresses). These users will have access to the Admin section # of the server, and will have access to create users, groups, roles, # libraries, and more. For more information, see: -# http://bitbucket.org/galaxy/galaxy-central/wiki/Admin/AdminInterface +# http://wiki.g2.bx.psu.edu/Admin/Interface #admin_users = None # Force everyone to log in (disable anonymous access). @@ -454,7 +454,7 @@ # If running multiple Galaxy processes, one can be designated as the job # runner. For more information, see: -# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/WebApplicationScaling +# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Web%20Application%20Scali... 
#enable_job_running = True # Should jobs be tracked through the database, rather than in memory. @@ -505,7 +505,7 @@ # Clustering Galaxy is not a straightforward process and requires some # pre-configuration. See the the wiki before attempting to set any of these # options: -# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/Cluster +# http://wiki.g2.bx.psu.edu/Admin/Config/Performance/Cluster # Comma-separated list of job runners to start. local is always started. If # left commented, no jobs will be run on the cluster, even if a cluster URL is Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
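The export_archive() changes in the first changeset coerce request strings to booleans with membership tests like gzip in [ 'True', 'true', 'T', 't' ], since query-string parameters may arrive as strings rather than booleans. A minimal sketch of that coercion as a standalone helper; the name string_as_truthy is hypothetical and not part of the changeset:

    def string_as_truthy( value ):
        # Mirrors the membership test export_archive() applies to gzip,
        # include_hidden and include_deleted, which may arrive from the
        # request as strings rather than booleans.
        if isinstance( value, basestring ):
            return value in [ 'True', 'true', 'T', 't' ]
        return bool( value )

    # e.g. string_as_truthy( 'T' ) -> True; string_as_truthy( 'false' ) -> False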
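update_for_browsing() in community/controllers/common.py now splits `hg status` output two ways: untracked and ignored files are deleted from the browsing copy, while modified, added and removed files are swept into a commit. A sketch of that triage, assuming each status line has the form "<code> <path>" as in the diff; the function name triage_status_lines is hypothetical:

    import os

    def triage_status_lines( repo_dir, status_and_file_names ):
        # Codes per the legend in the diff: M = modified, A = added, R = removed,
        # ! = deleted but still tracked, ? = not tracked, I = ignored.
        files_to_remove_from_disk = []
        files_to_commit = []
        for line in status_and_file_names:
            full_path = os.path.abspath( os.path.join( repo_dir, line.split()[1] ) )
            if line.startswith( '?' ) or line.startswith( 'I' ):
                # Not tracked or ignored: delete from the browsing copy.
                files_to_remove_from_disk.append( full_path )
            elif line.startswith( 'M' ) or line.startswith( 'A' ) or line.startswith( 'R' ):
                # Tracked changes: queue for a commit so browsing matches the tip.
                files_to_commit.append( full_path )
        return files_to_remove_from_disk, files_to_commit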
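The same commit fallback appears in three places above (common.py, repository.py, upload.py): try Mercurial's high-level commands.commit(), and on the occasional "TypeError: array item must be char" fall back to the repository object. A sketch of the pattern, assuming the Mercurial 1.x API the tool shed used at the time; the wrapper name safe_commit is hypothetical, while commands.commit, repo.dirstate.write and repo.commit(text=...) appear verbatim in the diffs:

    from mercurial import commands, hg, ui

    def safe_commit( repo_dir, username, commit_message ):
        repo = hg.repository( ui.ui(), repo_dir )
        try:
            # High-level API, as called throughout the tool shed controllers.
            commands.commit( repo.ui, repo, repo_dir, user=username, message=commit_message )
        except Exception:
            # Fallback from the changeset: flush the dirstate, then commit
            # through the repository object directly.
            repo.dirstate.write()
            repo.commit( text=commit_message )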
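The rgManQQ.py rewrite stops building a filtered temp file in Python and instead hands the original data file straight to R, converting Galaxy's 0-based column indices to R's 1-based ones; the regenerated test output accordingly changes from "starting 1 2 3" (temp-file columns) to "starting 2 3 8" (original columns). A sketch of that shift; to_r_columns is a hypothetical name:

    def to_r_columns( zero_based_cols ):
        # Galaxy passes 0-based column indices; R data frames are 1-based.
        return ','.join( [ '%d' % ( int( x ) + 1 ) for x in zero_based_cols ] )

    # e.g. to_r_columns( [ 1, 2, 7 ] ) -> '2,3,8', matching chromcolumn=2,
    # offsetcolumn=3, pvalscolumns=c(8) in the regenerated rgManQQtest1 output.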