galaxy-dev
August 2009
- 12 participants
- 156 discussions
details: http://www.bx.psu.edu/hg/galaxy/rev/36c479b93d7e
changeset: 2609:36c479b93d7e
user: James Taylor <james(a)jamestaylor.org>
date: Sun Aug 23 12:28:36 2009 -0400
description:
Fix security problem with grids. The template should not be passed at call time; it must be passed at configure time.
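The reasoning behind the fix, as a minimal sketch with simplified names (Grid is trimmed down, and StoredHistoryListGrid is a placeholder rather than the actual class name in history.py): the grid's __call__ receives **kwargs that these controllers forward straight from the web request, so honoring a template= keyword at call time would let a crafted request choose which Mako template gets rendered. Binding the template to the grid class at configure time closes that path.

class Grid:
    # Configure time: the template is class-level configuration with a
    # default, never read from request-derived keyword arguments.
    template = "grid.mako"

    def __call__(self, trans, **kwargs):
        status = kwargs.get("status")
        message = kwargs.get("message")
        # Call time: only self.template is ever rendered, so a request
        # parameter can no longer pick an arbitrary template.
        return trans.fill_template(self.template, grid=self,
                                   status=status, message=message)

class StoredHistoryListGrid(Grid):
    # Concrete grids override the template in code, the way the patched
    # controllers below declare it.
    template = "/history/grid.mako"

That is what the patch below does: grids.py gains the class-level default and stops reading template from kwargs, and each controller grid declares its own template attribute.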
6 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
lib/galaxy/web/controllers/requests.py
lib/galaxy/web/controllers/requests_admin.py
lib/galaxy/web/framework/helpers/grids.py
manage_db.sh
templates/grid.mako
diffs (325 lines):
diff -r 19b86ccccf6f -r 36c479b93d7e lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Sun Aug 23 12:28:36 2009 -0400
@@ -39,6 +39,7 @@
# Grid definition
title = "Stored histories"
model_class = model.History
+ template='/history/grid.mako'
default_sort_key = "-create_time"
columns = [
grids.GridColumn( "Name", key="name",
@@ -86,6 +87,7 @@
return history.user.email
# Grid definition
title = "Histories shared with you by others"
+ template='/history/grid.mako'
model_class = model.History
default_sort_key = "-update_time"
columns = [
@@ -161,7 +163,7 @@
status, message = self._list_undelete( trans, histories )
trans.sa_session.flush()
# Render the list view
- return self.stored_list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
+ return self.stored_list_grid( trans, status=status, message=message, **kwargs )
def _list_delete( self, trans, histories ):
"""Delete histories"""
n_deleted = 0
@@ -240,14 +242,14 @@
if operation == "clone":
if not id:
message = "Select a history to clone"
- return self.shared_list_grid( trans, status='error', message=message, template='/history/grid.mako', **kwargs )
+ return self.shared_list_grid( trans, status='error', message=message, **kwargs )
# When cloning shared histories, only copy active datasets
new_kwargs = { 'clone_choice' : 'active' }
return self.clone( trans, id, **new_kwargs )
elif operation == 'unshare':
if not id:
message = "Select a history to unshare"
- return self.shared_list_grid( trans, status='error', message=message, template='/history/grid.mako', **kwargs )
+ return self.shared_list_grid( trans, status='error', message=message, **kwargs )
ids = util.listify( id )
histories = []
for history_id in ids:
@@ -261,7 +263,7 @@
message = "Unshared %d shared histories" % len( ids )
status = 'done'
# Render the list view
- return self.shared_list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
+ return self.shared_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
diff -r 19b86ccccf6f -r 36c479b93d7e lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Sun Aug 23 12:28:36 2009 -0400
@@ -16,6 +16,7 @@
class RequestsListGrid( grids.Grid ):
title = "Sequencing Requests"
+ template = '/requests/grid.mako'
model_class = model.Request
default_sort_key = "-create_time"
show_filter = model.Request.states.UNSUBMITTED
@@ -103,7 +104,7 @@
self.request_grid.default_filter = dict(state=kwargs['show_filter'], deleted=False)
self.request_grid.show_filter = kwargs.get('show_filter', trans.app.model.Request.states.UNSUBMITTED)
# Render the list view
- return self.request_grid( trans, template='/requests/grid.mako', **kwargs )
+ return self.request_grid( trans, **kwargs )
def __show_request(self, trans, id, add_sample=False):
try:
diff -r 19b86ccccf6f -r 36c479b93d7e lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Sun Aug 23 12:28:36 2009 -0400
@@ -14,6 +14,7 @@
class RequestsListGrid( grids.Grid ):
title = "Sequencing Requests"
+ template = "admin/requests/grid.mako"
model_class = model.Request
default_sort_key = "-create_time"
show_filter = model.Request.states.SUBMITTED
@@ -101,7 +102,7 @@
self.request_grid.default_filter = dict(state=kwargs['show_filter'], deleted=False)
self.request_grid.show_filter = kwargs.get('show_filter', trans.app.model.Request.states.SUBMITTED)
# Render the list view
- return self.request_grid( trans, template='/admin/requests/grid.mako', **kwargs )
+ return self.request_grid( trans, **kwargs )
@web.expose
@web.require_admin
def edit(self, trans, **kwd):
diff -r 19b86ccccf6f -r 36c479b93d7e lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Sun Aug 23 12:28:36 2009 -0400
@@ -14,8 +14,9 @@
title = ""
exposed = True
model_class = None
- template = None
+ template = "grid.mako"
columns = []
+ operations = []
standard_filters = []
default_filter = None
default_sort_key = None
@@ -25,7 +26,6 @@
def __call__( self, trans, **kwargs ):
status = kwargs.get( 'status', None )
message = kwargs.get( 'message', None )
- template = kwargs.get( 'template', None )
session = trans.sa_session
# Build initial query
query = self.build_initial_query( session )
@@ -77,7 +77,7 @@
else:
new_kwargs[ 'id' ] = trans.security.encode_id( id )
return url_for( **new_kwargs )
- return trans.fill_template( template,
+ return trans.fill_template( self.template,
grid=self,
query=query,
sort_key=sort_key,
diff -r 19b86ccccf6f -r 36c479b93d7e templates/grid.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/grid.mako Sun Aug 23 12:28:36 2009 -0400
@@ -0,0 +1,196 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">${grid.title}</%def>
+
+%if message:
+ <p>
+ <div class="${message_type}message transient-message">${message}</div>
+ <div style="clear: both"></div>
+ </p>
+%endif
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ ## TODO: generalize and move into galaxy.base.js
+ $(document).ready(function() {
+ $(".grid").each( function() {
+ var grid = this;
+ var checkboxes = $(this).find("input.grid-row-select-checkbox");
+ var update = $(this).find( "span.grid-selected-count" );
+ $(checkboxes).each( function() {
+ $(this).change( function() {
+ var n = $(checkboxes).filter("[checked]").size();
+ update.text( n );
+ });
+ })
+ });
+ });
+ ## Can this be moved into base.mako?
+ %if refresh_frames:
+ %if 'masthead' in refresh_frames:
+ ## Refresh masthead == user changes (backward compatibility)
+ if ( parent.user_changed ) {
+ %if trans.user:
+ parent.user_changed( "${trans.user.email}", ${int( app.config.is_admin_user( trans.user ) )} );
+ %else:
+ parent.user_changed( null, false );
+ %endif
+ }
+ %endif
+ %if 'history' in refresh_frames:
+ if ( parent.frames && parent.frames.galaxy_history ) {
+ parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history')}";
+ if ( parent.force_right_panel ) {
+ parent.force_right_panel( 'show' );
+ }
+ }
+ %endif
+ %if 'tools' in refresh_frames:
+ if ( parent.frames && parent.frames.galaxy_tools ) {
+ parent.frames.galaxy_tools.location.href="${h.url_for( controller='root', action='tool_menu')}";
+ if ( parent.force_left_panel ) {
+ parent.force_left_panel( 'show' );
+ }
+ }
+ %endif
+ %endif
+ </script>
+</%def>
+
+<%def name="stylesheets()">
+ <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
+ <style>
+ ## Not generic to all grids -- move to base?
+ .count-box {
+ min-width: 1.1em;
+ padding: 5px;
+ border-width: 1px;
+ border-style: solid;
+ text-align: center;
+ display: inline-block;
+ }
+ </style>
+</%def>
+
+<div class="grid-header">
+ <h2>${grid.title}</h2>
+ %if grid.standard_filters:
+ <span class="title">Filter:</span>
+ %for i, filter in enumerate( grid.standard_filters ):
+ %if i > 0:
+ <span>|</span>
+ %endif
+ <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
+ %endfor
+ %endif
+</div>
+
+
+<form action="${url()}" method="post" >
+ <table class="grid">
+ <thead>
+ <tr>
+ <th></th>
+ %for column in grid.columns:
+ %if column.visible:
+ <%
+ href = ""
+ extra = ""
+ if column.sortable:
+ if sort_key == column.key:
+ if sort_order == "asc":
+ href = url( sort=( "-" + column.key ) )
+ extra = "↓"
+ else:
+ href = url( sort=( column.key ) )
+ extra = "↑"
+ else:
+ href = url( sort=column.key )
+ %>
+ <th\
+ %if column.ncells > 1:
+ colspan="${column.ncells}"
+ %endif
+ >
+ %if href:
+ <a href="${href}">${column.label}</a>
+ %else:
+ ${column.label}
+ %endif
+ <span>${extra}</span>
+ </th>
+ %endif
+ %endfor
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
+ %for i, item in enumerate( query ):
+ <tr \
+ %if current_item == item:
+ class="current" \
+ %endif
+ >
+ ## Item selection column
+ <td style="width: 1.5em;">
+ <input type="checkbox" name="id" value=${trans.security.encode_id( item.id )} class="grid-row-select-checkbox" />
+ </td>
+ ## Data columns
+ %for column in grid.columns:
+ %if column.visible:
+ <%
+ # Link
+ link = column.get_link( trans, grid, item )
+ if link:
+ href = url( **link )
+ else:
+ href = None
+ # Value (coerced to list so we can loop)
+ value = column.get_value( trans, grid, item )
+ if column.ncells == 1:
+ value = [ value ]
+ %>
+ %for cellnum, v in enumerate( value ):
+ <%
+ # Attach popup menu?
+ if column.attach_popup and cellnum == 0:
+ extra = '<a id="grid-%d-popup" class="arrow" style="display: none;"><span>▼</span></a>' % i
+ else:
+ extra = ""
+ %>
+ %if href:
+ <td><div class="menubutton split"><a class="label" href="${href}">${v}${extra}</a></td>
+ %else:
+ <td >${v}${extra}</td>
+ %endif
+ %endfor
+ %endif
+ %endfor
+ ## Actions column
+ <td>
+ <div popupmenu="grid-${i}-popup">
+ %for operation in grid.operations:
+ %if operation.allowed( item ):
+ <a class="action-button" href="${url( operation=operation.label, id=item.id )}">${operation.label}</a>
+ %endif
+ %endfor
+ </div>
+ </td>
+ </tr>
+ %endfor
+ </tbody>
+ <tfoot>
+ <tr>
+ <td></td>
+ <td colspan="100">
+ For <span class="grid-selected-count"></span> selected items:
+ %for operation in grid.operations:
+ %if operation.allow_multiple:
+ <input type="submit" name="operation" value="${operation.label}" class="action-button">
+ %endif
+ %endfor
+ </td>
+ </tr>
+ </tfoot>
+ </table>
+</form>
details: http://www.bx.psu.edu/hg/galaxy/rev/3e9b7ec9a305
changeset: 2610:3e9b7ec9a305
user: James Taylor <james(a)jamestaylor.org>
date: Sun Aug 23 12:31:05 2009 -0400
description:
Backed out changeset 19b86ccccf6f (not quite ready yet)
5 file(s) affected in this change:
lib/galaxy/model/__init__.py
lib/galaxy/model/mapping.py
lib/galaxy/model/migrate/versions/0014_pages.py
lib/galaxy/web/controllers/user.py
templates/user/index.mako
diffs (220 lines):
diff -r 19b86ccccf6f -r 3e9b7ec9a305 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/model/__init__.py Sun Aug 23 12:31:05 2009 -0400
@@ -33,7 +33,6 @@
self.external = False
self.deleted = False
self.purged = False
- self.username = None
# Relationships
self.histories = []
@@ -1119,20 +1118,7 @@
self.country+'<br/>'+ \
'Phone: '+self.phone
-class Page( object ):
- def __init__( self ):
- self.id = None
- self.user = None
- self.title = None
- self.slug = None
- self.latest_revision_id = None
- self.revisions = []
-class PageRevision( object ):
- def __init__( self ):
- self.user = None
- self.title = None
- self.content = None
diff -r 19b86ccccf6f -r 3e9b7ec9a305 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/model/mapping.py Sun Aug 23 12:31:05 2009 -0400
@@ -42,7 +42,6 @@
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "email", TrimmedString( 255 ), nullable=False ),
- Column( "username", TrimmedString( 255 ), index=True, unique=True ),
Column( "password", TrimmedString( 40 ), nullable=False ),
Column( "external", Boolean, default=False ),
Column( "deleted", Boolean, index=True, default=False ),
@@ -524,26 +523,6 @@
Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
Column( "comment", TEXT ) )
-Page.table = Table( "page", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
- Column( "latest_revision_id", Integer,
- ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
- Column( "title", TEXT ),
- Column( "slug", TEXT, unique=True, index=True ),
- )
-
-PageRevision.table = Table( "page_revision", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
- Column( "title", TEXT ),
- Column( "content", TEXT )
- )
-
# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
@@ -926,18 +905,6 @@
assign_mapper( context, MetadataFile, MetadataFile.table,
properties=dict( history_dataset=relation( HistoryDatasetAssociation ), library_dataset=relation( LibraryDatasetDatasetAssociation ) ) )
-assign_mapper( context, PageRevision, PageRevision.table )
-
-assign_mapper( context, Page, Page.table,
- properties=dict( user=relation( User ),
- revisions=relation( PageRevision, backref='page',
- cascade="all, delete-orphan",
- primaryjoin=( Page.table.c.id == PageRevision.table.c.page_id ) ),
- latest_revision=relation( PageRevision, post_update=True,
- primaryjoin=( Page.table.c.latest_revision_id == PageRevision.table.c.id ),
- lazy=False )
- ) )
-
def db_next_hid( self ):
"""
Override __next_hid to generate from the database in a concurrency
diff -r 19b86ccccf6f -r 3e9b7ec9a305 lib/galaxy/model/migrate/versions/0014_pages.py
--- a/lib/galaxy/model/migrate/versions/0014_pages.py Sun Aug 23 12:26:46 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,56 +0,0 @@
-from sqlalchemy import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-
-import logging
-log = logging.getLogger( __name__ )
-
-metadata = MetaData( migrate_engine )
-
-Page_table = Table( "page", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
- Column( "latest_revision_id", Integer,
- ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
- Column( "title", TEXT ),
- Column( "slug", TEXT, unique=True, index=True ),
- )
-
-PageRevision_table = Table( "page_revision", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
- Column( "title", TEXT ),
- Column( "content", TEXT )
- )
-
-def upgrade():
- metadata.reflect()
- try:
- Page_table.create()
- except:
- log.debug( "Could not create page table" )
- try:
- PageRevision_table.create()
- except:
- log.debug( "Could not create page_revision table" )
-
- # Add 1 column to the user table
- User_table = Table( "galaxy_user", metadata, autoload=True )
- col = Column( 'username', String(255), index=True, unique=True, default=False )
- print type( col ), col
- col.create( User_table )
- assert col is User_table.c.username
-
-def downgrade():
- metadata.reflect()
- Page_table.drop()
- PageRevision_table.drop()
- User_table = Table( "galaxy_user", metadata, autoload=True )
- User_table.c.username.drop()
\ No newline at end of file
diff -r 19b86ccccf6f -r 3e9b7ec9a305 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Sun Aug 23 12:26:46 2009 -0400
+++ b/lib/galaxy/web/controllers/user.py Sun Aug 23 12:31:05 2009 -0400
@@ -4,7 +4,7 @@
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy import util
-import logging, os, string, re
+import logging, os, string
from random import choice
log = logging.getLogger( __name__ )
@@ -19,8 +19,6 @@
"""
require_login_nocreation_template = require_login_template % ""
require_login_creation_template = require_login_template % " If you don't already have an account, <a href='%s'>you may create one</a>."
-
-VALID_USERNAME_RE = re.compile( "^[a-zA-Z0-9\-\_]+$" )
class User( BaseController ):
edit_address_id = None
@@ -80,37 +78,6 @@
.add_text( "email", "Email", value=email, error=email_err )
.add_text( "conf_email", "Confirm Email", value='', error=conf_email_err )
.add_password( "password", "Password", value='', error=pass_err ) )
-
- @web.expose
- def change_username(self, trans, username='', **kwd):
- username_err = ''
- user = trans.get_user()
- if not user:
- trans.response.send_redirect( web.url_for( action='login' ) )
- if trans.request.method == "POST":
- if len( username ) < 4:
- username_err = "Username must be at least 4 characters in length"
- elif len( username ) > 255:
- username_err = "USername must be at most 255 characters in length"
- elif not( VALID_USERNAME_RE.match( username ) ):
- username_err = "Username must contain only letters, numbers, '-', and '_'"
- elif trans.app.model.User.filter_by( username=username ).first():
- username_err = "This username is not available"
- else:
- user.username = username
- user.flush()
- trans.log_event( "User change username" )
- return trans.show_ok_message( "Username been set to: " + user.username )
- else:
- username = user.username or ''
- return trans.show_form(
- web.FormBuilder( web.url_for(), "Change username", submit_text="Submit" )
- .add_text( "username", "Username", value=username, error=username_err,
- help="""Your username is an optional identifier that
- will be used to generate adresses for information
- you share publicly. Usernames must be at least
- four characters in length and contain only letters,
- numbers, and the '-' and '_' characters""" ) )
@web.expose
def login( self, trans, email='', password='' ):
diff -r 19b86ccccf6f -r 3e9b7ec9a305 templates/user/index.mako
--- a/templates/user/index.mako Sun Aug 23 12:26:46 2009 -0400
+++ b/templates/user/index.mako Sun Aug 23 12:31:05 2009 -0400
@@ -9,7 +9,6 @@
<ul>
<li><a href="${h.url_for( action='change_password' )}">${_('Change your password')}</a></li>
<li><a href="${h.url_for( action='change_email' )}">${_('Update your email address')}</a></li>
- <li><a href="${h.url_for( action='change_username' )}">${_('Change your public username')}</a></li>
<li><a href="${h.url_for( action='set_default_permissions' )}">${_('Change default permissions')}</a> for new histories</li>
<li><a href="${h.url_for( action='manage_addresses' )}">${_('Manage your addresses')}</a></li>
<li><a href="${h.url_for( action='logout' )}">${_('Logout')}</a></li>
details: http://www.bx.psu.edu/hg/galaxy/rev/52a145b22b69
changeset: 2611:52a145b22b69
user: James Taylor <james(a)jamestaylor.org>
date: Sun Aug 23 12:31:20 2009 -0400
description:
Merge
1 file(s) affected in this change:
lib/galaxy/model/migrate/versions/0014_pages.py
diffs (220 lines):
diff -r 36c479b93d7e -r 52a145b22b69 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Sun Aug 23 12:28:36 2009 -0400
+++ b/lib/galaxy/model/__init__.py Sun Aug 23 12:31:20 2009 -0400
@@ -33,7 +33,6 @@
self.external = False
self.deleted = False
self.purged = False
- self.username = None
# Relationships
self.histories = []
@@ -1119,20 +1118,7 @@
self.country+'<br/>'+ \
'Phone: '+self.phone
-class Page( object ):
- def __init__( self ):
- self.id = None
- self.user = None
- self.title = None
- self.slug = None
- self.latest_revision_id = None
- self.revisions = []
-class PageRevision( object ):
- def __init__( self ):
- self.user = None
- self.title = None
- self.content = None
diff -r 36c479b93d7e -r 52a145b22b69 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Sun Aug 23 12:28:36 2009 -0400
+++ b/lib/galaxy/model/mapping.py Sun Aug 23 12:31:20 2009 -0400
@@ -42,7 +42,6 @@
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "email", TrimmedString( 255 ), nullable=False ),
- Column( "username", TrimmedString( 255 ), index=True, unique=True ),
Column( "password", TrimmedString( 40 ), nullable=False ),
Column( "external", Boolean, default=False ),
Column( "deleted", Boolean, index=True, default=False ),
@@ -524,26 +523,6 @@
Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
Column( "comment", TEXT ) )
-Page.table = Table( "page", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
- Column( "latest_revision_id", Integer,
- ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
- Column( "title", TEXT ),
- Column( "slug", TEXT, unique=True, index=True ),
- )
-
-PageRevision.table = Table( "page_revision", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
- Column( "title", TEXT ),
- Column( "content", TEXT )
- )
-
# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
@@ -926,18 +905,6 @@
assign_mapper( context, MetadataFile, MetadataFile.table,
properties=dict( history_dataset=relation( HistoryDatasetAssociation ), library_dataset=relation( LibraryDatasetDatasetAssociation ) ) )
-assign_mapper( context, PageRevision, PageRevision.table )
-
-assign_mapper( context, Page, Page.table,
- properties=dict( user=relation( User ),
- revisions=relation( PageRevision, backref='page',
- cascade="all, delete-orphan",
- primaryjoin=( Page.table.c.id == PageRevision.table.c.page_id ) ),
- latest_revision=relation( PageRevision, post_update=True,
- primaryjoin=( Page.table.c.latest_revision_id == PageRevision.table.c.id ),
- lazy=False )
- ) )
-
def db_next_hid( self ):
"""
Override __next_hid to generate from the database in a concurrency
diff -r 36c479b93d7e -r 52a145b22b69 lib/galaxy/model/migrate/versions/0014_pages.py
--- a/lib/galaxy/model/migrate/versions/0014_pages.py Sun Aug 23 12:28:36 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,56 +0,0 @@
-from sqlalchemy import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-
-import logging
-log = logging.getLogger( __name__ )
-
-metadata = MetaData( migrate_engine )
-
-Page_table = Table( "page", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
- Column( "latest_revision_id", Integer,
- ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
- Column( "title", TEXT ),
- Column( "slug", TEXT, unique=True, index=True ),
- )
-
-PageRevision_table = Table( "page_revision", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
- Column( "title", TEXT ),
- Column( "content", TEXT )
- )
-
-def upgrade():
- metadata.reflect()
- try:
- Page_table.create()
- except:
- log.debug( "Could not create page table" )
- try:
- PageRevision_table.create()
- except:
- log.debug( "Could not create page_revision table" )
-
- # Add 1 column to the user table
- User_table = Table( "galaxy_user", metadata, autoload=True )
- col = Column( 'username', String(255), index=True, unique=True, default=False )
- print type( col ), col
- col.create( User_table )
- assert col is User_table.c.username
-
-def downgrade():
- metadata.reflect()
- Page_table.drop()
- PageRevision_table.drop()
- User_table = Table( "galaxy_user", metadata, autoload=True )
- User_table.c.username.drop()
\ No newline at end of file
diff -r 36c479b93d7e -r 52a145b22b69 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Sun Aug 23 12:28:36 2009 -0400
+++ b/lib/galaxy/web/controllers/user.py Sun Aug 23 12:31:20 2009 -0400
@@ -4,7 +4,7 @@
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy import util
-import logging, os, string, re
+import logging, os, string
from random import choice
log = logging.getLogger( __name__ )
@@ -19,8 +19,6 @@
"""
require_login_nocreation_template = require_login_template % ""
require_login_creation_template = require_login_template % " If you don't already have an account, <a href='%s'>you may create one</a>."
-
-VALID_USERNAME_RE = re.compile( "^[a-zA-Z0-9\-\_]+$" )
class User( BaseController ):
edit_address_id = None
@@ -80,37 +78,6 @@
.add_text( "email", "Email", value=email, error=email_err )
.add_text( "conf_email", "Confirm Email", value='', error=conf_email_err )
.add_password( "password", "Password", value='', error=pass_err ) )
-
- @web.expose
- def change_username(self, trans, username='', **kwd):
- username_err = ''
- user = trans.get_user()
- if not user:
- trans.response.send_redirect( web.url_for( action='login' ) )
- if trans.request.method == "POST":
- if len( username ) < 4:
- username_err = "Username must be at least 4 characters in length"
- elif len( username ) > 255:
- username_err = "USername must be at most 255 characters in length"
- elif not( VALID_USERNAME_RE.match( username ) ):
- username_err = "Username must contain only letters, numbers, '-', and '_'"
- elif trans.app.model.User.filter_by( username=username ).first():
- username_err = "This username is not available"
- else:
- user.username = username
- user.flush()
- trans.log_event( "User change username" )
- return trans.show_ok_message( "Username been set to: " + user.username )
- else:
- username = user.username or ''
- return trans.show_form(
- web.FormBuilder( web.url_for(), "Change username", submit_text="Submit" )
- .add_text( "username", "Username", value=username, error=username_err,
- help="""Your username is an optional identifier that
- will be used to generate adresses for information
- you share publicly. Usernames must be at least
- four characters in length and contain only letters,
- numbers, and the '-' and '_' characters""" ) )
@web.expose
def login( self, trans, email='', password='' ):
diff -r 36c479b93d7e -r 52a145b22b69 templates/user/index.mako
--- a/templates/user/index.mako Sun Aug 23 12:28:36 2009 -0400
+++ b/templates/user/index.mako Sun Aug 23 12:31:20 2009 -0400
@@ -9,7 +9,6 @@
<ul>
<li><a href="${h.url_for( action='change_password' )}">${_('Change your password')}</a></li>
<li><a href="${h.url_for( action='change_email' )}">${_('Update your email address')}</a></li>
- <li><a href="${h.url_for( action='change_username' )}">${_('Change your public username')}</a></li>
<li><a href="${h.url_for( action='set_default_permissions' )}">${_('Change default permissions')}</a> for new histories</li>
<li><a href="${h.url_for( action='manage_addresses' )}">${_('Manage your addresses')}</a></li>
<li><a href="${h.url_for( action='logout' )}">${_('Logout')}</a></li>
details: http://www.bx.psu.edu/hg/galaxy/rev/fbad627b45ac
changeset: 2605:fbad627b45ac
user: James Taylor <james(a)jamestaylor.org>
date: Sat Aug 22 23:18:47 2009 -0400
description:
Fixing styles for history rename and share interfaces
6 file(s) affected in this change:
static/june_2007_style/base.css.tmpl
static/june_2007_style/blue/base.css
static/scripts/packed/galaxy.base.js
templates/history/rename.mako
templates/history/share.mako
templates/root/index.mako
diffs (390 lines):
diff -r abdb523727ed -r fbad627b45ac static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl Thu Aug 06 10:29:51 2009 -0400
+++ b/static/june_2007_style/base.css.tmpl Sat Aug 22 23:18:47 2009 -0400
@@ -3,6 +3,7 @@
body{font-size:75%;}
## Mixins
+
.unselectable {
user-select: none;
-moz-user-select: none;
@@ -15,8 +16,8 @@
}
## Real styles
-body
-{
+
+body {
background: $base_bg_bottom;
color: $base_text;
background-image: url(base_bg.png);
@@ -25,60 +26,55 @@
margin: 10px;
}
-img
-{
+img {
border: 0;
}
-a:link, a:visited, a:active
-{
+a:link, a:visited, a:active {
color: $link_text;
}
-h1, h2, h3, h4
-{
+h1, h2, h3, h4 {
color: $header_text;
/*text-shadow: #bbb 2px 2px 1px;*/
}
-hr
-{
+hr {
border: none;
height: 0px;
border-bottom: dotted $base_text 1px;
}
-div.toolForm
-{
- border: solid $form_border 1px;
+th {
+ text-align: left;
}
-div.toolFormTitle
-{
- font-weight: bold;
- padding: 5px;
- padding-left: 10px;
- padding-right: 10px;
- background: $form_title_bg_bottom;
- ## background-image: url(form_title_bg.png);
- background-repeat: repeat-x;
- background-position: top;
- border-bottom: solid $form_border 1px;
+div.toolForm {
+ border: solid $form_border 1px;
}
-div.toolParamHelp
-{
- color: #666;
+div.toolFormTitle {
+ font-weight: bold;
+ padding: 5px;
+ padding-left: 10px;
+ padding-right: 10px;
+ background: $form_title_bg_bottom;
+ ## background-image: url(form_title_bg.png);
+ background-repeat: repeat-x;
+ background-position: top;
+ border-bottom: solid $form_border 1px;
}
-div.toolParamHelp a
-{
- color: #666;
+div.toolParamHelp {
+ color: #666;
}
-div.toolFormBody
-{
+div.toolParamHelp a {
+ color: #666;
+}
+
+div.toolFormBody {
background: $form_body_bg_bottom;
background-image: url(form_body_bg.png);
background-repeat: repeat-x;
@@ -86,8 +82,7 @@
padding: 5px 0;
}
-div.toolFormBody div.toolFormTitle
-{
+div.toolFormBody div.toolFormTitle {
background: transparent;
border: none;
font-weight: bold;
@@ -104,12 +99,10 @@
border-color: ${layout_border};
}
-div.toolHelp
-{
+div.toolHelp {
}
-div.toolHelpBody
-{
+div.toolHelpBody {
width: 100%;
overflow: auto;
}
@@ -121,15 +114,13 @@
padding-bottom: 0.25em;
}
-/* Forms */
+## Forms
-div.form
-{
+div.form {
border: solid $form_border 1px;
}
-div.form-title
-{
+div.form-title {
font-weight: bold;
padding: 5px 10px;
background: $form_title_bg_bottom;
@@ -139,53 +130,44 @@
border-bottom: solid $form_border 1px;
}
-div.form-body
-{
+div.form-body {
padding: 5px 0;
}
-div.form-row
-{
+div.form-row {
padding: 5px 10px;
}
-div.form-title-row
-{
+div.form-title-row {
padding: 5px 10px;
}
-div.repeat-group-item
-{
+div.repeat-group-item {
border-left: solid $form_border 5px;
margin-left: 10px;
margin-bottom: 10px;
}
-div.form-row-error
-{
+div.form-row-error {
background: $error_message_bg;
}
-div.form-row label
-{
+div.form-row label {
font-weight: bold;
display: block;
margin-bottom: .2em;
}
-div.form-row-input
-{
+div.form-row-input {
float: left;
width: 300px;
}
-div.form-row-input > input
-{
+div.form-row-input > input {
max-width: 300px;
}
-div.form-row-error-message
-{
+div.form-row-error-message {
width: 300px;
float: left;
color: red;
@@ -193,19 +175,17 @@
padding: 3px 0 0 1em;
}
-select, input, textarea
-{
+select, input, textarea {
font: inherit;
font-size: 115%;
}
-select, textarea, input[type="text"], input[type="file"], input[type="password"]
-{
+select, textarea, input[type="text"], input[type="file"], input[type="password"] {
-webkit-box-sizing: border-box;
max-width: 300px;
}
-/* Messages */
+## Messages
.errormessage, .warningmessage, .donemessage, .infomessage, .welcomeBlue, .welcomeRed , .screencastBox, .yellowbox, .redbox, .bluebox, .greenbox
{
diff -r abdb523727ed -r fbad627b45ac static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css Thu Aug 06 10:29:51 2009 -0400
+++ b/static/june_2007_style/blue/base.css Sat Aug 22 23:18:47 2009 -0400
@@ -10,6 +10,7 @@
a:link,a:visited,a:active{color:#303030;}
h1,h2,h3,h4{color:#023858;}
hr{border:none;height:0px;border-bottom:dotted #303030 1px;}
+th{text-align:left;}
div.toolForm{border:solid #d8b365 1px;}
div.toolFormTitle{font-weight:bold;padding:5px;padding-left:10px;padding-right:10px;background:#ebd9b2;background-repeat:repeat-x;background-position:top;border-bottom:solid #d8b365 1px;}
div.toolParamHelp{color:#666;}
diff -r abdb523727ed -r fbad627b45ac static/scripts/packed/galaxy.base.js
--- a/static/scripts/packed/galaxy.base.js Thu Aug 06 10:29:51 2009 -0400
+++ b/static/scripts/packed/galaxy.base.js Sat Aug 22 23:18:47 2009 -0400
@@ -1,1 +1,1 @@
-$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};jQuery(document).ready(function(){jQuery("a[confirm]").click(function(){return confirm(jQuery(this).attr("confirm"))});jQuery("div[popupmenu]").each(function(){var c={};$(this).find("a").each(function(){var b=$(this).attr("confirm"),d=$(this).attr("href"),e=$(this).attr("target");c[$(this).text()]=function(){if(!b||confirm(b)){var g=window;if(e=="_parent"){g=window.parent}g.location=d}}});var a=$("#"+$(this).attr("popupmenu"));make_popupmenu(a,c);$(this).remove();a.show()})});function ensure_popup_helper(){if($("#popup-helper").length==0){$("<div id='popup-helper'/>").css({background:"white",opacity:0,zIndex:15000,position:"absolute",top:0,left:0,width:"100%",height:"100%"}).appendTo("body").hide()}}function make_popupmenu(d,b){ensure_popup_helper();$(d).css("position","relative");var a=$("<div class='popupmenu' id='"+d.attr("id")+"-menu'></div>").css("position","absolute").appendTo(d);$.each(b,function(g,f){$("<div class='popupmenu-item' />").html(g).click(f).appendTo(a)});var c=function(){$(a).unbind().hide();$(document).unbind("click.popupmenu")};var e=function(f){var g=$(d).offset();$(document).bind("click.popupmenu",c);$(a).click(c).css({left:0,top:-1000}).show();$(a).css({top:0,left:0});return false};$(d).click(e)};
\ No newline at end of file
+$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};jQuery(document).ready(function(){jQuery("a[confirm]").click(function(){return confirm(jQuery(this).attr("confirm"))});jQuery("div[popupmenu]").each(function(){var c={};$(this).find("a").each(function(){var b=$(this).attr("confirm"),d=$(this).attr("href"),e=$(this).attr("target");c[$(this).text()]=function(){if(!b||confirm(b)){var g=window;if(e=="_parent"){g=window.parent}g.location=d}}});var a=$("#"+$(this).attr("popupmenu"));make_popupmenu(a,c);$(this).remove();a.show()})});function ensure_popup_helper(){if($("#popup-helper").length==0){$("<div id='popup-helper'/>").css({background:"white",opacity:0,zIndex:15000,position:"absolute",top:0,left:0,width:"100%",height:"100%"}).appendTo("body").hide()}}function make_popupmenu(d,c){ensure_popup_helper();var a=$(d);var b=$("<ul id='"+d.attr("id")+"-menu'></div>");$.each(c,function(g,f){if(f){$("<li/>").html(g).click(f).appendTo(b)}else{$("<li class='head'/>").html(g).appendTo(b)}});var e=$("<div class='popmenu-wrapper'>");e.append(b).append("<div class='overlay-border'>").css("position","absolute").appendTo("body").hide();attach_popupmenu(d,e)}function attach_popupmenu(b,d){console.log(b,d);var a=function(){d.unbind().hide();$("#popup-helper").unbind("click.popupmenu").hide()};var c=function(g){console.log(g);var h=$(b).offset();$("#popup-helper").bind("click.popupmenu",a).show();d.click(a).css({left:0,top:-1000}).show();var f=g.pageX-d.width()/2;f=Math.min(f,$(document).scrollLeft()+$(window).width()-$(d).width()-20);f=Math.max(f,$(document).scrollLeft()+20);d.css({top:g.pageY-5,left:f});return false};$(b).click(c)};
\ No newline at end of file
diff -r abdb523727ed -r fbad627b45ac templates/history/rename.mako
--- a/templates/history/rename.mako Thu Aug 06 10:29:51 2009 -0400
+++ b/templates/history/rename.mako Sat Aug 22 23:18:47 2009 -0400
@@ -3,39 +3,37 @@
<%def name="title()">${_('Rename History')}</%def>
<div class="toolForm">
- <div class="toolFormTitle">${_('Rename History')}</div>
+ <div class="toolFormTitle">${_('Rename')}</div>
<div class="toolFormBody">
<form action="${h.url_for( controller='history', action='rename' )}" method="post" >
- <table class="grid">
+ <div class="form-row">
+ <table>
+ <thead>
+ <tr>
+ <th>${_('Current Name')}</th>
+ <th>${_('New Name')}</th>
+ </tr>
+ </thead>
+ <tbody>
%for history in histories:
<tr>
<td>
- <div class="form-row">
- <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
- <label>${_('Current Name')}</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- ${history.name}
- </div>
- </div>
+ <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
+ ${history.name}
</td>
<td>
- <div class="form-row">
- <label>${_('New Name')}</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- <input type="text" name="name" value="${history.name}" size="40">
- </div>
- </div>
+ <input type="text" name="name" value="${history.name}" size="40">
</td>
</tr>
%endfor
+ </tbody>
<tr>
<td colspan="2">
- <div class="form-row">
- <input type="submit" name="history_rename_btn" value="${_('Rename Histories')}">
- </div>
+ <input type="submit" name="history_rename_btn" value="${_('Rename Histories')}">
</td>
</tr>
</table>
+ </div>
</form>
</div>
</div>
diff -r abdb523727ed -r fbad627b45ac templates/history/share.mako
--- a/templates/history/share.mako Thu Aug 06 10:29:51 2009 -0400
+++ b/templates/history/share.mako Sat Aug 22 23:18:47 2009 -0400
@@ -8,32 +8,32 @@
%if not can_change and not cannot_change and not no_change_needed:
## We are sharing histories that contain only public datasets
<form name='share' id='share' action="${h.url_for( controller="history", action='share' )}" method="post" >
- %for history in histories:
- <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
- <div class="toolForm">
- <div class="form-row">
- <label>${_('History Name:')}</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- ${history.name}
- </div>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <label>${_('Number of Datasets:')}</label>
- <div style="float: left; width: 250px; margin-right: 10px;">
- %if len( history.datasets ) < 1:
- <div class="warningmark">${_('This history contains no data.')}</div>
- %else:
- ${len(history.datasets)}
- %endif
- </td>
- </div>
- </div>
- <div style="clear: both"></div>
- <p/>
- </div>
- %endfor
- <p/>
+ <div class="form-title-row"><b>Histories to be shared:</b></div>
+ <div class="form-row" style="padding-left: 2em;">
+ <table width="100%">
+ <thead>
+ <th>${_('History Name')}</th>
+ <th>${_('Number of Datasets')}</th>
+ </thead>
+ <tbody>
+ %for history in histories:
+ <tr>
+ <td>
+ <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
+ ${history.name}
+ </td>
+ <td>
+ %if len( history.datasets ) < 1:
+ <div class="warningmark">${_('This history contains no data.')}</div>
+ %else:
+ ${len(history.datasets)}
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </tbody>
+ </table>
+ </div>
<div style="clear: both"></div>
<div class="form-row">
<label>Galaxy user emails with which to share histories</label>
diff -r abdb523727ed -r fbad627b45ac templates/root/index.mako
--- a/templates/root/index.mako Thu Aug 06 10:29:51 2009 -0400
+++ b/templates/root/index.mako Sat Aug 22 23:18:47 2009 -0400
@@ -20,7 +20,7 @@
"Clone": function() {
galaxy_main.location = "${h.url_for( controller='history', action='clone')}";
},
- "Manage sharing": function() {
+ "Share": function() {
galaxy_main.location = "${h.url_for( controller='history', action='share' )}";
},
"Extract workflow": function() {
details: http://www.bx.psu.edu/hg/galaxy/rev/abdb523727ed
changeset: 2604:abdb523727ed
user: James Taylor <james(a)jamestaylor.org>
date: Thu Aug 06 10:29:51 2009 -0400
description:
Rework popup menu styles. Popups are now larger, have borders, and have better highlight styles. The history options button is now a popup menu. (Also misc style fixes and improvements).
16 file(s) affected in this change:
lib/galaxy/web/controllers/history.py
static/june_2007_style/base.css.tmpl
static/june_2007_style/blue/base.css
static/june_2007_style/blue/history-buttons.png
static/june_2007_style/blue/history-states.png
static/june_2007_style/blue/library.css
static/june_2007_style/blue/panel_layout.css
static/june_2007_style/blue_colors.ini
static/june_2007_style/panel_layout.css.tmpl
static/scripts/galaxy.base.js
templates/base_panels.mako
templates/history/clone.mako
templates/history/grid.mako
templates/root/index.mako
templates/workflow/editor.mako
templates/workflow/list.mako
diffs (688 lines):
diff -r 542471b183d7 -r abdb523727ed lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Aug 20 18:44:35 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Aug 06 10:29:51 2009 -0400
@@ -229,6 +229,7 @@
# No message
return None, None
@web.expose
+ @web.require_login( "work with shared histories" )
def list_shared( self, trans, **kwargs ):
"""List histories shared with current user by others"""
msg = util.restore_text( kwargs.get( 'msg', '' ) )
@@ -617,6 +618,7 @@
if send_to_err:
msg += send_to_err
return self.sharing( trans, histories=shared_histories, msg=msg )
+
@web.expose
@web.require_login( "share histories with other users" )
def sharing( self, trans, histories=[], id=None, **kwd ):
@@ -644,6 +646,7 @@
shared_msg = "History (%s) now shared with: %d users. " % ( history.name, len( history.users_shared_with ) )
msg = '%s%s' % ( shared_msg, msg )
return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='done' )
+
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
@@ -681,19 +684,26 @@
else:
change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>"
return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
+
@web.expose
@web.require_login( "clone shared Galaxy history" )
- def clone( self, trans, id, **kwd ):
+ def clone( self, trans, id=None, **kwd ):
"""Clone a list of histories"""
params = util.Params( kwd )
- ids = util.listify( id )
- histories = []
- for history_id in ids:
- history = get_history( trans, history_id, check_ownership=False )
- histories.append( history )
+ # If clone_choice was not specified, display form passing along id
+ # argument
clone_choice = params.get( 'clone_choice', None )
if not clone_choice:
- return trans.fill_template( "/history/clone.mako", history=history )
+ return trans.fill_template( "/history/clone.mako", id_argument=id )
+ # Extract histories for id argument, defaulting to current
+ if id is None:
+ histories = [ trans.history ]
+ else:
+ ids = util.listify( id )
+ histories = []
+ for history_id in ids:
+ history = get_history( trans, history_id, check_ownership=False )
+ histories.append( history )
user = trans.get_user()
for history in histories:
if history.user == user:
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl Thu Aug 20 18:44:35 2009 -0400
+++ b/static/june_2007_style/base.css.tmpl Thu Aug 06 10:29:51 2009 -0400
@@ -2,6 +2,19 @@
body{font:13px/1.231 verdana,arial,helvetica,clean,sans-serif;*font-size:small;*font:x-small;}select,input,button,textarea,button{font:99% verdana,arial,helvetica,clean,sans-serif;}table{font-size:inherit;font:100%;}pre,code,kbd,samp,tt{font-family:monospace;*font-size:108%;line-height:100%;}
body{font-size:75%;}
+## Mixins
+.unselectable {
+ user-select: none;
+ -moz-user-select: none;
+ -webkit-user-select: none;
+}
+
+.shadow {
+ -moz-box-shadow: 0 3px 30px black;
+ -webkit-box-shadow: 0 3px 30px black;
+}
+
+## Real styles
body
{
background: $base_bg_bottom;
@@ -48,7 +61,7 @@
padding-left: 10px;
padding-right: 10px;
background: $form_title_bg_bottom;
- background-image: url(form_title_bg.png);
+ ## background-image: url(form_title_bg.png);
background-repeat: repeat-x;
background-position: top;
border-bottom: solid $form_border 1px;
@@ -524,10 +537,11 @@
## Button styles
.action-button {
- background: #eeeeee;
+ background: transparent;
+ line-height: 16px;
color: #333;
text-decoration: none;
- font-size: 95%;
+ font-size: 100%;
font-weight: bold;
display: inline-block;
cursor: pointer;
@@ -556,39 +570,139 @@
background: #aaaaaa;
}
-## Popup menu styles
+## A menu button is a button that has an attached popup menu
-div.popupmenu {
- display: none;
- background: #eeeeee;
- color: #333;
- font-size: 110%;
- font-weight: bold;
- font-style: normal;
- white-space: nowrap;
- position: absolute;
- z-index: 20000;
- border: solid #aaaaaa 1px;
- padding: 3px 0;
+.menubutton {
+ display: inline-block;
+ cursor: pointer;
+ position: relative;
+ .unselectable;
+
+ border: solid transparent 1px;
-moz-border-radius: 0.5em;
-webkit-border-radius: 0.5em;
border-radius: 0.5em;
- user-select: none;
- -moz-user-select: none;
- -webkit-user-select: none;
+
+ padding: 3px 0.5em;
+ margin: -3px -0.5em;
+ padding-right: 1.5em;
+
+ .label, .arrow {
+ position: relative;
+ display: block;
+ }
+
+ .label {
+ border-right: none;
+ }
+
+ .arrow {
+ padding-left: 2px;
+ width: 1em;
+ position: absolute;
+ top: 0;
+ right: 5px;
+ height: 100%;
+ }
+
+ .arrow > span {
+ display: inline-block;
+ padding-top: 3px;
+ }
+
}
-div.popupmenu-item {
- padding: 3px 1em;
- cursor: pointer;
+.menubutton:hover {
+ border-color: #aaaaaa;
+ ## background: #eeeeee;
+ ## color: #333;
}
-div.popupmenu-item:hover {
- background: #aaaaaa;
+## A split menu button, the main button has an action, the arrow causes the
+## popup menu to appear
+
+.menubutton.split {
+ padding-right: 2em;
+ .arrow {
+ border-left: solid transparent 1px;
+ }
+}
+
+.menubutton.split:hover {
+ .arrow {
+ border-left: solid #aaaaaa 1px;
+ }
+}
+
+## Popup menu styles
+
+.overlay-border {
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 100%;
+ width: 100%;
+ padding: 1em;
+ margin: -1em;
+ background-color: rgba(0,0,0,0.5);
+ -moz-border-radius: 1em;
+ -webkit-border-radius: 1em;
+ z-index: -1;
+}
+
+div.popmenu-wrapper {
+
+ position: absolute;
+ top: 100%;
+ z-index: 20000;
+
+ ul {
+
+ display: block;
+ margin: 0;
+ padding: 0;
+
+ background: white;
+ color: #333;
+ font-weight: bold;
+ font-style: normal;
+ white-space: nowrap;
+ border: solid #aaaaaa 1px;
+ padding: 3px 0;
+ -moz-border-radius: 0.5em;
+ -webkit-border-radius: 0.5em;
+ border-radius: 0.5em;
+ ## margin: -3px -0.5em;
+ ## min-width: 100%;
+
+ .unselectable;
+
+ li {
+ display: block;
+ padding: 3px 1em;
+ cursor: pointer;
+ border-top: solid transparent 1px;
+ border-bottom: solid transparent 1px;
+ }
+
+ li.head {
+ color: #999;
+ font-style: italic;
+ }
+ }
+}
+
+div.popmenu-wrapper ul li:hover {
+ background: #EEEEFF;
+ border-color: #aaa;
+}
+
+div.popmenu-wrapper ul li.head:hover {
+ background: inherit;
+ border-color: transparent;
}
.popup-arrow {
- font-size: 80%;
cursor: pointer;
text-decoration: none;
color: #555;
@@ -632,7 +746,7 @@
}
.grid thead th {
background: $table_header_bg;
- background-image: url(form_title_bg.png);
+ ## background-image: url(form_title_bg.png);
background-repeat: repeat-x;
background-position: top;
border-top: solid $table_border 1px;
@@ -648,4 +762,4 @@
}
.grid .current {
background-color: #EEEEFF;
-}
\ No newline at end of file
+}
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css Thu Aug 20 18:44:35 2009 -0400
+++ b/static/june_2007_style/blue/base.css Thu Aug 06 10:29:51 2009 -0400
@@ -3,13 +3,15 @@
table{font-size:inherit;font:100%;}
pre,code,kbd,samp,tt{font-family:monospace;*font-size:108%;line-height:100%;}
body{font-size:75%;}
+.unselectable{user-select:none;-moz-user-select:none;-webkit-user-select:none;}
+.shadow{-moz-box-shadow:0 3px 30px black;-webkit-box-shadow:0 3px 30px black;}
body{background:#FFFFFF;color:#303030;background-image:url(base_bg.png);background-repeat:repeat-x;background-position:top;margin:10px;}
img{border:0;}
a:link,a:visited,a:active{color:#303030;}
h1,h2,h3,h4{color:#023858;}
hr{border:none;height:0px;border-bottom:dotted #303030 1px;}
div.toolForm{border:solid #d8b365 1px;}
-div.toolFormTitle{font-weight:bold;padding:5px;padding-left:10px;padding-right:10px;background:#d2c099;background-image:url(form_title_bg.png);background-repeat:repeat-x;background-position:top;border-bottom:solid #d8b365 1px;}
+div.toolFormTitle{font-weight:bold;padding:5px;padding-left:10px;padding-right:10px;background:#ebd9b2;background-repeat:repeat-x;background-position:top;border-bottom:solid #d8b365 1px;}
div.toolParamHelp{color:#666;}
div.toolParamHelp a{color:#666;}
div.toolFormBody{background:#FFFFFF;background-image:url(form_body_bg.png);background-repeat:repeat-x;background-position:top;padding:5px 0;}
@@ -20,7 +22,7 @@
div.toolHelpBody{width:100%;overflow:auto;}
div.titleRow{font-weight:bold;border-bottom:dotted gray 1px;margin-bottom:0.5em;padding-bottom:0.25em;}
div.form{border:solid #d8b365 1px;}
-div.form-title{font-weight:bold;padding:5px 10px;background:#d2c099;background-image:url(form_title_bg.png);background-repeat:repeat-x;background-position:top;border-bottom:solid #d8b365 1px;}
+div.form-title{font-weight:bold;padding:5px 10px;background:#ebd9b2;background-image:url(form_title_bg.png);background-repeat:repeat-x;background-position:top;border-bottom:solid #d8b365 1px;}
div.form-body{padding:5px 0;}
div.form-row{padding:5px 10px;}
div.form-title-row{padding:5px 10px;}
@@ -77,14 +79,28 @@
.state-fg-ok{color:#66AA66;}
.state-fg-running{color:#AAAA66;}
.state-fg-error{color:#AA6666;}
-.action-button{background:#eeeeee;color:#333;text-decoration:none;font-size:95%;font-weight:bold;display:inline-block;cursor:pointer;padding:2px;border:solid #aaaaaa 1px;padding-right:0.5em;padding-left:0.5em;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;user-select:none;-moz-user-select:none;-webkit-user-select:none;}
+.action-button{background:transparent;line-height:16px;color:#333;text-decoration:none;font-size:100%;font-weight:bold;display:inline-block;cursor:pointer;padding:2px;border:solid #aaaaaa 1px;padding-right:0.5em;padding-left:0.5em;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;user-select:none;-moz-user-select:none;-webkit-user-select:none;}
.action-button > *{vertical-align:middle;}
.action-button:hover{color:black;background:#dddddd;}
.action-button:active{color:white;background:#aaaaaa;}
-div.popupmenu{display:none;background:#eeeeee;color:#333;font-size:110%;font-weight:bold;font-style:normal;white-space:nowrap;position:absolute;z-index:20000;border:solid #aaaaaa 1px;padding:3px 0;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;user-select:none;-moz-user-select:none;-webkit-user-select:none;}
-div.popupmenu-item{padding:3px 1em;cursor:pointer;}
-div.popupmenu-item:hover{background:#aaaaaa;}
-.popup-arrow{font-size:80%;cursor:pointer;text-decoration:none;color:#555;}
+.menubutton{display:inline-block;cursor:pointer;position:relative;user-select:none;-moz-user-select:none;-webkit-user-select:none;border:solid transparent 1px;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;padding:3px 0.5em;margin:-3px -0.5em;padding-right:1.5em;}
+.menubutton .label,.menubutton .arrow{position:relative;display:block;}
+.menubutton .label{border-right:none;}
+.menubutton .arrow{padding-left:2px;width:1em;position:absolute;top:0;right:5px;height:100%;}
+.menubutton .arrow > span{display:inline-block;padding-top:3px;}
+.menubutton:hover{border-color:#aaaaaa;}
+.menubutton.split{padding-right:2em;}
+.menubutton.split .arrow{border-left:solid transparent 1px;}
+.menubutton.split:hover{}
+.menubutton.split:hover .arrow{border-left:solid #aaaaaa 1px;}
+.overlay-border{position:absolute;top:0;left:0;height:100%;width:100%;padding:1em;margin:-1em;background-color:rgba(0,0,0,0.5);-moz-border-radius:1em;-webkit-border-radius:1em;z-index:-1;}
+div.popmenu-wrapper{position:absolute;top:100%;z-index:20000;}
+div.popmenu-wrapper ul{display:block;margin:0;padding:0;background:white;color:#333;font-weight:bold;font-style:normal;white-space:nowrap;border:solid #aaaaaa 1px;padding:3px 0;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;user-select:none;-moz-user-select:none;-webkit-user-select:none;}
+div.popmenu-wrapper ul li{display:block;padding:3px 1em;cursor:pointer;border-top:solid transparent 1px;border-bottom:solid transparent 1px;}
+div.popmenu-wrapper ul li.head{color:#999;font-style:italic;}
+div.popmenu-wrapper ul li:hover{background:#EEEEFF;border-color:#aaa;}
+div.popmenu-wrapper ul li.head:hover{background:inherit;border-color:transparent;}
+.popup-arrow{cursor:pointer;text-decoration:none;color:#555;}
.popup-arrow:hover{color:black;}
div.permissionContainer{padding-left:20px;}
.grid-header{padding-bottom:1em;}
@@ -93,6 +109,6 @@
.grid{padding-top:1em;border-collapse:collapse;width:100%;}
.grid tbody td{border-top:solid #DDDDDD 1px;border-bottom:solid #DDDDDD 1px;padding:0.5em 1em;}
.grid tbody td:empty{padding:0;}
-.grid thead th{background:#ebd9b2;background-image:url(form_title_bg.png);background-repeat:repeat-x;background-position:top;border-top:solid #d8b365 1px;border-bottom:solid #d8b365 1px;padding:0.5em 1em;text-align:left;}
+.grid thead th{background:#ebd9b2;background-repeat:repeat-x;background-position:top;border-top:solid #d8b365 1px;border-bottom:solid #d8b365 1px;padding:0.5em 1em;text-align:left;}
.grid tfoot td{background-color:#F8F8F8;border-top:solid #DDDDDD 1px;border-bottom:solid #DDDDDD 1px;padding:0.5em 1em;}
.grid .current{background-color:#EEEEFF;}
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/blue/history-buttons.png
Binary file static/june_2007_style/blue/history-buttons.png has changed
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/blue/history-states.png
Binary file static/june_2007_style/blue/history-states.png has changed
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/blue/library.css
--- a/static/june_2007_style/blue/library.css Thu Aug 20 18:44:35 2009 -0400
+++ b/static/june_2007_style/blue/library.css Thu Aug 06 10:29:51 2009 -0400
@@ -1,4 +1,4 @@
-.libraryRow{background-color:#d2c099;}
+.libraryRow{background-color:#ebd9b2;}
.datasetHighlighted{background-color:#C1C9E5;}
.libraryItemDeleted-True{font-style:italic;}
div.historyItemBody{padding:4px 4px 2px 4px;}
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/blue/panel_layout.css
--- a/static/june_2007_style/blue/panel_layout.css Thu Aug 20 18:44:35 2009 -0400
+++ b/static/june_2007_style/blue/panel_layout.css Thu Aug 06 10:29:51 2009 -0400
@@ -24,7 +24,7 @@
.unified-panel-header-inner{padding-top:0.45em;}
.menu-bg{background:#C1C9E5 url(menu_bg.png) top repeat-x;}
div.unified-panel-body{position:absolute;top:2em;bottom:0;width:100%;margin-top:1px;}
-.panel-header-button{color:#333;text-decoration:none;display:inline-block;cursor:pointer;margin:-1px;padding:1px;border:0px;padding-right:0.5em;padding-left:0.5em;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;background:transparent;}
+.panel-header-button{color:#333;text-decoration:none;display:inline-block;cursor:pointer;margin:-1px;padding:1px;padding-right:0.5em;padding-left:0.5em;-moz-border-radius:0.5em;-webkit-border-radius:0.5em;border-radius:0.5em;background:#bbb;}
.panel-header-button:hover{color:black;background:#aaaaaa;}
.panel-header-button:active{color:white;background:#aaaaaa;}
#overlay{position:fixed;top:0;left:0;width:100%;height:100%;z-index:20000;}
@@ -37,9 +37,8 @@
.panel-done-message{background-image:url(done_small.png);background-color:#CCFFCC;}
.panel-info-message{background-image:url(info_small.png);background-color:#CCCCFF;}
#masthead{position:absolute;top:0;left:0;width:100%;height:32px;background:#2C3143;color:#fff;border-bottom:solid #444 1px;z-index:15000;padding:0;}
-#masthead a{color:#eeeeee;}
+#masthead a{color:#eeeeee;text-decoration:none;}
#masthead .title{padding:3px 10px;font-size:175%;font-weight:bold;}
-#masthead a{text-decoration:none;}
#masthead a:hover{text-decoration:underline;}
.tab-group{margin:0;padding:0 10px;height:100%;white-space:nowrap;cursor:default;background:transparent;}
.tab-group .tab{background:#2C3143;position:relative;float:left;margin:0;padding:0 1em;height:32px;line-height:32px;text-align:left;}
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/blue_colors.ini
--- a/static/june_2007_style/blue_colors.ini Thu Aug 20 18:44:35 2009 -0400
+++ b/static/june_2007_style/blue_colors.ini Thu Aug 06 10:29:51 2009 -0400
@@ -9,7 +9,7 @@
#menu_bg_hatch=#AAAAFF
# Forms
form_title_bg_top=#ebd9b2
-form_title_bg_bottom=#d2c099
+form_title_bg_bottom=#ebd9b2
form_title_bg_hatch=-
form_border=#d8b365
#form_body_bg=#FFFFFF
diff -r 542471b183d7 -r abdb523727ed static/june_2007_style/panel_layout.css.tmpl
--- a/static/june_2007_style/panel_layout.css.tmpl Thu Aug 20 18:44:35 2009 -0400
+++ b/static/june_2007_style/panel_layout.css.tmpl Thu Aug 06 10:29:51 2009 -0400
@@ -144,13 +144,12 @@
display: inline-block;
cursor: pointer;
margin: -1px; padding: 1px;
- border: 0px;
padding-right: 0.5em;
padding-left: 0.5em;
-moz-border-radius: 0.5em;
-webkit-border-radius: 0.5em;
border-radius: 0.5em;
- background: transparent;
+ background: #bbb;
}
.panel-header-button:hover {
@@ -230,8 +229,7 @@
## Masthead
-#masthead
-{
+#masthead {
position:absolute;
top:0;
left:0;
@@ -242,27 +240,20 @@
border-bottom: solid ${layout_masthead_border} 1px;
z-index: 15000;
padding: 0;
+
+ a {
+ color: ${masthead_link};
+ text-decoration: none;
+ }
+
+ .title {
+ padding: 3px 10px;
+ font-size: 175%;
+ font-weight: bold;
+ }
}
-#masthead a
-{
- color: $masthead_link;
-}
-
-#masthead .title
-{
- padding: 3px 10px;
- font-size: 175%;
- font-weight: bold;
-}
-
-#masthead a
-{
- text-decoration: none;
-}
-
-#masthead a:hover
-{
+#masthead a:hover {
text-decoration: underline;
}
diff -r 542471b183d7 -r abdb523727ed static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js Thu Aug 20 18:44:35 2009 -0400
+++ b/static/scripts/galaxy.base.js Thu Aug 06 10:29:51 2009 -0400
@@ -54,30 +54,52 @@
function make_popupmenu( button_element, options ) {
ensure_popup_helper();
- $(button_element).css( "position", "relative" );
- var menu_element = $( "<div class='popupmenu' id='" + button_element.attr('id') + "-menu'></div>" )
- .css( "position", "absolute" )
- .appendTo( button_element );
+ var container_element = $(button_element);
+ // if ( container_element.parent().hasClass( "combo-button" ) ) {
+ // container_element = container_element.parent();
+ // }
+ // ontainer_element).css( "position", "relative" );
+ var menu_element = $( "<ul id='" + button_element.attr('id') + "-menu'></div>" );
$.each( options, function( k, v ) {
- $( "<div class='popupmenu-item' />" ).html( k ).click( v ).appendTo( menu_element );
+ if ( v ) {
+ $( "<li/>" ).html( k ).click( v ).appendTo( menu_element );
+ } else {
+ $( "<li class='head'/>" ).html( k ).appendTo( menu_element );
+ }
});
+ var wrapper = $( "<div class='popmenu-wrapper'>" );
+ wrapper.append( menu_element )
+ .append( "<div class='overlay-border'>" )
+ .css( "position", "absolute" )
+ .appendTo( "body" )
+ .hide();
+ attach_popupmenu( button_element, wrapper );
+};
+
+function attach_popupmenu( button_element, wrapper ) {
+ console.log( button_element, wrapper );
var clean = function() {
- $(menu_element).unbind().hide();
- // $("#popup-helper").unbind().hide();
- $(document).unbind( "click.popupmenu" );
+ wrapper.unbind().hide();
+ $("#popup-helper").unbind( "click.popupmenu" ).hide();
+ // $(document).unbind( "click.popupmenu" );
};
var click = function( e ) {
+ console.log( e );
var o = $(button_element).offset();
- // $("#popup-helper").mousedown( clean ).show();
- $(document).bind( "click.popupmenu", clean );
+ $("#popup-helper").bind( "click.popupmenu", clean ).show();
+ // $(document).bind( "click.popupmenu", clean );
// Show off screen to get size right
- $( menu_element ).click( clean ).css( { left: 0, top: -1000 } ).show();
+ wrapper.click( clean ).css( { left: 0, top: -1000 } ).show();
// console.log( e.pageX, $(document).scrollLeft() + $(window).width(), $(menu_element).width() );
- // var x = Math.min( e.pageX - 2, $(document).scrollLeft() + $(window).width() - $(menu_element).width() - 5 );
+ var x = e.pageX - wrapper.width() / 2
+ x = Math.min( x, $(document).scrollLeft() + $(window).width() - $(wrapper).width() - 20 );
+ x = Math.max( x, $(document).scrollLeft() + 20 );
// console.log( e.pageX, $(document).scrollLeft() + $(window).width(), $(menu_element).width() );
- $( menu_element ).css( {
- top: 0, //e.pageY - 2,
- left: 0 // x
+
+
+ wrapper.css( {
+ top: e.pageY - 5,
+ left: x
} );
return false;
};
diff -r 542471b183d7 -r abdb523727ed templates/base_panels.mako
--- a/templates/base_panels.mako Thu Aug 20 18:44:35 2009 -0400
+++ b/templates/base_panels.mako Thu Aug 06 10:29:51 2009 -0400
@@ -20,7 +20,7 @@
## Default stylesheets
<%def name="stylesheets()">
- ${h.css('panel_layout')}
+ ${h.css('base','panel_layout')}
<style type="text/css">
#center {
%if not self.has_left_panel:
@@ -50,7 +50,7 @@
<%def name="late_javascripts()">
## Scripts can be loaded later since they progressively add features to
## the panels, but do not change layout
- ${h.js( 'jquery', 'jquery.event.drag', 'jquery.event.hover', 'jquery.form', 'galaxy.panels' )}
+ ${h.js( 'jquery', 'jquery.event.drag', 'jquery.event.hover', 'jquery.form', 'galaxy.base', 'galaxy.panels' )}
<script type="text/javascript">
ensure_dd_helper();
diff -r 542471b183d7 -r abdb523727ed templates/history/clone.mako
--- a/templates/history/clone.mako Thu Aug 20 18:44:35 2009 -0400
+++ b/templates/history/clone.mako Thu Aug 06 10:29:51 2009 -0400
@@ -7,7 +7,9 @@
<div class="toolFormBody">
<form action="${h.url_for( controller='history', action='clone' )}" method="post" >
<div class="form-row">
- <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
+ %if id_argument is not None:
+ <input type="hidden" name="id" value="${trans.security.encode_id( id_argument )}">
+ %endif
You can clone the history such that the clone will include all items in the original
history, or you can eliminate the original history's deleted items from the clone.
</div>
diff -r 542471b183d7 -r abdb523727ed templates/history/grid.mako
--- a/templates/history/grid.mako Thu Aug 20 18:44:35 2009 -0400
+++ b/templates/history/grid.mako Thu Aug 06 10:29:51 2009 -0400
@@ -153,16 +153,15 @@
<%
# Attach popup menu?
if column.attach_popup and cellnum == 0:
- extra = '<a id="grid-%d-popup" class="popup-arrow" style="display: none;">▼</a>' % i
+ extra = '<a id="grid-%d-popup" class="arrow" style="display: none;"><span>▼</span></a>' % i
else:
extra = ""
%>
%if href:
- <td><a href="${href}">${v}</a> ${extra}</td>
+ <td><div class="menubutton split"><a class="label" href="${href}">${v}${extra}</a></td>
%else:
<td >${v}${extra}</td>
%endif
- </td>
%endfor
%endif
%endfor
diff -r 542471b183d7 -r abdb523727ed templates/root/index.mako
--- a/templates/root/index.mako Thu Aug 20 18:44:35 2009 -0400
+++ b/templates/root/index.mako Thu Aug 06 10:29:51 2009 -0400
@@ -1,4 +1,46 @@
<%inherit file="/base_panels.mako"/>
+
+<%def name="late_javascripts()">
+ ${parent.late_javascripts()}
+ <script type="text/javascript">
+ $(function(){
+ $("#history-options-button").css( "position", "relative" );
+ make_popupmenu( $("#history-options-button"), {
+ "List your histories": null,
+ "Stored by you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list')}";
+ },
+ "Shared with you": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='list_shared')}";
+ },
+ "Current History": null,
+ "Create new": function() {
+ galaxy_history.location = "${h.url_for( controller='root', action='history_new' )}";
+ },
+ "Clone": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='clone')}";
+ },
+ "Manage sharing": function() {
+ galaxy_main.location = "${h.url_for( controller='history', action='share' )}";
+ },
+ "Extract workflow": function() {
+ galaxy_main.location = "${h.url_for( controller='workflow', action='build_from_current_history' )}";
+ },
+ "Dataset security": function() {
+ galaxy_main.location = "${h.url_for( controller='root', action='history_set_default_permissions' )}";
+ },
+ "Show deleted datasets": function() {
+ galaxy_history.location = "${h.url_for( controller='root', action='history', show_deleted=True)}";
+ },
+ "Delete": function() {
+ if ( confirm( "Really delete the current history?" ) ) {
+ galaxy_main.location = "${h.url_for( controller='history', action='delete_current' )}";
+ }
+ }
+ });
+ });
+ </script>
+</%def>
<%def name="init()">
<%
@@ -48,7 +90,7 @@
<div class="unified-panel-header" unselectable="on">
<div class="unified-panel-header-inner">
<div style="float: right">
- <a class='panel-header-button' href="${h.url_for( controller='root', action='history_options' )}" target="galaxy_main"><span>${_('Options')}</span></a>
+ <a id="history-options-button" class='panel-header-button' href="${h.url_for( controller='root', action='history_options' )}" target="galaxy_main"><span>${_('Options')}<span>▼</span></span></a>
</div>
<div class="panel-header-text">${_('History')}</div>
</div>
diff -r 542471b183d7 -r abdb523727ed templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Thu Aug 20 18:44:35 2009 -0400
+++ b/templates/workflow/editor.mako Thu Aug 06 10:29:51 2009 -0400
@@ -677,9 +677,8 @@
<div class="unified-panel-header" unselectable="on">
<div class="unified-panel-header-inner" style="float: right">
- <span class="panel-header-button-group">
<a id="layout-button" class="panel-header-button">Layout</a>
- </span>
+
<a id="save-button" class="panel-header-button">Save</a>
<a id="close-button" class="panel-header-button">Close</a>
</div>
diff -r 542471b183d7 -r abdb523727ed templates/workflow/list.mako
--- a/templates/workflow/list.mako Thu Aug 20 18:44:35 2009 -0400
+++ b/templates/workflow/list.mako Thu Aug 06 10:29:51 2009 -0400
@@ -37,8 +37,10 @@
%for i, workflow in enumerate( workflows ):
<tr>
<td>
+ <div class="menubutton">
+ <a id="wf-${i}-popup" class="arrow" style="display: none;"><span>▼</span></a>
${workflow.name}
- <a id="wf-${i}-popup" class="popup-arrow" style="display: none;">▼</a>
+ </div>
</td>
<td>${len(workflow.latest_workflow.steps)}</td>
## <td>${str(workflow.update_time)[:19]}</td>
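For readers following the popup-menu rework in the hunks above, here is a minimal usage sketch (not part of the changeset), assuming galaxy.base.js is loaded; the button id, labels, and URLs are hypothetical, while the real wiring for #history-options-button appears in the templates/root/index.mako hunk. Per the make_popupmenu code above, a label mapped to null renders as a non-clickable section head (li.head) and a label mapped to a function becomes a clickable item.

$(function() {
    // Hypothetical trigger element; give it position: relative,
    // as root/index.mako does for #history-options-button.
    var button = $("#example-options-button").css("position", "relative");
    make_popupmenu(button, {
        "Example section": null,                        // section heading (li.head)
        "Open a page": function() {
            window.location = "/hypothetical/url";      // placeholder URL
        },
        "Say hello": function() { alert("hello"); }     // simple click handler
    });
});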
details: http://www.bx.psu.edu/hg/galaxy/rev/97170896bb91
changeset: 2600:97170896bb91
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 11:04:45 2009 -0400
description:
Remove redundant auto-save js
2 file(s) affected in this change:
templates/workflow/editor_generic_form.mako
templates/workflow/editor_tool_form.mako
diffs (23 lines):
diff -r bedfce38e652 -r 97170896bb91 templates/workflow/editor_generic_form.mako
--- a/templates/workflow/editor_generic_form.mako Thu Aug 20 10:59:13 2009 -0400
+++ b/templates/workflow/editor_generic_form.mako Thu Aug 20 11:04:45 2009 -0400
@@ -40,7 +40,3 @@
</form>
</div>
</div>
-
-<script type="text/javascript">
- workflow.enable_auto_save();
-</script>
diff -r bedfce38e652 -r 97170896bb91 templates/workflow/editor_tool_form.mako
--- a/templates/workflow/editor_tool_form.mako Thu Aug 20 10:59:13 2009 -0400
+++ b/templates/workflow/editor_tool_form.mako Thu Aug 20 11:04:45 2009 -0400
@@ -108,8 +108,3 @@
</form>
</div>
</div>
-
-<script type="text/javascript">
- workflow.enable_auto_save();
-</script>
-
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/0517fd342fc8
changeset: 2601:0517fd342fc8
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 12:41:37 2009 -0400
description:
Use JSON2.js for all JSON operations, and remove unused javascript files
7 file(s) affected in this change:
static/scripts/cookie_set.js
static/scripts/jquery.cookie.js
static/scripts/jquery.json.js
static/scripts/packed/cookie_set.js
static/scripts/packed/jquery.cookie.js
static/scripts/packed/jquery.json.js
templates/workflow/editor.mako
diffs (270 lines):
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/cookie_set.js
--- a/static/scripts/cookie_set.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-function CookieSet( cookie_name ) {
- this.cookie_name = cookie_name;
- this.store = store = {};
- jQuery.each( ( jQuery.cookie( cookie_name) || "" ).split( "|" ), function( k, v ) {
- store[ v ] = true;
- });
-};
-CookieSet.prototype.add = function( value ) {
- this.store[value] = true;
- return this;
-};
-CookieSet.prototype.remove = function( value ) {
- delete this.store[value];
- return this;
-};
-CookieSet.prototype.removeAll = function( value ) {
- this.store = {};
- return this;
-};
-CookieSet.prototype.contains = function( value ) {
- return ( value in this.store );
-};
-CookieSet.prototype.save = function() {
- t = [];
- for ( key in this.store ) {
- if ( key != "" ) { t.push( key ) }
- }
- jQuery.cookie( this.cookie_name, t.join( "|" ) );
- return this;
-};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/jquery.cookie.js
--- a/static/scripts/jquery.cookie.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,92 +0,0 @@
-/**
- * Cookie plugin
- *
- * Copyright (c) 2006 Klaus Hartl (stilbuero.de)
- * Dual licensed under the MIT and GPL licenses:
- * http://www.opensource.org/licenses/mit-license.php
- * http://www.gnu.org/licenses/gpl.html
- *
- */
-
-/**
- * Create a cookie with the given name and value and other optional parameters.
- *
- * @example $.cookie('the_cookie', 'the_value');
- * @desc Set the value of a cookie.
- * @example $.cookie('the_cookie', 'the_value', {expires: 7, path: '/', domain: 'jquery.com', secure: true});
- * @desc Create a cookie with all available options.
- * @example $.cookie('the_cookie', 'the_value');
- * @desc Create a session cookie.
- * @example $.cookie('the_cookie', null);
- * @desc Delete a cookie by passing null as value.
- *
- * @param String name The name of the cookie.
- * @param String value The value of the cookie.
- * @param Object options An object literal containing key/value pairs to provide optional cookie attributes.
- * @option Number|Date expires Either an integer specifying the expiration date from now on in days or a Date object.
- * If a negative value is specified (e.g. a date in the past), the cookie will be deleted.
- * If set to null or omitted, the cookie will be a session cookie and will not be retained
- * when the the browser exits.
- * @option String path The value of the path atribute of the cookie (default: path of page that created the cookie).
- * @option String domain The value of the domain attribute of the cookie (default: domain of page that created the cookie).
- * @option Boolean secure If true, the secure attribute of the cookie will be set and the cookie transmission will
- * require a secure protocol (like HTTPS).
- * @type undefined
- *
- * @name $.cookie
- * @cat Plugins/Cookie
- * @author Klaus Hartl/klaus.hartl(a)stilbuero.de
- */
-
-/**
- * Get the value of a cookie with the given name.
- *
- * @example $.cookie('the_cookie');
- * @desc Get the value of a cookie.
- *
- * @param String name The name of the cookie.
- * @return The value of the cookie.
- * @type String
- *
- * @name $.cookie
- * @cat Plugins/Cookie
- * @author Klaus Hartl/klaus.hartl(a)stilbuero.de
- */
-jQuery.cookie = function(name, value, options) {
- if (typeof value != 'undefined') { // name and value given, set cookie
- options = options || {};
- if (value === null) {
- value = '';
- options.expires = -1;
- }
- var expires = '';
- if (options.expires && (typeof options.expires == 'number' || options.expires.toUTCString)) {
- var date;
- if (typeof options.expires == 'number') {
- date = new Date();
- date.setTime(date.getTime() + (options.expires * 24 * 60 * 60 * 1000));
- } else {
- date = options.expires;
- }
- expires = '; expires=' + date.toUTCString(); // use expires attribute, max-age is not supported by IE
- }
- var path = options.path ? '; path=' + options.path : '';
- var domain = options.domain ? '; domain=' + options.domain : '';
- var secure = options.secure ? '; secure' : '';
- document.cookie = [name, '=', encodeURIComponent(value), expires, path, domain, secure].join('');
- } else { // only name given, get cookie
- var cookieValue = null;
- if (document.cookie && document.cookie != '') {
- var cookies = document.cookie.split(';');
- for (var i = 0; i < cookies.length; i++) {
- var cookie = jQuery.trim(cookies[i]);
- // Does this cookie string begin with the name we want?
- if (cookie.substring(0, name.length + 1) == (name + '=')) {
- cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
- break;
- }
- }
- }
- return cookieValue;
- }
-};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/jquery.json.js
--- a/static/scripts/jquery.json.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,96 +0,0 @@
-(function ($) {
- var m = {
- '\b': '\\b',
- '\t': '\\t',
- '\n': '\\n',
- '\f': '\\f',
- '\r': '\\r',
- '"' : '\\"',
- '\\': '\\\\'
- },
- s = {
- 'array': function (x) {
- var a = ['['], b, f, i, l = x.length, v;
- for (i = 0; i < l; i += 1) {
- v = x[i];
- f = s[typeof v];
- if (f) {
- v = f(v);
- if (typeof v == 'string') {
- if (b) {
- a[a.length] = ',';
- }
- a[a.length] = v;
- b = true;
- }
- }
- }
- a[a.length] = ']';
- return a.join('');
- },
- 'boolean': function (x) {
- return String(x);
- },
- 'null': function (x) {
- return "null";
- },
- 'number': function (x) {
- return isFinite(x) ? String(x) : 'null';
- },
- 'object': function (x) {
- if (x) {
- if (x instanceof Array) {
- return s.array(x);
- }
- var a = ['{'], b, f, i, v;
- for (i in x) {
- v = x[i];
- f = s[typeof v];
- if (f) {
- v = f(v);
- if (typeof v == 'string') {
- if (b) {
- a[a.length] = ',';
- }
- a.push(s.string(i), ':', v);
- b = true;
- }
- }
- }
- a[a.length] = '}';
- return a.join('');
- }
- return 'null';
- },
- 'string': function (x) {
- if (/["\\\x00-\x1f]/.test(x)) {
- x = x.replace(/([\x00-\x1f\\"])/g, function(a, b) {
- var c = m[b];
- if (c) {
- return c;
- }
- c = b.charCodeAt();
- return '\\u00' +
- Math.floor(c / 16).toString(16) +
- (c % 16).toString(16);
- });
- }
- return '"' + x + '"';
- }
- };
-
- $.toJSON = function(v) {
- var f = isNaN(v) ? s[typeof v] : s['number'];
- if (f) return f(v);
- };
-
- $.parseJSON = function(v, safe) {
- if (safe === undefined) safe = $.parseJSON.safe;
- if (safe && !/^("(\\.|[^"\\\n\r])*?"|[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t])+?$/.test(v))
- return undefined;
- return eval('('+v+')');
- };
-
- $.parseJSON.safe = false;
-
-})(jQuery);
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/packed/cookie_set.js
--- a/static/scripts/packed/cookie_set.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-function CookieSet(a){this.cookie_name=a;this.store=store={};jQuery.each((jQuery.cookie(a)||"").split("|"),function(c,b){store[b]=true})}CookieSet.prototype.add=function(a){this.store[a]=true;return this};CookieSet.prototype.remove=function(a){delete this.store[a];return this};CookieSet.prototype.removeAll=function(a){this.store={};return this};CookieSet.prototype.contains=function(a){return(a in this.store)};CookieSet.prototype.save=function(){t=[];for(key in this.store){if(key!=""){t.push(key)}}jQuery.cookie(this.cookie_name,t.join("|"));return this};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/packed/jquery.cookie.js
--- a/static/scripts/packed/jquery.cookie.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-jQuery.cookie=function(b,j,m){if(typeof j!="undefined"){m=m||{};if(j===null){j="";m.expires=-1}var e="";if(m.expires&&(typeof m.expires=="number"||m.expires.toUTCString)){var f;if(typeof m.expires=="number"){f=new Date();f.setTime(f.getTime()+(m.expires*24*60*60*1000))}else{f=m.expires}e="; expires="+f.toUTCString()}var l=m.path?"; path="+m.path:"";var g=m.domain?"; domain="+m.domain:"";var a=m.secure?"; secure":"";document.cookie=[b,"=",encodeURIComponent(j),e,l,g,a].join("")}else{var d=null;if(document.cookie&&document.cookie!=""){var k=document.cookie.split(";");for(var h=0;h<k.length;h++){var c=jQuery.trim(k[h]);if(c.substring(0,b.length+1)==(b+"=")){d=decodeURIComponent(c.substring(b.length+1));break}}}return d}};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/packed/jquery.json.js
--- a/static/scripts/packed/jquery.json.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-(function($){var m={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},s={array:function(x){var a=["["],b,f,i,l=x.length,v;for(i=0;i<l;i+=1){v=x[i];f=s[typeof v];if(f){v=f(v);if(typeof v=="string"){if(b){a[a.length]=","}a[a.length]=v;b=true}}}a[a.length]="]";return a.join("")},"boolean":function(x){return String(x)},"null":function(x){return"null"},number:function(x){return isFinite(x)?String(x):"null"},object:function(x){if(x){if(x instanceof Array){return s.array(x)}var a=["{"],b,f,i,v;for(i in x){v=x[i];f=s[typeof v];if(f){v=f(v);if(typeof v=="string"){if(b){a[a.length]=","}a.push(s.string(i),":",v);b=true}}}a[a.length]="}";return a.join("")}return"null"},string:function(x){if(/["\\\x00-\x1f]/.test(x)){x=x.replace(/([\x00-\x1f\\"])/g,function(a,b){var c=m[b];if(c){return c}c=b.charCodeAt();return"\\u00"+Math.floor(c/16).toString(16)+(c%16).toString(16)})}return'"'+x+'"'}};$.toJSON=function(v){var f=isNaN(v)?s[typeof v]:s.number;if(f){return f(v)
}};$.parseJSON=function(v,safe){if(safe===undefined){safe=$.parseJSON.safe}if(safe&&!/^("(\\.|[^"\\\n\r])*?"|[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t])+?$/.test(v)){return undefined}return eval("("+v+")")};$.parseJSON.safe=false})(jQuery);
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Thu Aug 20 11:04:45 2009 -0400
+++ b/templates/workflow/editor.mako Thu Aug 20 12:41:37 2009 -0400
@@ -30,7 +30,6 @@
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.drop.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.hover.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.form.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.json.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.jstore-all.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.base.js')}"> </script>
@@ -350,7 +349,7 @@
type: "POST",
data: {
id: "${trans.security.encode_id( workflow_id )}",
- workflow_data: function() { return $.toJSON( workflow.to_simple() ) },
+ workflow_data: function() { return JSON.stringify( workflow.to_simple() ) },
"_": "true"
},
dataType: 'json',
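As a brief aside on the $.toJSON to JSON.stringify swap shown in the editor.mako hunk above (an illustration, not part of the changeset): JSON2.js supplies the standard JSON object, so the removed jquery.json plugin is no longer needed for serialization. A minimal round-trip sketch with hypothetical data:

var payload = { id: "abc123", steps: 3 };        // hypothetical data
var serialized = JSON.stringify( payload );      // '{"id":"abc123","steps":3}'
var restored = JSON.parse( serialized );         // back to a plain object
// The same round trip previously went through the removed plugin:
//   $.toJSON( payload )  /  $.parseJSON( serialized )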
details: http://www.bx.psu.edu/hg/galaxy/rev/542471b183d7
changeset: 2603:542471b183d7
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 18:44:35 2009 -0400
description:
Merge trunk
0 file(s) affected in this change:
diffs (1505 lines):
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 20 18:44:35 2009 -0400
@@ -6,6 +6,8 @@
from galaxy.datatypes.tabular import *
from galaxy.datatypes.interval import *
from galaxy.datatypes import metadata
+from galaxy.util.json import from_json_string
+from galaxy.util.expressions import ExpressionContext
import pkg_resources
pkg_resources.require( "PasteDeploy" )
@@ -18,6 +20,12 @@
# States for running a job. These are NOT the same as data states
JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_OK, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ok', 'ready', 'deleted', 'admin_deleted'
+
+# This file, if created in the job's working directory, will be used for
+# setting advanced metadata properties on the job and its associated outputs.
+# This interface is currently experimental, is only used by the upload tool,
+# and should eventually become API'd
+TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
class JobManager( object ):
"""
@@ -320,6 +328,7 @@
self.working_directory = \
os.path.join( self.app.config.job_working_directory, str( self.job_id ) )
self.output_paths = None
+ self.tool_provided_job_metadata = None
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job ) #wrapper holding the info required to restore and clean up from files used for setting metadata externally
def get_param_dict( self ):
@@ -422,6 +431,8 @@
dataset.blurb = 'tool error'
dataset.info = message
dataset.set_size()
+ if dataset.ext == 'auto':
+ dataset.extension = 'data'
dataset.flush()
job.state = model.Job.states.ERROR
job.command_line = self.command_line
@@ -486,16 +497,28 @@
except ( IOError, OSError ):
self.fail( "Job %s's output dataset(s) could not be read" % job.id )
return
+ job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
for dataset_assoc in job.output_datasets:
+ context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
+ if context.get( 'path', None ):
+ # The tool can set an alternate output path for the dataset.
+ try:
+ shutil.move( context['path'], dataset.file_name )
+ except ( IOError, OSError ):
+ if not context['stderr']:
+ context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = stdout + stderr
+ dataset.info = context['stdout'] + context['stderr']
dataset.set_size()
- if stderr:
+ if context['stderr']:
dataset.blurb = "error"
elif dataset.has_data():
+ # If the tool was expected to set the extension, attempt to retrieve it
+ if dataset.ext == 'auto':
+ dataset.extension = context.get( 'ext', 'data' )
#if a dataset was copied, it won't appear in our dictionary:
#either use the metadata from originating output dataset, or call set_meta on the copies
#it would be quicker to just copy the metadata from the originating output dataset,
@@ -510,18 +533,39 @@
#the metadata that was stored to disk for use via the external process,
#and the changes made by the user will be lost, without warning or notice
dataset.metadata.from_JSON_dict( self.external_output_metadata.get_output_filenames_by_dataset( dataset ).filename_out )
- if self.tool.is_multi_byte:
- dataset.set_multi_byte_peek()
- else:
- dataset.set_peek()
+ try:
+ assert context.get( 'line_count', None ) is not None
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek( line_count=context['line_count'] )
+ else:
+ dataset.set_peek( line_count=context['line_count'] )
+ except:
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek()
+ else:
+ dataset.set_peek()
+ try:
+ # set the name if provided by the tool
+ dataset.name = context['name']
+ except:
+ pass
else:
dataset.blurb = "empty"
+ if dataset.ext == 'auto':
+ dataset.extension = 'txt'
dataset.flush()
- if stderr:
+ if context['stderr']:
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
- dataset_assoc.dataset.dataset.flush()
+ # If any of the rest of the finish method below raises an
+ # exception, the fail method will run and set the datasets to
+ # ERROR. The user will never see that the datasets are in error if
+ # they were flushed as OK here, since upon doing so, the history
+ # panel stops checking for updates. So allow the
+ # mapping.context.current.flush() at the bottom of this method set
+ # the state instead.
+ #dataset_assoc.dataset.dataset.flush()
# Save stdout and stderr
if len( stdout ) > 32768:
@@ -591,7 +635,8 @@
return self.output_paths
class DatasetPath( object ):
- def __init__( self, real_path, false_path = None ):
+ def __init__( self, dataset_id, real_path, false_path = None ):
+ self.dataset_id = dataset_id
self.real_path = real_path
self.false_path = false_path
def __str__( self ):
@@ -605,10 +650,55 @@
self.output_paths = []
for name, data in [ ( da.name, da.dataset.dataset ) for da in job.output_datasets ]:
false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % data.id ) )
- self.output_paths.append( DatasetPath( data.file_name, false_path ) )
+ self.output_paths.append( DatasetPath( data.id, data.file_name, false_path ) )
else:
- self.output_paths = [ DatasetPath( da.dataset.file_name ) for da in job.output_datasets ]
+ self.output_paths = [ DatasetPath( da.dataset.dataset.id, da.dataset.file_name ) for da in job.output_datasets ]
return self.output_paths
+
+ def get_output_file_id( self, file ):
+ if self.output_paths is None:
+ self.get_output_fnames()
+ for dp in self.output_paths:
+ if self.app.config.outputs_to_working_directory and os.path.basename( dp.false_path ) == file:
+ return dp.dataset_id
+ elif os.path.basename( dp.real_path ) == file:
+ return dp.dataset_id
+ return None
+
+ def get_tool_provided_job_metadata( self ):
+ if self.tool_provided_job_metadata is not None:
+ return self.tool_provided_job_metadata
+
+ # Look for JSONified job metadata
+ self.tool_provided_job_metadata = []
+ meta_file = os.path.join( self.working_directory, TOOL_PROVIDED_JOB_METADATA_FILE )
+ if os.path.exists( meta_file ):
+ for line in open( meta_file, 'r' ):
+ try:
+ line = from_json_string( line )
+ assert 'type' in line
+ except:
+ log.exception( '(%s) Got JSON data from tool, but data is improperly formatted or no "type" key in data' % self.job_id )
+ log.debug( 'Offending data was: %s' % line )
+ continue
+ # Set the dataset id if it's a dataset entry and isn't set.
+ # This isn't insecure. We loop the job's output datasets in
+ # the finish method, so if a tool writes out metadata for a
+ # dataset id that it doesn't own, it'll just be ignored.
+ if line['type'] == 'dataset' and 'dataset_id' not in line:
+ try:
+ line['dataset_id'] = self.get_output_file_id( line['dataset'] )
+ except KeyError:
+ log.warning( '(%s) Tool provided job dataset-specific metadata without specifying a dataset' % self.job_id )
+ continue
+ self.tool_provided_job_metadata.append( line )
+ return self.tool_provided_job_metadata
+
+ def get_dataset_finish_context( self, job_context, dataset ):
+ for meta in self.get_tool_provided_job_metadata():
+ if meta['type'] == 'dataset' and meta['dataset_id'] == dataset.id:
+ return ExpressionContext( meta, job_context )
+ return job_context
def check_output_sizes( self ):
sizes = []
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Aug 20 18:44:35 2009 -0400
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil
+import logging, os, string, sys, tempfile, glob, shutil, types
import simplejson
import binascii
from UserDict import DictMixin
@@ -415,6 +415,7 @@
output.metadata_source = data_elem.get("metadata_source", "")
output.parent = data_elem.get("parent", None)
output.label = util.xml_text( data_elem, "label" )
+ output.count = int( data_elem.get("count", 1) )
output.filters = data_elem.findall( 'filter' )
self.outputs[ output.name ] = output
# Any extra generated config files for the tool
@@ -816,7 +817,11 @@
# If we've completed the last page we can execute the tool
elif state.page == self.last_page:
out_data = self.execute( trans, incoming=params )
- return 'tool_executed.mako', dict( out_data=out_data )
+ try:
+ assert type( out_data ) is types.DictType
+ return 'tool_executed.mako', dict( out_data=out_data )
+ except:
+ return 'message.mako', dict( message_type='error', message=out_data, refresh_frames=[] )
# Otherwise move on to the next page
else:
state.page += 1
@@ -824,15 +829,26 @@
self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
else:
- if filter( lambda x: isinstance( x, FieldStorage ) and x.file, state.inputs.values() ):
+ try:
+ self.find_fieldstorage( state.inputs )
+ except InterruptedUpload:
# If inputs contain a file it won't persist. Most likely this
# is an interrupted upload. We should probably find a more
# standard method of determining an incomplete POST.
return self.handle_interrupted( trans, state.inputs )
- else:
- # Just a refresh, render the form with updated state and errors.
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ except:
+ pass
+ # Just a refresh, render the form with updated state and errors.
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ def find_fieldstorage( self, x ):
+ if isinstance( x, FieldStorage ):
+ raise InterruptedUpload( None )
+ elif type( x ) is types.DictType:
+ [ self.find_fieldstorage( y ) for y in x.values() ]
+ elif type( x ) is types.ListType:
+ [ self.find_fieldstorage( y ) for y in x ]
+
def handle_interrupted( self, trans, inputs ):
"""
Upon handling inputs, if it appears that we have received an incomplete
@@ -1704,3 +1720,6 @@
return value
else:
return incoming.get( key, default )
+
+class InterruptedUpload( Exception ):
+ pass
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 20 18:44:35 2009 -0400
@@ -1,8 +1,10 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+from cgi import FieldStorage
from __init__ import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
+from galaxy.util.json import to_json_string
import sys, traceback
@@ -11,14 +13,28 @@
class UploadToolAction( ToolAction ):
# Action for uploading files
- def __init__( self ):
- self.empty = False
- self.line_count = None
- def remove_tempfile( self, filename ):
- try:
- os.unlink( filename )
- except:
- log.exception( 'failure removing temporary file: %s' % filename )
+ def persist_uploads( self, incoming ):
+ if 'files' in incoming:
+ new_files = []
+ temp_files = []
+ for upload_dataset in incoming['files']:
+ f = upload_dataset['file_data']
+ if isinstance( f, FieldStorage ):
+ # very small files can be StringIOs
+ if 'name' in dir( f.file ) and f.file.name != '<fdopen>':
+ local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
+ f.file.close()
+ else:
+ local_filename = datatypes.sniff.stream_to_file( f.file, prefix="strio_upload_file_" )[0]
+ upload_dataset['file_data'] = dict( filename = f.filename,
+ local_filename = local_filename )
+ if upload_dataset['url_paste'].strip() != '':
+ upload_dataset['url_paste'] = datatypes.sniff.stream_to_file( StringIO.StringIO( upload_dataset['url_paste'] ), prefix="strio_url_paste_" )[0]
+ else:
+ upload_dataset['url_paste'] = None
+ new_files.append( upload_dataset )
+ incoming['files'] = new_files
+ return incoming
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
dataset_upload_inputs = []
for input_name, input in tool.inputs.iteritems():
@@ -42,330 +58,100 @@
log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
else:
self.precreated_datasets.append( data )
+
data_list = []
+
+ incoming = self.persist_uploads( incoming )
+
+ json_file = tempfile.mkstemp()
+ json_file_path = json_file[1]
+ json_file = os.fdopen( json_file[0], 'w' )
for dataset_upload_input in dataset_upload_inputs:
uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming )
for uploaded_dataset in uploaded_datasets:
- precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name )
- dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset, metadata = uploaded_dataset.metadata, uploaded_dataset = uploaded_dataset )
- #dataset state is now set, we should not do anything else to this dataset
- data_list.append( dataset )
- #clean up extra temp names
- uploaded_dataset.clean_up_temp_files()
-
+ data = self.get_precreated_dataset( uploaded_dataset.name )
+ if not data:
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
+ data.name = uploaded_dataset.name
+ data.state = data.states.QUEUED
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.add_dataset( data, genome_build = uploaded_dataset.dbkey )
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ else:
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.genome_build = uploaded_dataset.dbkey
+ if uploaded_dataset.type == 'composite':
+ # we need to init metadata before the job is dispatched
+ data.init_meta()
+ for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
+ setattr( data.metadata, meta_name, meta_value )
+ data.flush()
+ json = dict( file_type = uploaded_dataset.file_type,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ metadata = uploaded_dataset.metadata,
+ primary_file = uploaded_dataset.primary_file,
+ extra_files_path = data.extra_files_path,
+ composite_file_paths = uploaded_dataset.composite_files,
+ composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+ else:
+ try:
+ is_binary = uploaded_dataset.datatype.is_binary
+ except:
+ is_binary = None
+ json = dict( file_type = uploaded_dataset.file_type,
+ name = uploaded_dataset.name,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ is_binary = is_binary,
+ space_to_tab = uploaded_dataset.space_to_tab,
+ path = uploaded_dataset.path )
+ json_file.write( to_json_string( json ) + '\n' )
+ data_list.append( data )
+ json_file.close()
+
#cleanup unclaimed precreated datasets:
for data in self.precreated_datasets:
log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
data.state = data.states.ERROR
data.info = 'No file contents were available.'
- if data_list:
- trans.app.model.flush()
+ if not data_list:
+ try:
+ os.remove( json_file_path )
+ except:
+ pass
+ return 'No data was entered in the upload form, please go back and choose data to upload.'
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
job.history_id = trans.history.id
job.tool_id = tool.id
- try:
- # For backward compatibility, some tools may not have versions yet.
- job.tool_version = tool.version
- except:
- job.tool_version = "1.0.1"
+ job.tool_version = tool.version
job.state = trans.app.model.Job.states.UPLOAD
job.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
+
+ for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ job.add_parameter( name, value )
+ job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
+ for i, dataset in enumerate( data_list ):
+ job.add_output_dataset( i, dataset )
+ trans.app.model.flush()
- #if we could make a 'real' job here, then metadata could be set before job.finish() is called
- hda = data_list[0] #only our first hda is being added as output for the job, why?
- job.state = trans.app.model.Job.states.OK
- file_size_str = datatypes.data.nice_size( hda.dataset.file_size )
- job.info = "%s, size: %s" % ( hda.info, file_size_str )
- job.add_output_dataset( hda.name, hda )
- job.flush()
- log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) )
- trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id )
- return dict( output=hda )
-
- def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None):
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( create_dataset=True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
- data.name = err_code
- data.extension = "txt"
- data.dbkey = "?"
- data.info = err_msg
- data.file_size = 0
- data.state = data.states.EMPTY
- data.flush()
- if precreated_dataset is None:
- trans.history.add_dataset( data )
- trans.app.model.flush()
- # Indicate job failure by setting state and info
- job.state = trans.app.model.Job.states.ERROR
- job.info = err_msg
- job.add_output_dataset( data.name, data )
- job.flush()
- log.info( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ) )
- trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id )
- return dict( output=data )
-
- def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None, metadata = {}, uploaded_dataset = None ):
- def dataset_no_data_error( data, message = 'there was an error uploading your file' ):
- data.info = "No data: %s." % message
- data.state = data.states.ERROR
- if data.extension is None:
- data.extension = 'data'
- return data
- data_type = None
-
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
-
- # See if we have an empty file
- if not os.path.getsize( temp_name ) > 0:
- return dataset_no_data_error( data, message = 'you attempted to upload an empty file' )
- #raise BadFileException( "you attempted to upload an empty file." )
- if is_multi_byte:
- ext = sniff.guess_ext( temp_name, is_multi_byte=True )
- else:
- if not data_type: #at this point data_type is always None (just initialized above), so this is always True...lots of cleanup needed here
- # See if we have a gzipped file, which, if it passes our restrictions,
- # we'll decompress on the fly.
- is_gzipped, is_valid = self.check_gzip( temp_name )
- if is_gzipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_gzipped and is_valid:
- # We need to uncompress the temp_name file
- CHUNK_SIZE = 2**20 # 1Mb
- fd, uncompressed = tempfile.mkstemp()
- gzipped_file = gzip.GzipFile( temp_name )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- return dataset_no_data_error( data, message = 'problem decompressing gzipped data' )
- #raise BadFileException( 'problem decompressing gzipped data.' )
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the decompressed file
- shutil.move( uncompressed, temp_name )
- file_name = file_name.rstrip( '.gz' )
- data_type = 'gzip'
- ext = ''
- if not data_type:
- # See if we have a zip archive
- is_zipped, is_valid, test_ext = self.check_zip( temp_name )
- if is_zipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_zipped and is_valid:
- # Currently, we force specific tools to handle this case. We also require the user
- # to manually set the incoming file_type
- if ( test_ext == 'ab1' or test_ext == 'scf' ) and file_type != 'binseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
- elif test_ext == 'txt' and file_type != 'txtseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
- if not ( file_type == 'binseq.zip' or file_type == 'txtseq.zip' ):
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files" )
- #raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
- data_type = 'zip'
- ext = file_type
- if not data_type:
- if self.check_binary( temp_name ):
- if uploaded_dataset and uploaded_dataset.datatype and uploaded_dataset.datatype.is_binary:
- #we need a more generalized way of checking if a binary upload is of the right format for a datatype...magic number, etc
- data_type = 'binary'
- ext = uploaded_dataset.file_type
- else:
- parts = file_name.split( "." )
- if len( parts ) > 1:
- ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- if ext == 'ab1' and file_type != 'ab1':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
- elif ext == 'scf' and file_type != 'scf':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Scf' when uploading scf files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
- data_type = 'binary'
- if not data_type:
- # We must have a text file
- if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- #if data_type != 'binary' and data_type != 'zip' and not trans.app.datatypes_registry.get_datatype_by_extension( ext ).is_binary:
- if data_type != 'binary' and data_type != 'zip':
- if space_to_tab:
- self.line_count = sniff.convert_newlines_sep2tabs( temp_name )
- else:
- self.line_count = sniff.convert_newlines( temp_name )
- if file_type == 'auto':
- ext = sniff.guess_ext( temp_name, sniff_order=trans.app.datatypes_registry.sniff_order )
- else:
- ext = file_type
- data_type = ext
- if info is None:
- info = 'uploaded %s file' %data_type
- data.extension = ext
- data.name = file_name
- data.dbkey = dbkey
- data.info = info
- data.flush()
- shutil.move( temp_name, data.file_name )
- dataset_state = data.states.OK #don't set actual state here, only set to OK when finished setting attributes of the dataset
- data.set_size()
- data.init_meta()
- #need to set metadata, has to be done after extention is set
- for meta_name, meta_value in metadata.iteritems():
- setattr( data.metadata, meta_name, meta_value )
- if self.line_count is not None:
- try:
- if is_multi_byte:
- data.set_multi_byte_peek( line_count=self.line_count )
- else:
- data.set_peek( line_count=self.line_count )
- except:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
- else:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
-
- # validate incomming data
- # Commented by greg on 3/14/07
- # for error in data.datatype.validate( data ):
- # data.add_validation_error(
- # model.ValidationError( message=str( error ), err_type=error.__class__.__name__, attributes=util.object_to_string( error.__dict__ ) ) )
- if data.missing_meta():
- data.datatype.set_meta( data )
- dbkey_to_store = dbkey
- if type( dbkey_to_store ) == type( [] ):
- dbkey_to_store = dbkey[0]
- if precreated_dataset is not None:
- trans.history.genome_build = dbkey_to_store
- else:
- trans.history.add_dataset( data, genome_build=dbkey_to_store )
- #set up composite files
- if uploaded_dataset is not None:
- composite_files = data.datatype.get_composite_files( data )
- if composite_files:
- os.mkdir( data.extra_files_path ) #make extra files path
- for name, value in composite_files.iteritems():
- if uploaded_dataset.composite_files[ value.name ] is None and not value.optional:
- data.info = "A required composite data file was not provided (%s)" % name
- dataset_state = data.states.ERROR
- break
- elif uploaded_dataset.composite_files[ value.name] is not None:
- if not value.is_binary:
- if uploaded_dataset.composite_files[ value.name ].space_to_tab:
- sniff.convert_newlines_sep2tabs( uploaded_dataset.composite_files[ value.name ].filename )
- else:
- sniff.convert_newlines( uploaded_dataset.composite_files[ value.name ].filename )
- shutil.move( uploaded_dataset.composite_files[ value.name ].filename, os.path.join( data.extra_files_path, name ) )
- if data.datatype.composite_type == 'auto_primary_file':
- #now that metadata was set above, we should create the primary file as required
- open( data.file_name, 'wb+' ).write( data.datatype.generate_primary_file( dataset = data ) )
- data.state = dataset_state #Always set dataset state LAST
- trans.app.model.flush()
- trans.log_event( "Added dataset %d to history %d" %( data.id, trans.history.id ), tool_id="upload" )
- return data
-
- def check_gzip( self, temp_name ):
- temp = open( temp_name, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != util.gzip_magic:
- return ( False, False )
- CHUNK_SIZE = 2**15 # 32Kb
- gzipped_file = gzip.GzipFile( temp_name )
- chunk = gzipped_file.read( CHUNK_SIZE )
- gzipped_file.close()
- if self.check_html( temp_name, chunk=chunk ) or self.check_binary( temp_name, chunk=chunk ):
- return( True, False )
- return ( True, True )
-
- def check_zip( self, temp_name ):
- if not zipfile.is_zipfile( temp_name ):
- return ( False, False, None )
- zip_file = zipfile.ZipFile( temp_name, "r" )
- # Make sure the archive consists of valid files. The current rules are:
- # 1. Archives can only include .ab1, .scf or .txt files
- # 2. All file extensions within an archive must be the same
- name = zip_file.namelist()[0]
- test_ext = name.split( "." )[1].strip().lower()
- if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
- return ( True, False, test_ext )
- for name in zip_file.namelist():
- ext = name.split( "." )[1].strip().lower()
- if ext != test_ext:
- return ( True, False, test_ext )
- return ( True, True, test_ext )
-
- def check_html( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open(temp_name, "U")
- else:
- temp = chunk
- regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
- regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
- regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
- regexp4 = re.compile( "<META[^>]*>", re.I )
- lineno = 0
- for line in temp:
- lineno += 1
- matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
- if matches:
- if chunk is None:
- temp.close()
- return True
- if lineno > 100:
- break
- if chunk is None:
- temp.close()
- return False
- def check_binary( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open( temp_name, "U" )
- else:
- temp = chunk
- lineno = 0
- for line in temp:
- lineno += 1
- line = line.strip()
- if line:
- if util.is_multi_byte( line ):
- return False
- for char in line:
- if ord( char ) > 128:
- if chunk is None:
- temp.close()
- return True
- if lineno > 10:
- break
- if chunk is None:
- temp.close()
- return False
+ # Queue the job for execution
+ trans.app.job_queue.put( job.id, tool )
+ trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return dict( [ ( i, v ) for i, v in enumerate( data_list ) ] )
def get_precreated_dataset( self, name ):
"""
@@ -378,7 +164,3 @@
return self.precreated_datasets.pop( names.index( name ) )
else:
return None
-
-class BadFileException( Exception ):
- pass
-
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Aug 20 18:44:35 2009 -0400
@@ -304,21 +304,22 @@
def get_html_field( self, trans=None, value=None, other_values={} ):
return form_builder.FileField( self.name, ajax = self.ajax, value = value )
def from_html( self, value, trans=None, other_values={} ):
+ # TODO: Fix nginx upload module support
# Middleware or proxies may encode files in special ways (TODO: this
# should be pluggable)
- if type( value ) == dict:
- upload_location = self.tool.app.config.nginx_upload_location
- assert upload_location, \
- "Request appears to have been processed by nginx_upload_module \
- but Galaxy is not configured to recgonize it"
- # Check that the file is in the right location
- local_filename = os.path.abspath( value['path'] )
- assert local_filename.startswith( upload_location ), \
- "Filename provided by nginx is not in correct directory"
- value = Bunch(
- filename = value["name"],
- local_filename = local_filename
- )
+ #if type( value ) == dict:
+ # upload_location = self.tool.app.config.nginx_upload_location
+ # assert upload_location, \
+ # "Request appears to have been processed by nginx_upload_module \
+ # but Galaxy is not configured to recgonize it"
+ # # Check that the file is in the right location
+ # local_filename = os.path.abspath( value['path'] )
+ # assert local_filename.startswith( upload_location ), \
+ # "Filename provided by nginx is not in correct directory"
+ # value = Bunch(
+ # filename = value["name"],
+ # local_filename = local_filename
+ # )
return value
def get_required_enctype( self ):
"""
@@ -330,10 +331,18 @@
return None
elif isinstance( value, unicode ) or isinstance( value, str ):
return value
+ elif isinstance( value, dict ):
+ # or should we jsonify?
+ try:
+ return value['local_filename']
+ except:
+ return None
raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
if value is None:
return None
+ elif isinstance( value, unicode ) or isinstance( value, str ):
+ return value
else:
raise Exception( "FileToolParameter cannot be persisted" )
def get_initial_value( self, trans, context ):
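The to_string/to_python changes above mean a FileToolParameter value can now survive being written out with the job: a dict coming from the upload machinery (e.g. with 'name' and 'local_filename' keys) is reduced to its local filename, and a plain string path is accepted back when the job state is reloaded. A minimal standalone sketch of that round trip, using hypothetical helper names rather than Galaxy's actual classes:

# Minimal sketch (not Galaxy's actual FileToolParameter) of the persistence
# behaviour added above: dict values are reduced to their local filename on
# the way out, plain strings are accepted on the way back in.
def file_param_to_string( value ):
    if value is None:
        return None
    elif isinstance( value, basestring ):
        return value
    elif isinstance( value, dict ):
        # e.g. { 'name': 'reads.fastq', 'local_filename': '/tmp/upload_xyz' }
        return value.get( 'local_filename', None )
    raise Exception( "FileToolParameter cannot be persisted" )

def file_param_to_python( value ):
    if value is None or isinstance( value, basestring ):
        return value
    raise Exception( "FileToolParameter cannot be persisted" )

# Round trip: the string stored with the job is just the temp file path.
persisted = file_param_to_string( { 'name': 'reads.fastq', 'local_filename': '/tmp/upload_xyz' } )
assert file_param_to_python( persisted ) == '/tmp/upload_xyz'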
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 18:44:35 2009 -0400
@@ -12,6 +12,7 @@
from galaxy.datatypes import sniff
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
+from galaxy.util import json
class Group( object ):
def __init__( self ):
@@ -167,33 +168,30 @@
rval.append( rval_dict )
return rval
def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
- def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ):
+ def get_data_file_filename( data_file, override_name = None, override_info = None ):
dataset_name = override_name
dataset_info = override_info
def get_file_name( file_name ):
file_name = file_name.split( '\\' )[-1]
file_name = file_name.split( '/' )[-1]
return file_name
- if 'local_filename' in dir( data_file ):
+ try:
# Use the existing file
- return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte
- elif 'filename' in dir( data_file ):
- #create a new tempfile
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
- precreated_name = get_file_name( data_file.filename )
- if not dataset_name:
- dataset_name = precreated_name
- if not dataset_info:
- dataset_info = 'uploaded file'
- return temp_name, get_file_name( data_file.filename ), is_multi_byte, dataset_name, dataset_info
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
- self.remove_temp_file( temp_name )
- return None, None, is_multi_byte, None, None
- def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ):
+ if not dataset_name and 'filename' in data_file:
+ dataset_name = get_file_name( data_file['filename'] )
+ if not dataset_info:
+ dataset_info = 'uploaded file'
+ return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ #return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
+ except:
+ # The uploaded file should've been persisted by the upload tool action
+ return Bunch( type=None, path=None, name=None )
+ #return None, None, None, None, None
+ def get_url_paste_urls_or_filename( group_incoming, override_name = None, override_info = None ):
filenames = []
- if url_paste not in [ None, "" ]:
+ url_paste_file = group_incoming.get( 'url_paste', None )
+ if url_paste_file is not None:
+ url_paste = open( url_paste_file, 'r' ).read( 1024 )
if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
url_paste = url_paste.replace( '\r', '' ).split( '\n' )
for line in url_paste:
@@ -208,114 +206,54 @@
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
- except Exception, e:
- temp_name = None
- precreated_name = str( e )
- log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
+ yield Bunch( type='url', path=line, name=precreated_name )
+ #yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
if override_name:
dataset_name = override_name
if override_info:
dataset_info = override_info
- is_valid = False
- for line in url_paste: #Trim off empty lines from begining
- line = line.rstrip( '\r\n' )
- if line:
- is_valid = True
- break
- if is_valid:
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
- temp_name = None
- precreated_name = str( e )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
-
+ yield Bunch( type='file', path=url_paste_file, name=precreated_name )
+ #yield ( 'file', url_paste_file, precreated_name, dataset_name, dataset_info )
def get_one_filename( context ):
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
if url_paste.strip():
warnings.append( "All file contents specified in the paste box were ignored." )
else: #we need to use url_paste
- #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names:
- if temp_name:
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
break
- ###this check will cause an additional file to be retrieved and created...so lets not do that
- #try: #check to see if additional paste contents were available
- # file_names.next()
- # warnings.append( "Additional file contents were specified in the paste box, but ignored." )
- #except StopIteration:
- # pass
- return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings
-
+ return file_bunch, warnings
def get_filenames( context ):
rval = []
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
- warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ warnings = []
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
+ #rval.append( ( type, temp_name, precreated_name, space_to_tab, dataset_name, dataset_info ) )
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
return rval
- class UploadedDataset( Bunch ):
- def __init__( self, **kwd ):
- Bunch.__init__( self, **kwd )
- self.primary_file = None
- self.composite_files = odict()
- self.dbkey = None
- self.warnings = []
- self.metadata = {}
-
- self._temp_filenames = [] #store all created filenames here, delete on cleanup
- def register_temp_file( self, filename ):
- if isinstance( filename, list ):
- self._temp_filenames.extend( filename )
- else:
- self._temp_filenames.append( filename )
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- pass
- #log.warning( str( e ) )
- def clean_up_temp_files( self ):
- for filename in self._temp_filenames:
- self.remove_temp_file( filename )
-
file_type = self.get_file_type( context )
d_type = self.get_datatype( trans, context )
dbkey = context.get( 'dbkey', None )
@@ -325,51 +263,50 @@
for group_incoming in context.get( self.name, [] ):
i = int( group_incoming['__index__'] )
groups_incoming[ i ] = group_incoming
-
if d_type.composite_type is not None:
#handle uploading of composite datatypes
#Only one Dataset can be created
+ '''
dataset = UploadedDataset()
+ dataset.datatype = d_type
+ '''
+ dataset = Bunch()
+ dataset.type = 'composite'
dataset.file_type = file_type
+ dataset.dbkey = dbkey
dataset.datatype = d_type
- dataset.dbkey = dbkey
+ dataset.warnings = []
+ dataset.metadata = {}
+ dataset.composite_files = {}
#load metadata
files_metadata = context.get( self.metadata_ref, {} )
- for meta_name, meta_spec in d_type.metadata_spec.iteritems():
+ for meta_name, meta_spec in d_type.metadata_spec.iteritems():
if meta_spec.set_in_upload:
if meta_name in files_metadata:
dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
-
- temp_name = None
- precreated_name = None
- is_multi_byte = False
- space_to_tab = False
- warnings = []
dataset_name = None
dataset_info = None
if dataset.datatype.composite_type == 'auto_primary_file':
#replace sniff here with just creating an empty file
temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
- precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ dataset.primary_file = temp_name
+ dataset.space_to_tab = False
+ dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] )
+ file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
if dataset.datatype.composite_type:
precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
writable_files_offset = 1
- if temp_name is None:#remove this before finish, this should create an empty dataset
+ dataset.primary_file = file_bunch.path
+ dataset.space_to_tab = file_bunch.space_to_tab
+ dataset.precreated_name = file_bunch.precreated_name
+ dataset.name = file_bunch.precreated_name
+ dataset.warnings.extend( file_bunch.warnings )
+ if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
raise Exception( 'No primary dataset file was available for composite upload' )
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.precreated_name = precreated_name
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.warnings.extend( warnings )
- dataset.register_temp_file( temp_name )
-
keys = [ value.name for value in writable_files.values() ]
for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
key = keys[ i + writable_files_offset ]
@@ -377,37 +314,22 @@
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
dataset.composite_files[ key ] = None
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming )
- if temp_name:
- dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name )
- dataset.register_temp_file( temp_name )
+ file_bunch, warnings = get_one_filename( group_incoming )
+ if file_bunch.path:
+ dataset.composite_files[ key ] = file_bunch.__dict__
else:
dataset.composite_files[ key ] = None
if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
return [ dataset ]
else:
+ datasets = get_filenames( context[ self.name ][0] )
rval = []
- for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ):
- dataset = UploadedDataset()
+ for dataset in datasets:
dataset.file_type = file_type
- dataset.datatype = d_type
dataset.dbkey = dbkey
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.precreated_name = precreated_name
- dataset.register_temp_file( temp_name )
rval.append( dataset )
- return rval
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- log.warning( str( e ) )
-
+ return rval
class Conditional( Group ):
type = "conditional"
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/util/__init__.py Thu Aug 20 18:44:35 2009 -0400
@@ -3,7 +3,7 @@
"""
import logging
-import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys
+import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile
# Older py compatibility
try:
@@ -454,6 +454,26 @@
out_dict[ str( key ) ] = value
return out_dict
+def mkstemp_ln( src, prefix='mkstemp_ln_' ):
+ """
+ From tempfile._mkstemp_inner, generate a hard link in the same dir with a
+ random name. Created so we can persist the underlying file of a
+ NamedTemporaryFile upon its closure.
+ """
+ dir = os.path.dirname(src)
+ names = tempfile._get_candidate_names()
+ for seq in xrange(tempfile.TMP_MAX):
+ name = names.next()
+ file = os.path.join(dir, prefix + name)
+ try:
+ linked_path = os.link( src, file )
+ return (os.path.abspath(file))
+ except OSError, e:
+ if e.errno == errno.EEXIST:
+ continue # try again
+ raise
+ raise IOError, (errno.EEXIST, "No usable temporary file name found")
+
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) ) #this list is used in edit attributes and the upload tool
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) ) #this list is used in history.tmpl
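mkstemp_ln exists because the upload path now spills request bodies into NamedTemporaryFile objects (see the cgi.FieldStorage override below), and those delete their backing file on close; hard-linking the file under a random name in the same directory keeps the bytes on disk afterwards. A self-contained sketch of the same helper and how it would be used (the patch's version relies on errno being imported elsewhere in galaxy.util, so it is imported explicitly here):

import errno, os, tempfile

def mkstemp_ln( src, prefix='mkstemp_ln_' ):
    # Hard-link src under a random name in the same directory, retrying on
    # name collisions, so the data survives when src itself is unlinked.
    dir = os.path.dirname( src )
    names = tempfile._get_candidate_names()
    for seq in xrange( tempfile.TMP_MAX ):
        name = names.next()
        linked = os.path.join( dir, prefix + name )
        try:
            os.link( src, linked )
            return os.path.abspath( linked )
        except OSError, e:
            if e.errno == errno.EEXIST:
                continue # name taken, try the next candidate
            raise
    raise IOError( errno.EEXIST, "No usable temporary file name found" )

# Persist the payload of a NamedTemporaryFile before it is closed (and deleted).
tmp = tempfile.NamedTemporaryFile( prefix='upload_' )
tmp.write( 'some uploaded bytes' )
tmp.flush()
persistent_path = mkstemp_ln( tmp.name, prefix='upload_ln_' )
tmp.close() # the original temp file disappears here
assert os.path.exists( persistent_path )
os.remove( persistent_path ) # clean up the example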
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 18:44:35 2009 -0400
@@ -136,6 +136,7 @@
"""
Precreate datasets for asynchronous uploading.
"""
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
def create_dataset( name, history ):
data = trans.app.model.HistoryDatasetAssociation( create_dataset = True )
data.name = name
@@ -143,6 +144,7 @@
data.history = history
data.flush()
history.add_dataset( data )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
return data
tool = self.get_toolbox().tools_by_id.get( tool_id, None )
if not tool:
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/web/framework/base.py Thu Aug 20 18:44:35 2009 -0400
@@ -212,6 +212,17 @@
else:
return None
+# For request.params, override cgi.FieldStorage.make_file to create persistent
+# tempfiles. Necessary for externalizing the upload tool. It's a little hacky
+# but for performance reasons it's way better to use Paste's tempfile than to
+# create a new one and copy.
+import cgi
+class FieldStorage( cgi.FieldStorage ):
+ def make_file(self, binary=None):
+ import tempfile
+ return tempfile.NamedTemporaryFile()
+cgi.FieldStorage = FieldStorage
+
class Request( webob.Request ):
"""
Encapsulates an HTTP request.
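The FieldStorage override above is what makes large uploads land on disk as named temp files, so the tool action can persist them by path (via mkstemp_ln) instead of copying the stream again. A minimal sketch of the same trick against the stdlib cgi module; whether make_file is actually called depends on how cgi decides to buffer a given part, so treat it as illustrative:

import cgi, tempfile

class FieldStorage( cgi.FieldStorage ):
    # When cgi decides to buffer an uploaded part on disk, hand it a *named*
    # temporary file so later code can refer to the part by path (and hard
    # link it with mkstemp_ln) instead of copying the stream again.
    def make_file( self, binary=None ):
        return tempfile.NamedTemporaryFile()

# Monkey-patch the module-level class so anything that constructs a
# cgi.FieldStorage (e.g. the WSGI request wrapper) picks up the override.
cgi.FieldStorage = FieldStorage

Reusing the file cgi has already written avoids making a second copy of a potentially very large upload.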
diff -r 62e24f51b518 -r 542471b183d7 templates/base_panels.mako
--- a/templates/base_panels.mako Thu Aug 20 12:51:39 2009 -0400
+++ b/templates/base_panels.mako Thu Aug 20 18:44:35 2009 -0400
@@ -72,9 +72,6 @@
<script type="text/javascript">
jQuery( function() {
$("iframe#galaxy_main").load( function() {
- ##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() {
- ##$("iframe")[0].contentDocument.body.innerHTML = "HELLO"
- ##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() {
$(this).contents().find("form").each( function() {
if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
$(this).submit( function() {
diff -r 62e24f51b518 -r 542471b183d7 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Thu Aug 20 12:51:39 2009 -0400
+++ b/test/base/twilltestcase.py Thu Aug 20 18:44:35 2009 -0400
@@ -93,6 +93,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when uploading file %s" % ( hid, filename )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
def upload_url_paste( self, url_paste, ftype='auto', dbkey='unspecified (?)' ):
"""Pasted data in the upload utility"""
self.visit_page( "tool_runner/index?tool_id=upload1" )
@@ -112,6 +114,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when pasting %s" % ( hid, url_paste )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
# Functions associated with histories
def check_history_for_errors( self ):
diff -r 62e24f51b518 -r 542471b183d7 tools/data_source/upload.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/data_source/upload.py Thu Aug 20 18:44:35 2009 -0400
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+#Processes uploads from the user.
+
+# WARNING: Changes in this tool (particularly as related to parsing) may need
+# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools
+
+import urllib, sys, os, gzip, tempfile, shutil, re, gzip, zipfile
+from galaxy import eggs
+# need to import model before sniff to resolve a circular import dependency
+import galaxy.model
+from galaxy.datatypes import sniff
+from galaxy import util
+from galaxy.util.json import *
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+def stop_err( msg, ret=1 ):
+ sys.stderr.write( msg )
+ sys.exit( ret )
+
+def file_err( msg, dataset, json_file ):
+ json_file.write( to_json_string( dict( type = 'dataset',
+ ext = 'data',
+ dataset_id = dataset.dataset_id,
+ stderr = msg ) ) + "\n" )
+ try:
+ os.remove( dataset.path )
+ except:
+ pass
+
+def safe_dict(d):
+ """
+ Recursively clone json structure with UTF-8 dictionary keys
+ http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-ke…
+ """
+ if isinstance(d, dict):
+ return dict([(k.encode('utf-8'), safe_dict(v)) for k,v in d.iteritems()])
+ elif isinstance(d, list):
+ return [safe_dict(x) for x in d]
+ else:
+ return d
+
+def check_html( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open(temp_name, "U")
+ else:
+ temp = chunk
+ regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
+ regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
+ regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
+ regexp4 = re.compile( "<META[^>]*>", re.I )
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
+ if matches:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 100:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_binary( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open( temp_name, "U" )
+ else:
+ temp = chunk
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ line = line.strip()
+ if line:
+ for char in line:
+ if ord( char ) > 128:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 10:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_gzip( temp_name ):
+ temp = open( temp_name, "U" )
+ magic_check = temp.read( 2 )
+ temp.close()
+ if magic_check != util.gzip_magic:
+ return ( False, False )
+ CHUNK_SIZE = 2**15 # 32Kb
+ gzipped_file = gzip.GzipFile( temp_name )
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ gzipped_file.close()
+ if check_html( temp_name, chunk=chunk ) or check_binary( temp_name, chunk=chunk ):
+ return( True, False )
+ return ( True, True )
+
+def check_zip( temp_name ):
+ if not zipfile.is_zipfile( temp_name ):
+ return ( False, False, None )
+ zip_file = zipfile.ZipFile( temp_name, "r" )
+ # Make sure the archive consists of valid files. The current rules are:
+ # 1. Archives can only include .ab1, .scf or .txt files
+ # 2. All file extensions within an archive must be the same
+ name = zip_file.namelist()[0]
+ test_ext = name.split( "." )[1].strip().lower()
+ if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
+ return ( True, False, test_ext )
+ for name in zip_file.namelist():
+ ext = name.split( "." )[1].strip().lower()
+ if ext != test_ext:
+ return ( True, False, test_ext )
+ return ( True, True, test_ext )
+
+def add_file( dataset, json_file ):
+ data_type = None
+ line_count = None
+
+ if dataset.type == 'url':
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ except Exception, e:
+ file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
+ return
+ dataset.path = temp_name
+ dataset.is_multi_byte = is_multi_byte
+
+ # See if we have an empty file
+ if not os.path.exists( dataset.path ):
+ file_err( 'Uploaded temporary file (%s) does not exist. Please' % dataset.path, dataset, json_file )
+ return
+ if not os.path.getsize( dataset.path ) > 0:
+ file_err( 'The uploaded file is empty', dataset, json_file )
+ return
+ if 'is_multi_byte' not in dir( dataset ):
+ dataset.is_multi_byte = util.is_multi_byte( open( dataset.path, 'r' ).read( 1024 )[:100] )
+ if dataset.is_multi_byte:
+ ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
+ data_type = ext
+ else:
+ # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
+ is_gzipped, is_valid = check_gzip( dataset.path )
+ if is_gzipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_gzipped and is_valid:
+ # We need to uncompress the temp_name file
+ CHUNK_SIZE = 2**20 # 1Mb
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( dataset.path ) )
+ gzipped_file = gzip.GzipFile( dataset.path )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ file_err( 'Problem decompressing gzipped data', dataset, json_file )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ # Replace the gzipped file with the decompressed file
+ shutil.move( uncompressed, dataset.path )
+ dataset.name = dataset.name.rstrip( '.gz' )
+ data_type = 'gzip'
+ if not data_type:
+ # See if we have a zip archive
+ is_zipped, is_valid, test_ext = check_zip( dataset.path )
+ if is_zipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_zipped and is_valid:
+ # Currently, we force specific tools to handle this case. We also require the user
+ # to manually set the incoming file_type
+ if ( test_ext == 'ab1' or test_ext == 'scf' ) and dataset.file_type != 'binseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'", dataset, json_file )
+ return
+ elif test_ext == 'txt' and dataset.file_type != 'txtseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'", dataset, json_file )
+ return
+ if not ( dataset.file_type == 'binseq.zip' or dataset.file_type == 'txtseq.zip' ):
+ file_err( "You must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files", dataset, json_file )
+ return
+ data_type = 'zip'
+ ext = dataset.file_type
+ if not data_type:
+ if check_binary( dataset.path ):
+ if dataset.is_binary is not None:
+ data_type = 'binary'
+ ext = dataset.file_type
+ else:
+ parts = dataset.name.split( "." )
+ if len( parts ) > 1:
+ ext = parts[1].strip().lower()
+ if not( ext == 'ab1' or ext == 'scf' ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if ext == 'ab1' and dataset.file_type != 'ab1':
+ file_err( "You must manually set the 'File Format' to 'Ab1' when uploading ab1 files.", dataset, json_file )
+ return
+ elif ext == 'scf' and dataset.file_type != 'scf':
+ file_err( "You must manually set the 'File Format' to 'Scf' when uploading scf files.", dataset, json_file )
+ return
+ data_type = 'binary'
+ if not data_type:
+ # We must have a text file
+ if check_html( dataset.path ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if data_type != 'binary' and data_type != 'zip':
+ if dataset.space_to_tab:
+ line_count = sniff.convert_newlines_sep2tabs( dataset.path )
+ else:
+ line_count = sniff.convert_newlines( dataset.path )
+ if dataset.file_type == 'auto':
+ ext = sniff.guess_ext( dataset.path )
+ else:
+ ext = dataset.file_type
+ data_type = ext
+ # Save job info for the framework
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.path,
+ ext = ext,
+ stdout = 'uploaded %s file' % data_type,
+ name = dataset.name,
+ line_count = line_count )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def add_composite_file( dataset, json_file ):
+ if dataset.composite_files:
+ os.mkdir( dataset.extra_files_path )
+ for name, value in dataset.composite_files.iteritems():
+ value = util.bunch.Bunch( **value )
+ if dataset.composite_file_paths[ value.name ] is None and not value.optional:
+ file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
+ break
+ elif dataset.composite_file_paths[value.name] is not None:
+ if not value.is_binary:
+ if uploaded_dataset.composite_files[ value.name ].space_to_tab:
+ sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ else:
+ sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.primary_file,
+ stdout = 'uploaded %s file' % dataset.file_type )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def __main__():
+
+ if len( sys.argv ) != 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ sys.exit( 1 )
+
+ json_file = open( 'galaxy.json', 'w' )
+
+ for line in open( sys.argv[1], 'r' ):
+ dataset = from_json_string( line )
+ dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+
+ if dataset.type == 'composite':
+ add_composite_file( dataset, json_file )
+ else:
+ add_file( dataset, json_file )
+
+ # clean up paramfile
+ try:
+ os.remove( sys.argv[1] )
+ except:
+ pass
+
+if __name__ == '__main__':
+ __main__()
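The new upload.py talks to the framework through two line-oriented JSON files: it reads one JSON object per dataset from the paramfile named on its command line (see the <command> tag added to upload.xml below) and appends one result object per dataset to galaxy.json. A rough sketch of that handshake using the stdlib json and StringIO modules for brevity; the real tool uses galaxy.util.json's from_json_string/to_json_string and a Bunch wrapper, and the exact input fields are produced by the upload tool action, so the ones below are illustrative:

import json, StringIO

# One request line per dataset; field names follow the dicts visible in
# add_file() above, but which fields the tool action actually writes is an
# assumption here.
request_line = json.dumps( dict( type='url',
                                 dataset_id=42,
                                 path='http://example.org/data.txt',
                                 name='data.txt',
                                 file_type='auto',
                                 space_to_tab=False ) )

def process_paramfile( paramfile, results ):
    # upload.py's side of the handshake, reduced to its skeleton.
    for line in paramfile:
        dataset = json.loads( line )
        # ... fetch / sniff / convert the dataset here ...
        info = dict( type = 'dataset',
                     dataset_id = dataset['dataset_id'],
                     path = dataset['path'],
                     ext = 'txt',
                     stdout = 'uploaded txt file',
                     name = dataset['name'],
                     line_count = None )
        results.write( json.dumps( info ) + "\n" )

results = StringIO.StringIO()
process_paramfile( StringIO.StringIO( request_line + "\n" ), results )
print results.getvalue()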
diff -r 62e24f51b518 -r 542471b183d7 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Aug 20 12:51:39 2009 -0400
+++ b/tools/data_source/upload.xml Thu Aug 20 18:44:35 2009 -0400
@@ -1,10 +1,13 @@
<?xml version="1.0"?>
-<tool name="Upload File" id="upload1" version="1.0.2">
+<tool name="Upload File" id="upload1" version="1.0.3">
<description>
from your computer
</description>
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
+ <command interpreter="python">
+ upload.py $paramfile
+ </command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
 <options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ &quot;%s%s%s&quot; % ( line, self.separator, line ) for line in obj ]">
details: http://www.bx.psu.edu/hg/galaxy/rev/62e24f51b518
changeset: 2602:62e24f51b518
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 12:51:39 2009 -0400
description:
Require json2.js
2 file(s) affected in this change:
static/scripts/packed/galaxy.workflow_editor.canvas.js
templates/workflow/editor.mako
diffs (19 lines):
diff -r 0517fd342fc8 -r 62e24f51b518 static/scripts/packed/galaxy.workflow_editor.canvas.js
--- a/static/scripts/packed/galaxy.workflow_editor.canvas.js Thu Aug 20 12:41:37 2009 -0400
+++ b/static/scripts/packed/galaxy.workflow_editor.canvas.js Thu Aug 20 12:51:39 2009 -0400
@@ -1,1 +1,1 @@
-function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatype=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var b in this.datatypes){if(a.datatype=="input"){return true}if(issubtype(a.datatype,this.datatypes[b])){return true}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a)
{this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;
this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropstart",function(g){g.dragProxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g){g.dragProxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g){(new Connector(g.dragTarget.terminal,g.dropTarget.terminal)).redraw()}).bind("hov
er",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img src='../images/delete_icon.png' />").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j){var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var k=new Connector();k.dragging=true;k.connect(this.terminal,i.terminal);$.dropManage({filter:function(h){return this.terminal.can_accept(f)}}).addClass("input-terminal-active");ret
urn i}).bind("drag",function(i){var h=function(){var k=$(i.dragProxy).offsetParent().offset(),j=i.offsetX-k.left,l=i.offsetY-k.top;$(i.dragProxy).css({left:j,top:l});i.dragProxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h){h.dragProxy.terminal.connectors[0].destroy();$(h.dragProxy).remove();$.dropManage().removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b
.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(g){var d=this.element;if(g.type){this.type=g.type}this.name=g.name;this.form_html=g.form_html;this.tool_state=g.tool_state;this.tool_errors=g.tool_errors;if(this.tool_errors){d.addClass("tool-node-error")}else{d.removeClass("tool-node-error")}var c=this;var a=d.find(".toolFormBody");a.find("div").remove();var h=$("<div class='inputs'></div>").appendTo(a);$.each(g.data_inputs,function(j,b){var f=$("<div class='terminal input-terminal'></div>");c.enable_input_terminal(f,b.name,b.extensions);h.append($("<div class='form-row dataRow input-data-row' name='"+b.name+"'>"+b.label+"</div>").prepend(f))});if((g.data_inputs.length>0)&&(g.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(g.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");c.enable_output_terminal(j,b.name,b.extension);var f=b.name;if(b.extension!="input"){f=f+" ("+b.extensio
n+")"}a.append($("<div class='form-row dataRow'>"+f+"</div>").append(j))});workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var g=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=g.find("div.input-data-row");$.each(f.data_inputs,function(k,h){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,h.name,h.extensions);g.find("div[name="+h.name+"]").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){j[0].terminal.connectors[0]=i;i.handle2=j[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+h.name+"'>"+h.label+"</div>").prepend(j))});g.replaceWith(b);g.find("div.input-data-row > .terminal").each(function(){this.terminal.destr
oy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},to_simple:function(){var a={};$.each(this.nodes,function(b,d){var f={};$.each(d.input_terminals,function(g,h){f[h.name]=null;$.each(h.connecto
rs,function(j,k){f[h.name]={id:k.handle1.node.id,output_name:k.handle1.name}})});var c={id:d.id,type:d.type,tool_id:d.tool_id,tool_state:d.tool_state,tool_errors:d.tool_errors,input_connections:f,position:$(d.element).position()};a[d.id]=c});return{steps:a}},from_simple:function(a){wf=this;var b=0;wf.name=a.name;$.each(a.steps,function(f,d){var c=prebuild_node("tool",d.name,d.tool_id);c.init_field_data(d);if(d.position){c.element.css({top:d.position.top,left:d.position.left})}c.id=d.id;wf.nodes[c.id]=c;b=Math.max(b,parseInt(f))});wf.id_counter=b+1;$.each(a.steps,function(f,d){var c=wf.nodes[f];$.each(d.input_connections,function(h,g){if(g){var i=wf.nodes[g.id];var j=new Connector();j.connect(i.output_terminals[g.output_name],c.input_terminals[h]);j.redraw()}})})},enable_auto_save:function(){outer_this=this;$(".toolFormBody").find("input,textarea,select").each(function(){$(this).focus(function(){outer_this.active_form_has_changes=true})})},check_changes_in_active_form:functio
n(){if(this.active_form_has_changes){this.has_changes=true;$(".toolFormBody").find("form").each(function(){$(this).submit()});this.active_form_has_changes=false}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){parent.show_form_for_tool(a.form_html,a)}},layout:function(){this.check_changes_in_active_form();var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){l
evel_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this
.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></d
iv>");var h="<div><img height='16' align='middle' src='../images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img src='../images/delete_icon.png' />").click(function(b){g.destroy()}).hover(function(){$(this).attr("src","../images/delete_icon_dark.png")},function(){$(this).attr("src","../images/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o){var f=$(this).
offsetParent().offset(),b=o.offsetX-f.left,p=o.offsetY-f.top;$(this).css({left:b,top:p});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.c
ss("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(g){var h=$(this).offset();var f=b.cc.position();c=f.top-h.top;d=f.left-h.left}).bind("drag",function(f){a(f.offsetX+d,f.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_n
odes();b.draw_overview()});this.ov.bind("drag",function(k){var j=b.cc.width(),g=b.cc.height(),f=b.oc.width(),h=b.oc.height(),i=$(this).offsetParent().offset(),m=k.offsetX-i.left,l=k.offsetY-i.top;a(-(m/f*j),-(l/h*g))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g){var i=$(this).offsetParent();var h=i.offset();var f=Math.max(i.width()-(g.offsetX-h.left),i.height()-(g.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(f){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var
g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;$.each(workflow.nodes,function(t,q){var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
+function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatype=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var b in this.datatypes){if(a.datatype=="input"){return true}if(issubtype(a.datatype,this.datatypes[b])){return true}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a)
{this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;
this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropstart",function(g){g.dragProxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g){g.dragProxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g){(new Connector(g.dragTarget.terminal,g.dropTarget.terminal)).redraw()}).bind("hov
er",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img src='../images/delete_icon.png' />").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j){var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var k=new Connector();k.dragging=true;k.connect(this.terminal,i.terminal);$.dropManage({filter:function(h){return this.terminal.can_accept(f)}}).addClass("input-terminal-active");ret
urn i}).bind("drag",function(i){var h=function(){var k=$(i.dragProxy).offsetParent().offset(),j=i.offsetX-k.left,l=i.offsetY-k.top;$(i.dragProxy).css({left:j,top:l});i.dragProxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h){h.dragProxy.terminal.connectors[0].destroy();$(h.dragProxy).remove();$.dropManage().removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b
.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(g){var d=this.element;if(g.type){this.type=g.type}this.name=g.name;this.form_html=g.form_html;this.tool_state=g.tool_state;this.tool_errors=g.tool_errors;if(this.tool_errors){d.addClass("tool-node-error")}else{d.removeClass("tool-node-error")}var c=this;var a=d.find(".toolFormBody");a.find("div").remove();var h=$("<div class='inputs'></div>").appendTo(a);$.each(g.data_inputs,function(j,b){var f=$("<div class='terminal input-terminal'></div>");c.enable_input_terminal(f,b.name,b.extensions);h.append($("<div class='form-row dataRow input-data-row' name='"+b.name+"'>"+b.label+"</div>").prepend(f))});if((g.data_inputs.length>0)&&(g.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(g.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");c.enable_output_terminal(j,b.name,b.extension);var f=b.name;if(b.extension!="input"){f=f+" ("+b.extensio
n+")"}a.append($("<div class='form-row dataRow'>"+f+"</div>").append(j))});workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var g=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=g.find("div.input-data-row");$.each(f.data_inputs,function(k,h){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,h.name,h.extensions);g.find("div[name="+h.name+"]").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){j[0].terminal.connectors[0]=i;i.handle2=j[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+h.name+"'>"+h.label+"</div>").prepend(j))});g.replaceWith(b);g.find("div.input-data-row > .terminal").each(function(){this.terminal.destr
oy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},to_simple:function(){var a={};$.each(this.nodes,function(b,d){var f={};$.each(d.input_terminals,function(g,h){f[h.name]=null;$.each(h.connecto
rs,function(j,k){f[h.name]={id:k.handle1.node.id,output_name:k.handle1.name}})});var c={id:d.id,type:d.type,tool_id:d.tool_id,tool_state:d.tool_state,tool_errors:d.tool_errors,input_connections:f,position:$(d.element).position()};a[d.id]=c});return{steps:a}},from_simple:function(a){wf=this;var b=0;wf.name=a.name;$.each(a.steps,function(f,d){var c=prebuild_node("tool",d.name,d.tool_id);c.init_field_data(d);if(d.position){c.element.css({top:d.position.top,left:d.position.left})}c.id=d.id;wf.nodes[c.id]=c;b=Math.max(b,parseInt(f))});wf.id_counter=b+1;$.each(a.steps,function(f,d){var c=wf.nodes[f];$.each(d.input_connections,function(h,g){if(g){var i=wf.nodes[g.id];var j=new Connector();j.connect(i.output_terminals[g.output_name],c.input_terminals[h]);j.redraw()}})})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},clear_active_node:function(){if(this.acti
ve_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){parent.show_form_for_tool(a.form_html,a)}},layout:function(){this.check_changes_in_active_form();var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];fo
r(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_
up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='../images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float:
right;'></div>");k.append($("<img src='../images/delete_icon.png' />").click(function(b){g.destroy()}).hover(function(){$(this).attr("src","../images/delete_icon_dark.png")},function(){$(this).attr("src","../images/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o){var f=$(this).offsetParent().offset(),b=o.offsetX-f.left,p=o.offsetY-f.top;$(this).css({left:b,top:p});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type
=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:functi
on(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(g){var h=$(this).offset();var f=b.cc.position();c=f.top-h.top;d=f.left-h.left}).bind("drag",function(f){a(f.offsetX+d,f.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k){var j=b.cc.width(),g=b.cc.height(),f=b.oc.width(),h=b.oc.height(),i=$(this).offsetParent().offset(),m=k.offsetX-i.left,l=k.offsetY-i.top;a(-(m/f*
j),-(l/h*g))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g){var i=$(this).offsetParent();var h=i.offset();var f=Math.max(i.width()-(g.offsetX-h.left),i.height()-(g.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(f){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent
().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;$.each(workflow.nodes,function(t,q){var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
diff -r 0517fd342fc8 -r 62e24f51b518 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Thu Aug 20 12:41:37 2009 -0400
+++ b/templates/workflow/editor.mako Thu Aug 20 12:51:39 2009 -0400
@@ -31,6 +31,7 @@
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.hover.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.form.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.jstore-all.js')}"> </script>
+ <script type='text/javascript' src="${h.url_for('/static/scripts/json2.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.base.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.workflow_editor.canvas.js')}"> </script>
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/b77721ef035d
changeset: 2596:b77721ef035d
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Aug 21 17:03:42 2009 -0400
description:
Fix collect_primary_datasets() and add collected primary/child datasets to job.
Add new filter to dynamic_options: remove_value.
Fix error in the exception raised when rerunning a dataset with a JobToOutputDatasetAssociation but no job.
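For readers unfamiliar with the new filter, here is a minimal standalone Python sketch of the remove_value semantics described above. The helper and its parameter names are illustrative only (not Galaxy's actual API), and it ignores the multiple/separator handling the real filter supports: an option is dropped when its value matches a literal value, the current value of another parameter, or a metadata value taken from a referenced dataset.

# Standalone illustration only -- not Galaxy code.
# options is a list of (display_name, value, selected) tuples, mirroring
# the structure that filter_options() receives.
def remove_value(options, value=None, ref=None, other_values=None,
                 meta=None, key=None):
    other_values = other_values or {}
    if value is None:
        if ref is not None:
            # take the current value of another tool parameter
            value = other_values.get(ref)
        elif meta is not None and key is not None:
            # take a metadata value of a referenced dataset
            value = meta.get(key)
    return [(name, val, sel) for name, val, sel in options if val != value]

options = [("hg18", "hg18", False), ("hg19", "hg19", False)]
print(remove_value(options, ref="dbkey", other_values={"dbkey": "hg18"}))
# -> [('hg19', 'hg19', False)]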
3 file(s) affected in this change:
lib/galaxy/tools/__init__.py
lib/galaxy/tools/parameters/dynamic_options.py
lib/galaxy/web/controllers/tool_runner.py
diffs (150 lines):
diff -r 108533bf35b8 -r b77721ef035d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Fri Aug 21 15:18:17 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Fri Aug 21 17:03:42 2009 -0400
@@ -1534,10 +1534,19 @@
child_dataset.flush()
child_dataset.set_size()
child_dataset.name = "Secondary Dataset (%s)" % ( designation )
- child_dataset.state = child_dataset.states.OK
child_dataset.init_meta()
child_dataset.set_meta()
child_dataset.set_peek()
+ # Associate new dataset with job
+ job = None
+ for assoc in outdata.creating_job_associations:
+ job = assoc.job
+ break
+ if job:
+ assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s__' % designation, child_dataset )
+ assoc.job = job
+ assoc.flush()
+ child_dataset.state = outdata.state
child_dataset.flush()
# Add child to return dict
children[name][designation] = child_dataset
@@ -1550,7 +1559,7 @@
def collect_primary_datasets( self, output):
primary_datasets = {}
- #Loop through output file names, looking for generated primary datasets in form of 'primary_associatedWithDatasetID_designation_visibility_extension'
+ #Loop through output file names, looking for generated primary datasets in form of 'primary_associatedWithDatasetID_designation_visibility_extension(_DBKEY)'
for name, outdata in output.items():
for filename in glob.glob(os.path.join(self.app.config.new_file_path,"primary_%i_*" % outdata.id) ):
if not name in primary_datasets:
@@ -1563,19 +1572,32 @@
if visible == "visible": visible = True
else: visible = False
ext = fields.pop(0).lower()
+ dbkey = outdata.dbkey
+ if fields:
+ dbkey = fields[ 0 ]
# Create new primary dataset
- primary_data = self.app.model.HistoryDatasetAssociation( extension=ext, designation=designation, visible=visible, dbkey=outdata.dbkey, create_dataset=True )
+ primary_data = self.app.model.HistoryDatasetAssociation( extension=ext, designation=designation, visible=visible, dbkey=dbkey, create_dataset=True )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, primary_data.dataset )
primary_data.flush()
# Move data from temp location to dataset location
shutil.move( filename, primary_data.file_name )
primary_data.set_size()
- primary_data.name = dataset.name
- primary_data.info = dataset.info
- primary_data.state = primary_data.states.OK
- primary_data.init_meta( copy_from=dataset )
+ primary_data.name = outdata.name
+ primary_data.info = outdata.info
+ primary_data.init_meta( copy_from=outdata )
+ primary_data.dbkey = dbkey
primary_data.set_meta()
primary_data.set_peek()
+ # Associate new dataset with job
+ job = None
+ for assoc in outdata.creating_job_associations:
+ job = assoc.job
+ break
+ if job:
+ assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s__' % designation, primary_data )
+ assoc.job = job
+ assoc.flush()
+ primary_data.state = outdata.state
primary_data.flush()
outdata.history.add_dataset( primary_data )
# Add dataset to return dict
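As an aside, the filename convention mentioned in the comment above, primary_<datasetID>_<designation>_<visibility>_<extension>(_<dbkey>), can be illustrated with a small standalone parser. The helper below is hypothetical: it only mirrors the field order that collect_primary_datasets() expects and is not the code Galaxy runs (in particular, the default dbkey here is a placeholder, whereas the real code falls back to outdata.dbkey).

# Hypothetical standalone parser for the primary-output filename convention;
# field order mirrors the diff above, but this is not Galaxy's actual code.
import os

def parse_primary_filename(filename, default_dbkey="?"):
    fields = os.path.basename(filename).split("_")
    assert fields.pop(0) == "primary"
    dataset_id = int(fields.pop(0))        # id of the dataset this output is associated with
    designation = fields.pop(0)
    visible = fields.pop(0) == "visible"
    ext = fields.pop(0).lower()
    dbkey = fields[0] if fields else default_dbkey   # optional trailing dbkey
    return dataset_id, designation, visible, ext, dbkey

print(parse_primary_filename("primary_12_mySubset_visible_bed_hg18"))
# -> (12, 'mySubset', True, 'bed', 'hg18')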
diff -r 108533bf35b8 -r b77721ef035d lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py Fri Aug 21 15:18:17 2009 -0400
+++ b/lib/galaxy/tools/parameters/dynamic_options.py Fri Aug 21 17:03:42 2009 -0400
@@ -242,6 +242,55 @@
rval.append( add_value )
return rval
+class RemoveValueFilter( Filter ):
+ """
+ Removes a value from an options list.
+
+ Type: remove_value
+
+ Required Attributes:
+ value: value to remove from select list
+ or
+ ref: param to refer to
+ or
+ meta_ref: dataset to refer to
+ key: metadata key to compare to
+ """
+ def __init__( self, d_option, elem ):
+ Filter.__init__( self, d_option, elem )
+ self.value = elem.get( "value", None )
+ self.ref_name = elem.get( "ref", None )
+ self.meta_ref = elem.get( "meta_ref", None )
+ self.metadata_key = elem.get( "key", None )
+ assert self.value is not None or ( ( self.ref_name is not None or self.meta_ref is not None )and self.metadata_key is not None ), ValueError( "Required 'value' or 'ref' and 'key' attributes missing from filter" )
+ self.multiple = string_as_bool( elem.get( "multiple", "False" ) )
+ self.separator = elem.get( "separator", "," )
+ def filter_options( self, options, trans, other_values ):
+ if trans is not None and trans.workflow_building_mode: return options
+ assert self.value is not None or ( self.ref_name is not None and self.ref_name in other_values ) or (self.meta_ref is not None and self.meta_ref in other_values ) or ( trans is not None and trans.workflow_building_mode), Exception( "Required dependency '%s' or '%s' not found in incoming values" % ( self.ref_name, self.meta_ref ) )
+ def compare_value( option_value, filter_value ):
+ if isinstance( filter_value, list ):
+ if self.multiple:
+ option_value = option_value.split( self.separator )
+ for value in filter_value:
+ if value not in filter_value:
+ return False
+ return True
+ return option_value in filter_value
+ if self.multiple:
+ return filter_value in option_value.split( self.separator )
+ return option_value == filter_value
+ value = self.value
+ if value is None:
+ if self.ref_name is not None:
+ value = other_values.get( self.ref_name )
+ else:
+ data_ref = other_values.get( self.meta_ref )
+ if not isinstance( data_ref, self.dynamic_option.tool_param.tool.app.model.HistoryDatasetAssociation ):
+ return options #cannot modify options
+ value = data_ref.metadata.get( self.metadata_key, None )
+ return [ ( disp_name, optval, selected ) for disp_name, optval, selected in options if not compare_value( optval, value ) ]
+
class SortByColumnFilter( Filter ):
"""
Sorts an options list by a column
@@ -274,6 +323,7 @@
unique_value = UniqueValueFilter,
multiple_splitter = MultipleSplitterFilter,
add_value = AdditionalValueFilter,
+ remove_value = RemoveValueFilter,
sort_by = SortByColumnFilter )
class DynamicOptions( object ):
diff -r 108533bf35b8 -r b77721ef035d lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Fri Aug 21 15:18:17 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Fri Aug 21 17:03:42 2009 -0400
@@ -82,7 +82,7 @@
job = assoc.job
break
if not job:
- raise Exception("Failed to get job information for dataset hid %d" % hid)
+ raise Exception("Failed to get job information for dataset hid %d" % data.hid)
# Get the tool object
tool_id = job.tool_id
try: