galaxy-commits
February 2013
commit/galaxy-central: greg: Handle new repository_metadata columns when resetting metadata on repositories in the tool shed.
by commits-noreply@bitbucket.org, 25 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/993c232d34cd/
changeset: 993c232d34cd
user: greg
date: 2013-02-25 22:30:40
summary: Handle new repository_metadata columns when resetting metadata on repositories in the tool shed.
affected #: 3 files
diff -r ecbfab5f9f1b070bda03520700335d800b8fc761 -r 993c232d34cda550a5605f87d7416467d4e2c33f lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -34,8 +34,6 @@
REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = "data_manager_conf.xml"
MAX_CONTENT_SIZE = 32768
NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'repository_dependencies.xml', 'tool_dependencies.xml', REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]
-GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
-TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
TOOL_TYPES_NOT_IN_TOOL_PANEL = [ 'manage_data' ]
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}&<>" )
@@ -484,10 +482,11 @@
repository_dependency_id=repository_dependency.id )
trans.sa_session.add( rrda )
trans.sa_session.flush()
-def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
+def build_repository_ids_select_field( trans, name='repository_ids', multiple=True, display='checkboxes' ):
"""Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
- if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
for repository in trans.sa_session.query( trans.model.Repository ) \
.filter( trans.model.Repository.table.c.deleted == False ) \
.order_by( trans.model.Repository.table.c.name,
@@ -496,7 +495,8 @@
option_label = '%s (%s)' % ( repository.name, owner )
option_value = '%s' % trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
- elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
+ else:
+ # We're in Galaxy.
for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
.filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
.order_by( trans.model.ToolShedRepository.table.c.name,
@@ -899,6 +899,7 @@
if not os.path.exists( os.path.join( dest_path, copied_file ) ):
shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ """Create or update a repository_metadatqa record in the tool shed."""
downloadable = is_downloadable( metadata_dict )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
@@ -909,6 +910,11 @@
changeset_revision=changeset_revision,
metadata=metadata_dict,
downloadable=downloadable )
+ # Always set the default values for the following columns. When resetting all metadata on a repository, this will reset the values.
+ repository_metadata.tools_functionally_correct = False
+ repository_metadata.do_not_test = False
+ repository_metadata.time_last_tested = None
+ repository_metadata.tool_test_errors = None
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
return repository_metadata
@@ -2949,7 +2955,7 @@
return True
return False
def is_downloadable( metadata_dict ):
- # NOTE: although repository README files are considered Galaxy utilities, they have no effect on determining if a revision is instakllable.
+ # NOTE: although repository README files are considered Galaxy utilities, they have no effect on determining if a revision is installable.
# See the comments in the compare_readme_files() method.
if 'datatypes' in metadata_dict:
# We have proprietary datatypes.
@@ -3503,7 +3509,8 @@
def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
"""Reset all metadata on a single repository in a tool shed."""
def reset_all_tool_versions( trans, id, repo ):
- changeset_revisions = []
+ """Reset tool version lineage for those changeset revisions that include valid tools."""
+ changeset_revisions_that_contain_tools = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -3511,10 +3518,10 @@
metadata = repository_metadata.metadata
if metadata:
if metadata.get( 'tools', None ):
- changeset_revisions.append( changeset_revision )
- # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
+ changeset_revisions_that_contain_tools.append( changeset_revision )
+ # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that are downloadable and contain tools.
# If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ):
+ for index, changeset_revision in enumerate( changeset_revisions_that_contain_tools ):
tool_versions_dict = {}
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
metadata = repository_metadata.metadata
@@ -3531,7 +3538,7 @@
tool_dict[ 'id' ],
tool_dict[ 'version' ],
tool_dict[ 'guid' ],
- changeset_revisions[ 0:index ] )
+ changeset_revisions_that_contain_tools[ 0:index ] )
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
@@ -3556,7 +3563,7 @@
work_dir = tempfile.mkdtemp()
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = repo.changectx( changeset )
- log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
+ log.debug( "Cloning repository changeset revision: %s", str( ctx.rev() ) )
cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
if cloned_ok:
log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
@@ -3626,9 +3633,11 @@
reset_tool_data_tables( trans.app )
return invalid_file_tups, metadata_dict
def reset_metadata_on_selected_repositories( trans, **kwd ):
- # This method is called from both Galaxy and the Tool Shed, so the cntrller param is required.
+ """
+ Inspect the repository changelog to reset metadata for all appropriate changeset revisions. This method is called from both Galaxy and the
+ Tool Shed.
+ """
repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
- CONTROLLER = kwd[ 'CONTROLLER' ]
message = ''
status = 'done'
if repository_ids:
@@ -3636,10 +3645,12 @@
unsuccessful_count = 0
for repository_id in repository_ids:
try:
- if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
+ if trans.webapp.name == 'community':
+ # We're in the tool shed.
repository = get_repository_in_tool_shed( trans, repository_id )
invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
- elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
+ else:
+ # We're in Galaxy.
repository = get_installed_tool_shed_repository( trans, repository_id )
invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
if invalid_file_tups:
diff -r ecbfab5f9f1b070bda03520700335d800b8fc761 -r 993c232d34cda550a5605f87d7416467d4e2c33f lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -721,12 +721,11 @@
@web.require_admin
def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = suc.TOOL_SHED_ADMIN_CONTROLLER
message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = suc.build_repository_ids_select_field( trans, suc.TOOL_SHED_ADMIN_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans )
return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
diff -r ecbfab5f9f1b070bda03520700335d800b8fc761 -r 993c232d34cda550a5605f87d7416467d4e2c33f lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1711,12 +1711,11 @@
@web.require_admin
def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- kwd[ 'CONTROLLER' ] = suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER
message, status = suc.reset_metadata_on_selected_repositories( trans, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
- repositories_select_field = suc.build_repository_ids_select_field( trans, suc.GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
+ repositories_select_field = suc.build_repository_ids_select_field( trans )
return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dd4ecbbbbab3/
changeset: dd4ecbbbbab3
user: jmchilton
date: 2013-02-07 21:18:33
summary: Update lwr_client through https://bitbucket.org/jmchilton/lwr/commits/3034b5cb789a6c96b005b838542c6e7….
UChicago reported some issues with the use of mmap in the LWR client for large files. To get around this, I have implemented an optional, alternative transport layer for the LWR client that is backed by pycurl instead of urllib2. This can be enabled by setting the environment variable LWR_CURL_TRANSPORT=1 for the Galaxy process. If LWR_CURL_TRANSPORT is set, the Python pycurl package must be installed.
affected #: 4 files
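A minimal sketch of the switch described above, assuming the transport package introduced in the diff below is importable as lwr_client.transport (the URL here is illustrative):

import os
from lwr_client.transport import get_transport  # import path assumed; see diff below

# LWR_CURL_TRANSPORT unset or "0" selects the default urllib2 transport;
# any other value selects the pycurl transport (the pycurl package must be installed).
os.environ[ 'LWR_CURL_TRANSPORT' ] = '1'
transport = get_transport()
response = transport.execute( 'http://lwr.example.org:8913/check_complete?job_id=1' )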
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -5,15 +5,15 @@
This module contains logic for interfacing with an external LWR server.
"""
-import mmap
import os
import re
import time
import urllib
-import urllib2
import simplejson
+from transport import get_transport
+
class JobInputs(object):
"""
@@ -254,6 +254,18 @@
return self.job_inputs.rewritten_command_line
+class parseJson(object):
+
+ def __init__(self):
+ pass
+
+ def __call__(self, func):
+ def replacement(*args, **kwargs):
+ response = func(*args, **kwargs)
+ return simplejson.loads(response)
+ return replacement
+
+
class Client(object):
"""
Objects of this client class perform low-level communication with a remote LWR server.
@@ -283,9 +295,7 @@
self.remote_host = remote_host
self.job_id = job_id
self.private_key = private_key
-
- def _url_open(self, request, data):
- return urllib2.urlopen(request, data)
+ self.transport = get_transport()
def __build_url(self, command, args):
if self.private_key:
@@ -294,29 +304,20 @@
url = self.remote_host + command + "?" + data
return url
- def __raw_execute(self, command, args={}, data=None):
+ def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
url = self.__build_url(command, args)
- request = urllib2.Request(url=url, data=data)
- response = self._url_open(request, data)
+ response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
return response
- def __raw_execute_and_parse(self, command, args={}, data=None):
- response = self.__raw_execute(command, args, data)
- return simplejson.loads(response.read())
-
+ @parseJson()
def __upload_file(self, action, path, name=None, contents=None):
- input = open(path, 'rb')
- try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input, name)
- finally:
- input.close()
-
- def __upload_contents(self, action, path, contents, name=None):
if not name:
name = os.path.basename(path)
args = {"job_id": self.job_id, "name": name}
- return self.__raw_execute_and_parse(action, args, contents)
+ input_path = path
+ if contents:
+ input_path = None
+ return self.__raw_execute(action, args, contents, input_path)
def upload_tool_file(self, path):
"""
@@ -364,7 +365,7 @@
contents : str
Rewritten contents of the config file to upload.
"""
- return self.__upload_contents("upload_config_file", path, contents)
+ return self.__upload_file("upload_config_file", path, contents=contents)
def upload_working_directory_file(self, path):
"""
@@ -378,9 +379,10 @@
"""
return self.__upload_file("upload_working_directory_file", path)
+ @parseJson()
def _get_output_type(self, name):
- return self.__raw_execute_and_parse("get_output_type", {"name": name,
- "job_id": self.job_id})
+ return self.__raw_execute("get_output_type", {"name": name,
+ "job_id": self.job_id})
def download_work_dir_output(self, source, working_directory, output_path):
"""
@@ -414,25 +416,19 @@
name = os.path.basename(path)
output_type = self._get_output_type(name)
if output_type == "direct":
- output = open(path, "wb")
+ output_path = path
elif output_type == "task":
- output = open(os.path.join(working_directory, name), "wb")
+ output_path = os.path.join(working_directory, name)
else:
raise Exception("No remote output found for dataset with path %s" % path)
- self.__raw_download_output(name, self.job_id, output_type, output)
+ self.__raw_download_output(name, self.job_id, output_type, output_path)
- def __raw_download_output(self, name, job_id, output_type, output_file):
- response = self.__raw_execute("download_output", {"name": name,
- "job_id": self.job_id,
- "output_type": output_type})
- try:
- while True:
- buffer = response.read(1024)
- if buffer == "":
- break
- output_file.write(buffer)
- finally:
- output_file.close()
+ def __raw_download_output(self, name, job_id, output_type, output_path):
+ self.__raw_execute("download_output",
+ {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type},
+ output_path=output_path)
def launch(self, command_line):
"""
@@ -463,11 +459,12 @@
return complete_response
time.sleep(1)
+ @parseJson()
def raw_check_complete(self):
"""
Get check_complete response from the remote server.
"""
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id": self.job_id})
+ check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
return check_complete_response
def check_complete(self):
@@ -482,11 +479,12 @@
"""
self.__raw_execute("clean", {"job_id": self.job_id})
+ @parseJson()
def setup(self):
"""
Setup remote LWR server to run this job.
"""
- return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
+ return self.__raw_execute("setup", {"job_id": self.job_id})
def _read(path):
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
@@ -0,0 +1,16 @@
+from standard import Urllib2Transport
+from curl import PycurlTransport
+import os
+
+
+def get_transport(os_module=os):
+ use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
+ ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
+ ## else use curl.
+ if use_curl.isdigit() and not int(use_curl):
+ return Urllib2Transport()
+ else:
+ return PycurlTransport()
+
+
+__all__ = [get_transport]
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/transport/curl.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
@@ -0,0 +1,42 @@
+from cStringIO import StringIO
+try:
+ from pycurl import Curl
+except:
+ pass
+from os.path import getsize
+
+
+PYCURL_UNAVAILABLE_MESSAGE = \
+ "You are attempting to use the Pycurl version of the LWR client by pycurl is unavailable."
+
+
+class PycurlTransport(object):
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ buf = self._open_output(output_path)
+ try:
+ c = self._new_curl_object()
+ c.setopt(c.URL, url.encode('ascii'))
+ c.setopt(c.WRITEFUNCTION, buf.write)
+ if input_path:
+ c.setopt(c.UPLOAD, 1)
+ c.setopt(c.READFUNCTION, open(input_path, 'rb').read)
+ filesize = getsize(input_path)
+ c.setopt(c.INFILESIZE, filesize)
+ if data:
+ c.setopt(c.POST, 1)
+ c.setopt(c.POSTFIELDS, data)
+ c.perform()
+ if not output_path:
+ return buf.getvalue()
+ finally:
+ buf.close()
+
+ def _new_curl_object(self):
+ try:
+ return Curl()
+ except NameError:
+ raise ImportError(PYCURL_UNAVAILABLE_MESSAGE)
+
+ def _open_output(self, output_path):
+ return open(output_path, 'wb') if output_path else StringIO()
diff -r ce9789a35356da2b2ee4ae723506d5af57a0ce69 -r dd4ecbbbbab33020fb5ff482fc302f092824e514 lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -0,0 +1,33 @@
+"""
+LWR HTTP Client layer based on Python Standard Library (urllib2)
+"""
+import mmap
+import urllib2
+
+
+class Urllib2Transport(object):
+
+ def _url_open(self, request, data):
+ return urllib2.urlopen(request, data)
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ request = urllib2.Request(url=url, data=data)
+ input = None
+ try:
+ if input_path:
+ input = open(input_path, 'rb')
+ data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
+ response = self._url_open(request, data)
+ finally:
+ if input:
+ input.close()
+ if output_path:
+ with open(output_path, 'wb') as output:
+ while True:
+ buffer = response.read(1024)
+ if buffer == "":
+ break
+ output.write(buffer)
+ return response
+ else:
+ return response.read()
https://bitbucket.org/galaxy/galaxy-central/commits/ecbfab5f9f1b/
changeset: ecbfab5f9f1b
user: natefoo
date: 2013-02-25 21:16:30
summary: Merged in jmchilton/galaxy-central-lwr (pull request #118)
Implement optional, alternative pycurl backend for LWR client.
affected #: 4 files
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -5,15 +5,15 @@
This module contains logic for interfacing with an external LWR server.
"""
-import mmap
import os
import re
import time
import urllib
-import urllib2
import simplejson
+from transport import get_transport
+
class JobInputs(object):
"""
@@ -254,6 +254,18 @@
return self.job_inputs.rewritten_command_line
+class parseJson(object):
+
+ def __init__(self):
+ pass
+
+ def __call__(self, func):
+ def replacement(*args, **kwargs):
+ response = func(*args, **kwargs)
+ return simplejson.loads(response)
+ return replacement
+
+
class Client(object):
"""
Objects of this client class perform low-level communication with a remote LWR server.
@@ -283,9 +295,7 @@
self.remote_host = remote_host
self.job_id = job_id
self.private_key = private_key
-
- def _url_open(self, request, data):
- return urllib2.urlopen(request, data)
+ self.transport = get_transport()
def __build_url(self, command, args):
if self.private_key:
@@ -294,29 +304,20 @@
url = self.remote_host + command + "?" + data
return url
- def __raw_execute(self, command, args={}, data=None):
+ def __raw_execute(self, command, args={}, data=None, input_path=None, output_path=None):
url = self.__build_url(command, args)
- request = urllib2.Request(url=url, data=data)
- response = self._url_open(request, data)
+ response = self.transport.execute(url, data=data, input_path=input_path, output_path=output_path)
return response
- def __raw_execute_and_parse(self, command, args={}, data=None):
- response = self.__raw_execute(command, args, data)
- return simplejson.loads(response.read())
-
+ @parseJson()
def __upload_file(self, action, path, name=None, contents=None):
- input = open(path, 'rb')
- try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input, name)
- finally:
- input.close()
-
- def __upload_contents(self, action, path, contents, name=None):
if not name:
name = os.path.basename(path)
args = {"job_id": self.job_id, "name": name}
- return self.__raw_execute_and_parse(action, args, contents)
+ input_path = path
+ if contents:
+ input_path = None
+ return self.__raw_execute(action, args, contents, input_path)
def upload_tool_file(self, path):
"""
@@ -364,7 +365,7 @@
contents : str
Rewritten contents of the config file to upload.
"""
- return self.__upload_contents("upload_config_file", path, contents)
+ return self.__upload_file("upload_config_file", path, contents=contents)
def upload_working_directory_file(self, path):
"""
@@ -378,9 +379,10 @@
"""
return self.__upload_file("upload_working_directory_file", path)
+ @parseJson()
def _get_output_type(self, name):
- return self.__raw_execute_and_parse("get_output_type", {"name": name,
- "job_id": self.job_id})
+ return self.__raw_execute("get_output_type", {"name": name,
+ "job_id": self.job_id})
def download_work_dir_output(self, source, working_directory, output_path):
"""
@@ -414,25 +416,19 @@
name = os.path.basename(path)
output_type = self._get_output_type(name)
if output_type == "direct":
- output = open(path, "wb")
+ output_path = path
elif output_type == "task":
- output = open(os.path.join(working_directory, name), "wb")
+ output_path = os.path.join(working_directory, name)
else:
raise Exception("No remote output found for dataset with path %s" % path)
- self.__raw_download_output(name, self.job_id, output_type, output)
+ self.__raw_download_output(name, self.job_id, output_type, output_path)
- def __raw_download_output(self, name, job_id, output_type, output_file):
- response = self.__raw_execute("download_output", {"name": name,
- "job_id": self.job_id,
- "output_type": output_type})
- try:
- while True:
- buffer = response.read(1024)
- if buffer == "":
- break
- output_file.write(buffer)
- finally:
- output_file.close()
+ def __raw_download_output(self, name, job_id, output_type, output_path):
+ self.__raw_execute("download_output",
+ {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type},
+ output_path=output_path)
def launch(self, command_line):
"""
@@ -463,11 +459,12 @@
return complete_response
time.sleep(1)
+ @parseJson()
def raw_check_complete(self):
"""
Get check_complete response from the remote server.
"""
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id": self.job_id})
+ check_complete_response = self.__raw_execute("check_complete", {"job_id": self.job_id})
return check_complete_response
def check_complete(self):
@@ -482,11 +479,12 @@
"""
self.__raw_execute("clean", {"job_id": self.job_id})
+ @parseJson()
def setup(self):
"""
Setup remote LWR server to run this job.
"""
- return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
+ return self.__raw_execute("setup", {"job_id": self.job_id})
def _read(path):
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/__init__.py
@@ -0,0 +1,16 @@
+from standard import Urllib2Transport
+from curl import PycurlTransport
+import os
+
+
+def get_transport(os_module=os):
+ use_curl = os_module.getenv('LWR_CURL_TRANSPORT', "0")
+ ## If LWR_CURL_TRANSPORT is unset or set to 0, use default,
+ ## else use curl.
+ if use_curl.isdigit() and not int(use_curl):
+ return Urllib2Transport()
+ else:
+ return PycurlTransport()
+
+
+__all__ = [get_transport]
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/transport/curl.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/curl.py
@@ -0,0 +1,42 @@
+from cStringIO import StringIO
+try:
+ from pycurl import Curl
+except:
+ pass
+from os.path import getsize
+
+
+PYCURL_UNAVAILABLE_MESSAGE = \
+ "You are attempting to use the Pycurl version of the LWR client by pycurl is unavailable."
+
+
+class PycurlTransport(object):
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ buf = self._open_output(output_path)
+ try:
+ c = self._new_curl_object()
+ c.setopt(c.URL, url.encode('ascii'))
+ c.setopt(c.WRITEFUNCTION, buf.write)
+ if input_path:
+ c.setopt(c.UPLOAD, 1)
+ c.setopt(c.READFUNCTION, open(input_path, 'rb').read)
+ filesize = getsize(input_path)
+ c.setopt(c.INFILESIZE, filesize)
+ if data:
+ c.setopt(c.POST, 1)
+ c.setopt(c.POSTFIELDS, data)
+ c.perform()
+ if not output_path:
+ return buf.getvalue()
+ finally:
+ buf.close()
+
+ def _new_curl_object(self):
+ try:
+ return Curl()
+ except NameError:
+ raise ImportError(PYCURL_UNAVAILABLE_MESSAGE)
+
+ def _open_output(self, output_path):
+ return open(output_path, 'wb') if output_path else StringIO()
diff -r fa34924860aaa282fe3c3021a257f2523848a6e6 -r ecbfab5f9f1b070bda03520700335d800b8fc761 lib/galaxy/jobs/runners/lwr_client/transport/standard.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/transport/standard.py
@@ -0,0 +1,33 @@
+"""
+LWR HTTP Client layer based on Python Standard Library (urllib2)
+"""
+import mmap
+import urllib2
+
+
+class Urllib2Transport(object):
+
+ def _url_open(self, request, data):
+ return urllib2.urlopen(request, data)
+
+ def execute(self, url, data=None, input_path=None, output_path=None):
+ request = urllib2.Request(url=url, data=data)
+ input = None
+ try:
+ if input_path:
+ input = open(input_path, 'rb')
+ data = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
+ response = self._url_open(request, data)
+ finally:
+ if input:
+ input.close()
+ if output_path:
+ with open(output_path, 'wb') as output:
+ while True:
+ buffer = response.read(1024)
+ if buffer == "":
+ break
+ output.write(buffer)
+ return response
+ else:
+ return response.read()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/26f7825cc871/
changeset: 26f7825cc871
user: jmchilton
date: 2013-02-23 08:14:22
summary: Refactor code related to expanding multi inputs to more easily allow for switching between matched and product mode in the same workflow execution.
affected #: 1 file
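For context on the two modes named in the summary above: matched mode pairs the i-th selections of each multi input (N runs), while product mode crosses every combination (N*M runs). A toy sketch of the two expansions, as hypothetical standalone code rather than the controller logic itself:

import itertools

multi_inputs = { 'step1|input': [ 'a', 'b' ], 'step2|input': [ 'x', 'y' ] }
keys = list( multi_inputs )
value_lists = [ multi_inputs[ key ] for key in keys ]

# matched: pair values positionally -> 2 runs: (a, x) and (b, y)
matched_runs = [ dict( zip( keys, values ) ) for values in zip( *value_lists ) ]

# product: every combination -> 4 runs: (a, x), (a, y), (b, x), (b, y)
product_runs = [ dict( zip( keys, values ) ) for values in itertools.product( *value_lists ) ]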
diff -r 1c2e1625dd8a419aa1d335ddc19ba34857c05fc4 -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -2106,22 +2106,27 @@
def _expand_multiple_inputs(kwargs, mode):
- (input_combos, multi_inputs) = _build_input_combos(kwargs, mode)
+ (single_inputs, matched_multi_inputs, multiplied_multi_inputs) = \
+ _split_inputs(kwargs, mode)
+
+ # Build up every combination of inputs to be run together.
+ #input_combos = [single_inputs]
+ input_combos = _extend_with_matched_combos(single_inputs, matched_multi_inputs)
+ input_combos = _extend_with_multiplied_combos(input_combos, multiplied_multi_inputs)
+
+ # Input names that are multiply specified
+ multi_input_keys = \
+ matched_multi_inputs.keys() + multiplied_multi_inputs.keys()
+
for input_combo in input_combos:
for key, value in input_combo.iteritems():
kwargs[key] = value
- yield (kwargs, multi_inputs.keys())
+ yield (kwargs, multi_input_keys)
-def _build_input_combos(kwargs, mode):
- if mode == "product":
- return _build_input_combos_product(kwargs)
- else: # mode == "matched"
- return _build_input_combos_matched(kwargs)
-def _build_input_combos_matched(kwargs):
- (single_inputs, multi_inputs) = _split_inputs(kwargs)
+def _extend_with_matched_combos(single_inputs, multi_inputs):
if len(multi_inputs) == 0:
- return ([{}], {})
+ return [single_inputs]
matched_multi_inputs = []
@@ -2139,11 +2144,12 @@
raise Exception("Failed to match up multi-select inputs, must select equal number of data files in each multiselect")
for index, value in enumerate(multi_input_values):
matched_multi_inputs[index][multi_input_key] = value
- return (matched_multi_inputs, multi_inputs)
+ return matched_multi_inputs
-def _build_input_combos_product(kwargs):
- (single_inputs, multi_inputs) = _split_inputs(kwargs)
- combos = [single_inputs]
+
+def _extend_with_multiplied_combos(input_combos, multi_inputs):
+ combos = input_combos
+
for multi_input_key, multi_input_value in multi_inputs.iteritems():
iter_combos = []
@@ -2152,14 +2158,18 @@
iter_combos.append(_copy_and_extend_inputs(combo, multi_input_key, input_value))
combos = iter_combos
- return (combos, multi_inputs)
+ return combos
+
def _copy_and_extend_inputs(inputs, key, value):
new_inputs = dict(inputs)
new_inputs[key] = value
return new_inputs
-def _split_inputs(kwargs):
+
+def _split_inputs(kwargs, mode):
+ """
+ """
input_keys = filter(lambda a: a.endswith('|input'), kwargs)
single_inputs = {}
multi_inputs = {}
@@ -2169,4 +2179,10 @@
multi_inputs[input_key] = input_val
else:
single_inputs[input_key] = input_val
- return (single_inputs, multi_inputs)
+ matched_multi_inputs = {}
+ multiplied_multi_inputs = {}
+ if mode == "product":
+ multiplied_multi_inputs = multi_inputs
+ else:
+ matched_multi_inputs = multi_inputs
+ return (single_inputs, matched_multi_inputs, multiplied_multi_inputs)
https://bitbucket.org/galaxy/galaxy-central/commits/6f30725d5973/
changeset: 6f30725d5973
user: jmchilton
date: 2013-02-23 08:14:22
summary: Implement UI multi batch workflow inputs.
affected #: 4 files
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -2110,7 +2110,6 @@
_split_inputs(kwargs, mode)
# Build up every combination of inputs to be run together.
- #input_combos = [single_inputs]
input_combos = _extend_with_matched_combos(single_inputs, matched_multi_inputs)
input_combos = _extend_with_multiplied_combos(input_combos, multiplied_multi_inputs)
@@ -2172,17 +2171,18 @@
"""
input_keys = filter(lambda a: a.endswith('|input'), kwargs)
single_inputs = {}
- multi_inputs = {}
+ matched_multi_inputs = {}
+ multiplied_multi_inputs = {}
for input_key in input_keys:
input_val = kwargs[input_key]
if isinstance(input_val, list):
- multi_inputs[input_key] = input_val
+ input_base = input_key[:-len("|input")]
+ mode_key = "%s|multi_mode" % input_base
+ mode = kwargs.get(mode_key, "matched")
+ if mode == "matched":
+ matched_multi_inputs[input_key] = input_val
+ else:
+ multiplied_multi_inputs[input_key] = input_val
else:
single_inputs[input_key] = input_val
- matched_multi_inputs = {}
- multiplied_multi_inputs = {}
- if mode == "product":
- multiplied_multi_inputs = multi_inputs
- else:
- matched_multi_inputs = multi_inputs
return (single_inputs, matched_multi_inputs, multiplied_multi_inputs)
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 static/images/silk/link.png
Binary file static/images/silk/link.png has changed
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 static/images/silk/link_break.png
Binary file static/images/silk/link_break.png has changed
diff -r 26f7825cc871c752d1e79bbe8984857d67f1eb0e -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -33,12 +33,12 @@
} else {
select.val($('option:last', select).val());
}
+ select.siblings('img').hide();
select.removeAttr('multiple').removeAttr('size');
placeholder = 'type to filter';
} else {
- // Comment out the following line to multiple batch input workflows in UI.
- $('.multiinput').addClass('disabled');
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
+ select.siblings('img').show();
select.attr('multiple', 'multiple').attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
@@ -126,6 +126,57 @@
select.after(filter);
select.width(new_width);
});
+
+ // Augment hidden fields with icons.
+ // http://stackoverflow.com/a/2088430
+ var imgOn='${h.url_for("/static/images/silk/link.png")}';
+ var imgOff='${h.url_for("/static/images/silk/link_break.png")}';
+ $(function(){
+ $(".multi-mode").each(function(){
+ if($(this).val() == "matched") {
+ $(this).before($(document.createElement("img"))
+ .attr({src:imgOn,title:'Checkbox', id:$(this).attr("id")})
+ .css("display", $(this).css("display"))
+ .addClass("chkBoxImg"));
+ } else {
+ $(this).before($(document.createElement("img"))
+ .attr({src:imgOff, title:'Checkbox',id:$(this).attr("id")})
+ .css("display", $(this).css("display"))
+ .addClass("chkBoxImg"));
+ }
+ });
+ $("img.chkBoxImg").click(function(){
+ i= $(this).siblings("input[type=hidden]");
+ s=$(this).attr("src");
+ if(s==imgOn) {
+ $(this).attr("src",imgOff);
+ $(i).val("product");
+ } else {
+ $(this).attr("src",imgOn);
+ $(i).val("matched");
+ }
+ });
+ });
+ $("#tool_form").submit(function(e) {
+ var matchLength = -1;
+ $('span.multiinput_wrap select[name*="|input"]').each(function() {
+ var value = $(this).val();
+ if(value instanceof Array) {
+ // Multi-value
+ if($(this).siblings("input[type=hidden]").val() == "matched") {
+ var length = $(this).val().length;
+ if(matchLength == -1) {
+ matchLength = length;
+ } else if(length != matchLength) {
+ e.preventDefault();
+ alert("Linked inputs must be submitted in equal number.");
+ return false;
+ }
+ }
+ }
+ });
+ return true;
+ });
});
</script></%def>
@@ -260,6 +311,7 @@
%if step.type == 'data_input':
##Input Dataset Step, wrap for multiinput.
<span class='multiinput_wrap'>
+ <input class="multi-mode" type="hidden" name="${str(step.id)}|multi_mode" id="${str(step.id)}|multi_mode" value="matched" />
${param.get_html_field( t, value, other_values ).get_html( str(step.id) + "|" + prefix )}
</span>
%else:
@@ -340,30 +392,6 @@
<form id="tool_form" name="tool_form" method="POST">
## <input type="hidden" name="workflow_name" value="${h.to_unicode( workflow.name ) | h}" />
-<!-- TODO: Implement UI for selecting between product and matched mode
- for batch workflows in multiple inputs are selected for 2 or more
- params.
-
- 1) Delete this line above: $('.multiinput').addClass('disabled');
- 2) Allow user to select between product and matched mode.
-
- If user selected 5 inputs for one param and 5 inputs for another
- in matched mode that will be run the workflow 5 times matching
- each input and in product mode it will run the workflow 25 times
- with every combination of input pairs. If user selects 6 inputs
- for one param and 4 for another, in product mode 24 workflows
- will run and in matched mode the submission will fail.
-
- In matched mode the inputs are matched from top to bottom
- regardless of the order they are actually select in. This
- behavior is I assume the desired behavior but I have only tested
- it in chrome, care should be taken to test behavior on other
- browsers and augment UI to ensure numbers of inputs matches
- up.
--->
-<input type="hidden" name="multiple_input_mode" value="matched" /><!-- product or matched -->
-
-
%if wf_parms:
<div class="metadataForm"><div class="metadataFormTitle">Workflow Parameters</div>
https://bitbucket.org/galaxy/galaxy-central/commits/fa34924860aa/
changeset: fa34924860aa
user: jmchilton
date: 2013-02-23 08:14:22
summary: Touch up UI related to multi input batch mode - move icon up by the multi document icon, switch from icon to CSS span, clean up variable names.
affected #: 1 file
diff -r 6f30725d59732a351ca9b4f6fd7bc14f43c8a4a1 -r fa34924860aaa282fe3c3021a257f2523848a6e6 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -1,5 +1,11 @@
<%inherit file="/base.mako"/>
+<style>
+/* TODO: Move this block into base.less? base.css? Someone more familiar with GUI should move this. */
+.icon-button.link {background:url(../images/silk/link.png) no-repeat;cursor:pointer;float:none;display:inline-block;margin-left:10px;}
+.icon-button.link-broken {background:url(../images/silk/link_break.png) no-repeat;cursor:pointer;float:none;display:inline-block;margin-left:10px;}
+</style>
+
<%def name="javascripts()">
${parent.javascripts()}
${h.js( "libs/jquery/jquery.autocomplete" )}
@@ -33,12 +39,12 @@
} else {
select.val($('option:last', select).val());
}
- select.siblings('img').hide();
+ select.closest('.form-row').children('label').children('span.mode-icon').hide();
select.removeAttr('multiple').removeAttr('size');
placeholder = 'type to filter';
} else {
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
- select.siblings('img').show();
+ select.closest('.form-row').children('label').children('span.mode-icon').show();
select.attr('multiple', 'multiple').attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
@@ -129,30 +135,25 @@
// Augment hidden fields with icons.
// http://stackoverflow.com/a/2088430
- var imgOn='${h.url_for("/static/images/silk/link.png")}';
- var imgOff='${h.url_for("/static/images/silk/link_break.png")}';
$(function(){
$(".multi-mode").each(function(){
if($(this).val() == "matched") {
- $(this).before($(document.createElement("img"))
- .attr({src:imgOn,title:'Checkbox', id:$(this).attr("id")})
- .css("display", $(this).css("display"))
- .addClass("chkBoxImg"));
+ $(this).closest('.form-row').children('label').append($('<span class="icon-button link mode-icon"></span>')
+ .attr({id:$(this).attr("id")})
+ .css("display", $(this).css("display")));
} else {
- $(this).before($(document.createElement("img"))
- .attr({src:imgOff, title:'Checkbox',id:$(this).attr("id")})
- .css("display", $(this).css("display"))
- .addClass("chkBoxImg"));
+ $(this).closest('.form-row').children('label').append($('<span class="icon-button link-broken mode-icon"></span>')
+ .attr({id:$(this).attr("id")})
+ .css("display", $(this).css("display")));
}
});
- $("img.chkBoxImg").click(function(){
- i= $(this).siblings("input[type=hidden]");
- s=$(this).attr("src");
- if(s==imgOn) {
- $(this).attr("src",imgOff);
+ $("span.mode-icon").click(function(){
+ i= $(this).closest('.form-row').find("input[type=hidden]");
+ if($(this).hasClass("link")) {
+ $(this).removeClass("link").addClass("link-broken");
$(i).val("product");
} else {
- $(this).attr("src",imgOn);
+ $(this).removeClass("link-broken").addClass("link");
$(i).val("matched");
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1c2e1625dd8a/
changeset: 1c2e1625dd8a
user: dan
date: 2013-02-22 23:03:49
summary: Fix for displaying error messages on DataToolParameter where optional=True.
affected #: 1 file
diff -r 574e22b584eb7ef8b6168902347473ed3adeccac -r 1c2e1625dd8a419aa1d335ddc19ba34857c05fc4 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1555,7 +1555,7 @@
# although, this should never be called in workflow mode right?
if trans.workflow_building_mode:
return None
- if not value:
+ if not value and not self.optional:
raise ValueError( "History does not include a dataset of the required format / build" )
if value in [None, "None"]:
return None
https://bitbucket.org/galaxy/galaxy-central/commits/c177960e4ed6/
changeset: c177960e4ed6
branch: stable
user: dan
date: 2013-02-22 23:03:49
summary: Fix for displaying error messages on DataToolParameter where optional=True.
affected #: 1 file
diff -r 31b09605fcc313b3e93efb927ac328bfcc42ad82 -r c177960e4ed61925a8b6c858e1f3f8d54c93cb37 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1552,7 +1552,7 @@
# although, this should never be called in workflow mode right?
if trans.workflow_building_mode:
return None
- if not value:
+ if not value and not self.optional:
raise ValueError( "History does not include a dataset of the required format / build" )
if value in [None, "None"]:
return None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Enhance filtering in the repository revisions api.
by commits-noreply@bitbucket.org, 22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/574e22b584eb/
changeset: 574e22b584eb
user: greg
date: 2013-02-22 22:41:56
summary: Enhance filtering in the repository revisions api.
affected #: 2 files
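The filters added in the diff below are ordinary query-string parameters on the index method, so a call against a local tool shed might look like the following (host and port are illustrative; urllib2 and simplejson match this era of the codebase):

import urllib2
import simplejson

url = 'http://localhost:9009/api/repository_revisions?downloadable=true&do_not_test=false'
revisions = simplejson.loads( urllib2.urlopen( url ).read() )
for revision in revisions:
    print revision[ 'id' ], revision[ 'repository_id' ]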
diff -r ed6104097dc9b9c519f71a392225e7ed45bfede3 -r 574e22b584eb7ef8b6168902347473ed3adeccac lib/galaxy/webapps/community/api/repository_revision_contents.py
--- a/lib/galaxy/webapps/community/api/repository_revision_contents.py
+++ b/lib/galaxy/webapps/community/api/repository_revision_contents.py
@@ -11,9 +11,11 @@
log = logging.getLogger( __name__ )
def default_value_mapper( trans, repository_metadata ):
- return { 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
- 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+ value_mapper = { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
+ if repository_metadata.time_last_tested:
+ value_mapper[ 'time_last_tested' ] = time_ago( repository_metadata.time_last_tested )
+ return value_mapper
class RepositoryRevisionContentsController( BaseAPIController ):
@web.expose_api
diff -r ed6104097dc9b9c519f71a392225e7ed45bfede3 -r 574e22b584eb7ef8b6168902347473ed3adeccac lib/galaxy/webapps/community/api/repository_revisions.py
--- a/lib/galaxy/webapps/community/api/repository_revisions.py
+++ b/lib/galaxy/webapps/community/api/repository_revisions.py
@@ -2,6 +2,7 @@
from galaxy.web.framework.helpers import time_ago
import galaxy.util.shed_util_common as suc
from galaxy import web, util
+from galaxy.model.orm import and_, or_
from galaxy.web.base.controller import BaseAPIController
from galaxy.web.framework.helpers import is_true
@@ -13,29 +14,56 @@
log = logging.getLogger( __name__ )
def default_value_mapper( trans, repository_metadata ):
- return { 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
- 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+ value_mapper = { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
+ if repository_metadata.time_last_tested:
+ value_mapper[ 'time_last_tested' ] = time_ago( repository_metadata.time_last_tested )
+ return value_mapper
class RepositoryRevisionsController( BaseAPIController ):
"""RESTful controller for interactions with tool shed repository revisions."""
@web.expose_api
- def index( self, trans, downloadable=True, **kwd ):
+ def index( self, trans, **kwd ):
"""
GET /api/repository_revisions
Displays a collection (list) of repository revisions.
"""
rval = []
- downloadable = util.string_as_bool( downloadable )
+ # Build up an anded clause list of filters.
+ clause_list = []
+ # Filter by downloadable if received.
+ downloadable = kwd.get( 'downloadable', None )
+ if downloadable is not None:
+ clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) )
+ # Filter by tools_functionally_correct if received.
+ tools_functionally_correct = kwd.get( 'tools_functionally_correct', None )
+ if tools_functionally_correct is not None:
+ clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) )
+ # Filter by do_not_test if received.
+ do_not_test = kwd.get( 'do_not_test', None )
+ if do_not_test is not None:
+ clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) )
+ # Filter by must_include_tools if received.
+ must_include_tools = kwd.get( 'must_include_tools', False )
try:
query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
- .filter( trans.app.model.RepositoryMetadata.table.c.downloadable == downloadable ) \
+ .filter( and_( *clause_list ) ) \
.order_by( trans.app.model.RepositoryMetadata.table.c.repository_id ) \
.all()
for repository_metadata in query:
- item = repository_metadata.get_api_value( view='collection', value_mapper=default_value_mapper( trans, repository_metadata ) )
- item[ 'url' ] = web.url_for( 'repository_revision', id=trans.security.encode_id( repository_metadata.id ) )
- rval.append( item )
+ if must_include_tools:
+ metadata = repository_metadata.metadata
+ if 'tools' in metadata:
+ ok_to_return = True
+ else:
+ ok_to_return = False
+ else:
+ ok_to_return = True
+ if ok_to_return:
+ item = repository_metadata.get_api_value( view='collection',
+ value_mapper=default_value_mapper( trans, repository_metadata ) )
+ item[ 'url' ] = web.url_for( 'repository_revision', id=trans.security.encode_id( repository_metadata.id ) )
+ rval.append( item )
except Exception, e:
rval = "Error in the Tool Shed repository_revisions API in index: " + str( e )
log.error( rval + ": %s" % str( e ) )
@@ -49,7 +77,8 @@
"""
try:
repository_metadata = suc.get_repository_metadata_by_id( trans, id )
- repository_data = repository_metadata.get_api_value( view='element', value_mapper=default_value_mapper( trans, repository_metadata ) )
+ repository_data = repository_metadata.get_api_value( view='element',
+ value_mapper=default_value_mapper( trans, repository_metadata ) )
repository_data[ 'contents_url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=id )
except Exception, e:
message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for deleting and undeleting repositories in the tool shed: if deleting, all installable revisions are marked as not installable, and if undeleting, all revisions are inspected and those determined to be installable are marked accordingly.
by commits-noreply@bitbucket.org, 22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ed6104097dc9/
changeset: ed6104097dc9
user: greg
date: 2013-02-22 21:19:47
summary: Fix for deleting and undeleting repositories in the tool shed: if deleting, all installable revisions are marked as not installable, and if undeleting, all revisions are inspected and those determined to be installable are marked accordingly.
affected #: 1 file
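Condensed, the toggle described in the summary amounts to the logic below (attribute and helper names are taken from the diff that follows; the standalone function itself is hypothetical):

def set_repository_deleted( repository, deleted ):
    # suc is galaxy.util.shed_util_common, as imported by the controller.
    if deleted:
        # Deleting: mark every installable revision as not installable.
        for repository_metadata in repository.downloadable_revisions:
            repository_metadata.downloadable = False
    else:
        # Undeleting: re-inspect each revision's metadata to decide installability.
        for repository_metadata in repository.metadata_revisions:
            metadata = repository_metadata.metadata
            if metadata and suc.is_downloadable( metadata ):
                repository_metadata.downloadable = True
    repository.deleted = deleted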
diff -r 3bdb4291e5e666c4d4b86e184b4599e61b847864 -r ed6104097dc9b9c519f71a392225e7ed45bfede3 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -586,12 +586,17 @@
deleted_repositories = ""
for repository_id in ids:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- if not repository.deleted:
- repository.deleted = True
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- count += 1
- deleted_repositories += " %s " % repository.name
+ if repository:
+ if not repository.deleted:
+ # Mark all installable repository_metadata records as not installable.
+ for repository_metadata in repository.downloadable_revisions:
+ repository_metadata.downloadable = False
+ trans.sa_session.add( repository_metadata )
+ repository.deleted = True
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ count += 1
+ deleted_repositories += " %s " % repository.name
if count:
message = "Deleted %d %s: %s" % ( count, inflector.cond_plural( len( ids ), "repository" ), deleted_repositories )
else:
@@ -740,12 +745,20 @@
undeleted_repositories = ""
for repository_id in ids:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- if repository.deleted:
- repository.deleted = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- count += 1
- undeleted_repositories += " %s" % repository.name
+ if repository:
+ if repository.deleted:
+ # Inspect all repository_metadata records to determine those that are installable, and mark them accordingly.
+ for repository_metadata in repository.metadata_revisions:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if suc.is_downloadable( metadata ):
+ repository_metadata.downloadable = True
+ trans.sa_session.add( repository_metadata )
+ repository.deleted = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ count += 1
+ undeleted_repositories += " %s" % repository.name
if count:
message = "Undeleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), undeleted_repositories )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Fix to new-style display_application url mapping/formation; display_applications/link_generator: clean up
by commits-noreply@bitbucket.org, 22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3bdb4291e5e6/
changeset: 3bdb4291e5e6
user: carlfeberhard
date: 2013-02-22 20:58:50
summary: Fix to new-style display_application url mapping/formation; display_applications/link_generator: clean up
affected #: 2 files
diff -r 5f10f61335fbcfb7982a879edc80325f5a73402f -r 3bdb4291e5e666c4d4b86e184b4599e61b847864 lib/galaxy/datatypes/display_applications/link_generator.py
--- a/lib/galaxy/datatypes/display_applications/link_generator.py
+++ b/lib/galaxy/datatypes/display_applications/link_generator.py
@@ -1,4 +1,6 @@
-"""Separating Transaction based elements of display applications from datatypes.
+"""Classes to generate links for display applications.
+
+Separating Transaction based elements of display applications from datatypes.
"""
import urllib
@@ -10,19 +12,22 @@
from galaxy import util
from galaxy.web import url_for
-
from galaxy.datatypes.interval import Interval, Gff, Wiggle, CustomTrack
+#TODO: Ideally, these classes would be instantiated in the trans (or some other semi-persistant fixture)
+# Currently, these are instantiated per HDA which is not the best solution
+
+#TODO: these could be extended to handle file_function and parse/contain the builds.txt files
def get_display_app_link_generator( display_app_name ):
"""Returns an instance of the proper link generator class
based on the display_app_name or DisplayAppLinkGenerator
- if the name is unrecognized.
+ if the display_app_name is unrecognized.
"""
if display_app_name == 'ucsc':
return UCSCDisplayAppLinkGenerator()
- if display_app_name == 'gbrowse':
+ elif display_app_name == 'gbrowse':
return GBrowseDisplayAppLinkGenerator()
return DisplayAppLinkGenerator()
@@ -58,9 +63,10 @@
class UCSCDisplayAppLinkGenerator( DisplayAppLinkGenerator ):
- """Class for UCSC display application link generators.
+ """Class for generating links to display data in the
+ UCSC genome browser.
- This class returns UCSC main and test links for the following datatypes:
+ This class returns links for the following datatypes and their subclasses:
Interval, Wiggle, Gff, CustomTrack
"""
def __init__( self ):
@@ -69,7 +75,6 @@
def _link_function_from_datatype( self, datatype ):
"""Dispatch to proper link generating function based on datatype.
"""
- # they're all the same
if( ( isinstance( datatype, Interval ) )
or ( isinstance( datatype, Wiggle ) )
or ( isinstance( datatype, Gff ) )
@@ -83,8 +88,6 @@
and content of dataset.
"""
# this is a refactor of Interval.ucsc_links, GFF.ucsc_links, Wiggle.ucsc_links, and CustomTrack.ucsc_links
- # ...which are all the same function
-
#TODO: app vars can be moved into init (and base_url as well)
chrom, start, stop = dataset.datatype.get_estimated_display_viewport( dataset )
if chrom is None:
@@ -107,10 +110,11 @@
class GBrowseDisplayAppLinkGenerator( DisplayAppLinkGenerator ):
- """Class for UCSC display application link generators.
+ """Class for generating links to display data in the
+ GBrowse genome browser.
- This class returns UCSC main and test links for the following datatypes:
- Interval, Wiggle, Gff, CustomTrack
+ This class returns links for the following datatypes and their subclasses:
+ Gff, Wiggle
"""
def __init__( self ):
self.display_app_name = 'gbrowse'
@@ -118,7 +122,6 @@
def _link_function_from_datatype( self, datatype ):
"""Dispatch to proper link generating function based on datatype.
"""
- # they're all the same
if( ( isinstance( datatype, Gff ) )
or ( isinstance( datatype, Wiggle ) ) ):
return self.gbrowse_links
diff -r 5f10f61335fbcfb7982a879edc80325f5a73402f -r 3bdb4291e5e666c4d4b86e184b4599e61b847864 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -244,7 +244,7 @@
def get_display_app_url( display_app_link, hda, trans ):
web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
dataset_hash, user_hash = util.encode_dataset_user( trans, hda, None )
- return web_url_for( controller='/dataset',
+ return web_url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
user_id=user_hash,
@@ -280,4 +280,3 @@
display_apps.append( dict( label=hda.datatype.get_display_label( display_app_name ), links=app_links ) )
return display_apps
-
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Enhancements to the tool shed api to enable the currently most important updates for repository_metadata records.
by commits-noreply@bitbucket.org 22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5f10f61335fb/
changeset: 5f10f61335fb
user: greg
date: 2013-02-22 20:35:34
summary: Enhancements to the tool shed api to enable the currently most important updates for repository_metadata records.
affected #: 7 files
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repositories.py
--- a/lib/galaxy/webapps/community/api/repositories.py
+++ b/lib/galaxy/webapps/community/api/repositories.py
@@ -32,7 +32,7 @@
item[ 'url' ] = web.url_for( 'repository_contents', repository_id=trans.security.encode_id( repository.id ) )
rval.append( item )
except Exception, e:
- message = "Error in the Tool Shed API at index: %s" % str( e )
+ message = "Error in the Tool Shed repositories API in index: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
@@ -50,8 +50,8 @@
repository_data = repository.get_api_value( view='element', value_mapper=value_mapper )
repository_data[ 'contents_url' ] = web.url_for( 'repository_contents', repository_id=id )
except Exception, e:
- message = "Error in the Tool Shed API at show: %s" % str( e )
+ message = "Error in the Tool Shed repositories API in show: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
- return repository_data
\ No newline at end of file
+ return repository_data
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repository_contents.py
--- a/lib/galaxy/webapps/community/api/repository_contents.py
+++ b/lib/galaxy/webapps/community/api/repository_contents.py
@@ -11,21 +11,24 @@
class RepositoryContentsController( BaseAPIController ):
@web.expose_api
- def index( self, trans, repository_id, **kwd ):
+ def index( self, trans, **kwd ):
"""
- GET /api/repositories/{encoded_repsository_id}/contents
- Displays a collection (list) of repository contents.
+ GET /api/repositories/{encoded_repository_id}/contents
+ Displays a collection (dictionary) of repository contents.
:param repository_id: an encoded id string of the `Repository` to inspect
"""
rval = []
+ repository_id = kwd.get( 'repository_id', None )
try:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_dict = repository.as_dict( trans )
+ value_mapper={ 'id' : repository_id,
+ 'user_id' : trans.security.encode_id( repository.user_id ) }
+ repository_dict = repository.as_dict( value_mapper )
repository_dict[ 'url' ] = web.url_for( 'repository_contents', repository_id=repository_id )
rval.append( repository_dict )
except Exception, e:
- message = "Error in repository_contents API: %s" % str( e )
+ message = "Error in the Tool Shed repository_contents API in index: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repository_revision_contents.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/api/repository_revision_contents.py
@@ -0,0 +1,39 @@
+import logging
+from galaxy.web.framework.helpers import time_ago
+import galaxy.util.shed_util_common as suc
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+
+import pkg_resources
+pkg_resources.require( "Routes" )
+import routes
+
+log = logging.getLogger( __name__ )
+
+def default_value_mapper( trans, repository_metadata ):
+ return { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
+ 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+
+class RepositoryRevisionContentsController( BaseAPIController ):
+ @web.expose_api
+ def index( self, trans, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded_repository_metadata_id}/contents
+ Displays a collection (dictionary) of repository_metadata contents.
+
+ :param repository_metadata_id: an encoded id string of the `RepositoryMetadata` to inspect
+ """
+ rval = []
+ repository_metadata_id = kwd.get( 'repository_metadata_id', None )
+ try:
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ repository_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
+ repository_dict[ 'url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=repository_metadata_id )
+ rval.append( repository_dict )
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revision_contents API in index: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+ return rval
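The value_mapper convention used by this controller is worth a standalone illustration: serialization walks the visible keys, and any key present in value_mapper gets the pre-computed mapped value (an encoded id, a humanized timestamp) instead of the raw attribute. A rough sketch under that reading of the code, with a fake model object:

    def as_dict( item, visible_keys, value_mapper=None ):
        # keys present in value_mapper win over the raw attribute values
        if value_mapper is None:
            value_mapper = {}
        rval = {}
        for key in visible_keys:
            rval[ key ] = value_mapper.get( key, getattr( item, key, None ) )
        return rval

    class FakeRepositoryMetadata( object ):
        id = 7
        repository_id = 3
        changeset_revision = 'abc123'

    mapper = { 'id': 'encoded-7', 'repository_id': 'encoded-3' }
    print as_dict( FakeRepositoryMetadata(), ( 'id', 'repository_id', 'changeset_revision' ), mapper )
    # {'changeset_revision': 'abc123', 'id': 'encoded-7', 'repository_id': 'encoded-3'}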
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/api/repository_revisions.py
--- a/lib/galaxy/webapps/community/api/repository_revisions.py
+++ b/lib/galaxy/webapps/community/api/repository_revisions.py
@@ -1,3 +1,5 @@
+import datetime
+from galaxy.web.framework.helpers import time_ago
import galaxy.util.shed_util_common as suc
from galaxy import web, util
from galaxy.web.base.controller import BaseAPIController
@@ -10,6 +12,11 @@
log = logging.getLogger( __name__ )
+def default_value_mapper( trans, repository_metadata ):
+ return { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ),
+ 'time_last_tested' : time_ago( repository_metadata.time_last_tested ) }
+
class RepositoryRevisionsController( BaseAPIController ):
"""RESTful controller for interactions with tool shed repository revisions."""
@web.expose_api
@@ -26,13 +33,56 @@
.order_by( trans.app.model.RepositoryMetadata.table.c.repository_id ) \
.all()
for repository_metadata in query:
- value_mapper={ 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
- item = repository_metadata.get_api_value( view='collection', value_mapper=value_mapper )
+ item = repository_metadata.get_api_value( view='collection', value_mapper=default_value_mapper( trans, repository_metadata ) )
item[ 'url' ] = web.url_for( 'repository_revision', id=trans.security.encode_id( repository_metadata.id ) )
rval.append( item )
except Exception, e:
- rval = "Error in repository_revisions API at index: " + str( e )
+ rval = "Error in the Tool Shed repository_revisions API in index: " + str( e )
log.error( rval + ": %s" % str( e ) )
trans.response.status = 500
return rval
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/repository_revisions/{encoded_repository_metadata_id}
+ Displays information about a repository_metadata record in the Tool Shed.
+ """
+ try:
+ repository_metadata = suc.get_repository_metadata_by_id( trans, id )
+ repository_data = repository_metadata.get_api_value( view='element', value_mapper=default_value_mapper( trans, repository_metadata ) )
+ repository_data[ 'contents_url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=id )
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+ return repository_data
+ @web.expose_api
+ def update( self, trans, payload, **kwd ):
+ """
+ PUT /api/repository_revisions/{encoded_repository_metadata_id}/{payload}
+ Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload.
+ """
+ repository_metadata_id = kwd.get( 'id', None )
+ try:
+ repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+ flush_needed = False
+ for key, new_value in payload.items():
+ if hasattr( repository_metadata, key ):
+ old_value = getattr( repository_metadata, key )
+ setattr( repository_metadata, key, new_value )
+ if key in [ 'tools_functionally_correct', 'time_last_tested' ]:
+ # Automatically update repository_metadata.time_last_tested.
+ repository_metadata.time_last_tested = datetime.datetime.utcnow()
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ except Exception, e:
+ message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+ item = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
+ item[ 'url' ] = web.url_for( 'repository_revision', id=repository_metadata_id )
+ return [ item ]
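The core of update() is a copy-whitelisted-keys loop that also keeps time_last_tested fresh whenever a test-related field changes. The same logic, sketched without the SQLAlchemy session (record and payload are stand-ins):

    import datetime

    def apply_update( record, payload ):
        """Copy payload keys that exist on record; return whether a flush is needed."""
        flush_needed = False
        for key, new_value in payload.items():
            if hasattr( record, key ):
                setattr( record, key, new_value )
                if key in [ 'tools_functionally_correct', 'time_last_tested' ]:
                    # mirror the commit: test-related updates refresh the timestamp
                    record.time_last_tested = datetime.datetime.utcnow()
                flush_needed = True
        return flush_needed

In the controller above, a True result triggers a single sa_session.add() and flush() rather than one per attribute.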
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/buildapp.py
--- a/lib/galaxy/webapps/community/buildapp.py
+++ b/lib/galaxy/webapps/community/buildapp.py
@@ -77,6 +77,12 @@
name_prefix='repository_',
path_prefix='/api/repositories/:repository_id',
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
+ webapp.api_mapper.resource( 'content',
+ 'contents',
+ controller='repository_revision_contents',
+ name_prefix='repository_revision_',
+ path_prefix='/api/repository_revisions/:repository_metadata_id',
+ parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
webapp.api_mapper.resource( 'repository', 'repositories', path_prefix='/api' )
webapp.api_mapper.resource( 'repository_revision', 'repository_revisions', path_prefix='/api' )
webapp.finalize_config()
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -133,11 +133,8 @@
self.email_alerts = email_alerts
self.times_downloaded = times_downloaded
self.deprecated = deprecated
- def as_dict( self, trans ):
- value_mapper={ 'id' : trans.security.encode_id( self.id ),
- 'user_id' : trans.security.encode_id( self.user_id ) }
- repository_dict = self.get_api_value( view='element', value_mapper=value_mapper )
- return repository_dict
+ def as_dict( self, value_mapper=None ):
+ return self.get_api_value( view='element', value_mapper=value_mapper )
def get_api_value( self, view='collection', value_mapper=None ):
if value_mapper is None:
value_mapper = {}
@@ -194,10 +191,13 @@
fp.close()
class RepositoryMetadata( object, APIItem ):
- api_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable' )
- api_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable' )
- def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False,
+ api_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'tools_functionally_correct',
+ 'do_not_test', 'time_last_tested', 'tool_test_errors' )
+ api_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'tools_functionally_correct',
+ 'do_not_test', 'time_last_tested', 'tool_test_errors' )
+ def __init__( self, id=None, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False,
tools_functionally_correct=False, do_not_test=False, time_last_tested=None, tool_test_errors=None ):
+ self.id = id
self.repository_id = repository_id
self.changeset_revision = changeset_revision
self.metadata = metadata or dict()
@@ -208,6 +208,8 @@
self.do_not_test = do_not_test
self.time_last_tested = time_last_tested
self.tool_test_errors = tool_test_errors
+ def as_dict( self, value_mapper=None ):
+ return self.get_api_value( view='element', value_mapper=value_mapper )
def get_api_value( self, view='collection', value_mapper=None ):
if value_mapper is None:
value_mapper = {}
diff -r 8019917d7c309b2e9d4ca25078a6bda15569f94f -r 5f10f61335fbcfb7982a879edc80325f5a73402f scripts/api/tool_shed_repository_revision_update.py
--- /dev/null
+++ b/scripts/api/tool_shed_repository_revision_update.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+"""
+PUT/update script to update appropriate values in a repository_metadata table record in the Tool Shed.
+
+usage: tool_shed_repository_revision_update.py key url key1=value1 key2=value2 ...
+"""
+
+import os, sys
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import update
+
+import pkg_resources
+pkg_resources.require( "simplejson" )
+
+import simplejson
+
+to_json_string = simplejson.dumps
+from_json_string = simplejson.loads
+
+data = {}
+for key, value in [ kwarg.split( '=', 1 ) for kwarg in sys.argv[ 3: ] ]:
+ """
+ This example script will properly handle updating the value of one or more of the following RepositoryMetadata attributes:
+ tools_functionally_correct, do_not_test, tool_test_errors
+ """
+ if key in [ 'tools_functionally_correct', 'do_not_test' ]:
+ if str( value ).lower() in [ 'true', 'yes', 'on' ]:
+ new_value = True
+ else:
+ new_value = False
+ elif key in [ 'tool_test_errors' ]:
+ new_value = from_json_string( value )
+ else:
+ new_value = str( value )
+ data[ key ] = new_value
+
+update( sys.argv[ 1 ], sys.argv[ 2 ], data )
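A hypothetical invocation, following the usage line in the script's docstring (the API key, host, and encoded metadata id are placeholders):

    # python scripts/api/tool_shed_repository_revision_update.py <api_key> \
    #     http://<tool_shed_host>/api/repository_revisions/<encoded_metadata_id> \
    #     tools_functionally_correct=true do_not_test=false tool_test_errors='{}'
    #
    # The loop above would turn those key=value arguments into the payload:
    #     { 'tools_functionally_correct': True, 'do_not_test': False, 'tool_test_errors': {} }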
commit/galaxy-central: dannon: Update documentation for max_data_lines defaulting to 100000.
by commits-noreply@bitbucket.org 22 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8019917d7c30/
changeset: 8019917d7c30
user: dannon
date: 2013-02-22 17:14:37
summary: Update documentation for max_data_lines defaulting to 100000.
affected #: 1 file
diff -r 6714ad95a7f3dc9096e4630633beab30df54cdd8 -r 8019917d7c309b2e9d4ca25078a6bda15569f94f lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -35,19 +35,19 @@
data.Text.init_meta( self, dataset, copy_from=copy_from )
def set_meta( self, dataset, overwrite = True, skip = None, max_data_lines = 100000, max_guess_type_data_lines = None, **kwd ):
"""
- Tries to determine the number of columns as well as those columns
- that contain numerical values in the dataset. A skip parameter is
- used because various tabular data types reuse this function, and
- their data type classes are responsible to determine how many invalid
- comment lines should be skipped. Using None for skip will cause skip
- to be zero, but the first line will be processed as a header. A
+ Tries to determine the number of columns as well as those columns that
+ contain numerical values in the dataset. A skip parameter is used
+ because various tabular data types reuse this function, and their data
+ type classes are responsible to determine how many invalid comment
+ lines should be skipped. Using None for skip will cause skip to be
+ zero, but the first line will be processed as a header. A
max_data_lines parameter is used because various tabular data types
reuse this function, and their data type classes are responsible to
determine how many data lines should be processed to ensure that the
non-optional metadata parameters are properly set; if used, optional
metadata parameters will be set to None, unless the entire file has
- already been read. Using None (default) for max_data_lines will
- process all data lines.
+ already been read. Using None for max_data_lines will process all data
+ lines.
Items of interest:
commit/galaxy-central: carlfeberhard: generate display application links outside of datatype classes
by commits-noreply@bitbucket.org 21 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6714ad95a7f3/
changeset: 6714ad95a7f3
user: carlfeberhard
date: 2013-02-22 05:30:55
summary: generate display application links outside of datatype classes
affected #: 2 files
diff -r 429103684d864f93563cd92c30888bd623fdb80e -r 6714ad95a7f3dc9096e4630633beab30df54cdd8 lib/galaxy/datatypes/display_applications/link_generator.py
--- /dev/null
+++ b/lib/galaxy/datatypes/display_applications/link_generator.py
@@ -0,0 +1,153 @@
+"""Separating Transaction based elements of display applications from datatypes.
+"""
+
+import urllib
+
+# for the url_for hack
+import pkg_resources
+pkg_resources.require( "Routes" )
+import routes
+
+from galaxy import util
+from galaxy.web import url_for
+
+from galaxy.datatypes.interval import Interval, Gff, Wiggle, CustomTrack
+
+
+def get_display_app_link_generator( display_app_name ):
+ """Returns an instance of the proper link generator class
+ based on the display_app_name or DisplayAppLinkGenerator
+ if the name is unrecognized.
+ """
+ if display_app_name == 'ucsc':
+ return UCSCDisplayAppLinkGenerator()
+
+ if display_app_name == 'gbrowse':
+ return GBrowseDisplayAppLinkGenerator()
+
+ return DisplayAppLinkGenerator()
+
+
+class DisplayAppLinkGenerator( object ):
+ """Base class for display application link generators.
+
+ This class returns an empty list of links for all datatypes.
+ """
+ def __init__( self ):
+ self.display_app_name = ''
+
+ def no_links_available( self, dataset, app, base_url, url_for=url_for ):
+ """Called when no display application links are available
+ for this display app name and datatype combination.
+ """
+ return []
+
+ def _link_function_from_datatype( self, datatype ):
+ """Dispatch to proper link generating function on datatype.
+ """
+ return self.no_links_available
+
+ def generate_links( self, trans, dataset ):
+ # here's the hack - which is expensive (time)
+ web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
+
+ link_function = self._link_function_from_datatype( dataset.datatype )
+ display_links = link_function( dataset, trans.app, trans.request.base, url_for=web_url_for )
+
+ return display_links
+
+
+class UCSCDisplayAppLinkGenerator( DisplayAppLinkGenerator ):
+ """Class for UCSC display application link generators.
+
+ This class returns UCSC main and test links for the following datatypes:
+ Interval, Wiggle, Gff, CustomTrack
+ """
+ def __init__( self ):
+ self.display_app_name = 'ucsc'
+
+ def _link_function_from_datatype( self, datatype ):
+ """Dispatch to proper link generating function based on datatype.
+ """
+ # they're all the same
+ if( ( isinstance( datatype, Interval ) )
+ or ( isinstance( datatype, Wiggle ) )
+ or ( isinstance( datatype, Gff ) )
+ or ( isinstance( datatype, CustomTrack ) ) ):
+ return self.ucsc_links
+ else:
+ return super( UCSCDisplayAppLinkGenerator, self )._link_function_from_datatype( datatype )
+
+ def ucsc_links( self, dataset, app, base_url, url_for=url_for ):
+ """Generate links to UCSC genome browser sites based on the dbkey
+ and content of dataset.
+ """
+ # this is a refactor of Interval.ucsc_links, GFF.ucsc_links, Wiggle.ucsc_links, and CustomTrack.ucsc_links
+ # ...which are all the same function
+
+ #TODO: app vars can be moved into init (and base_url as well)
+ chrom, start, stop = dataset.datatype.get_estimated_display_viewport( dataset )
+ if chrom is None:
+ return []
+ ret_val = []
+ for site_name, site_url in util.get_ucsc_by_build(dataset.dbkey):
+ if site_name in app.config.ucsc_display_sites:
+ internal_url = url_for( controller='dataset', dataset_id=dataset.id,
+ action='display_at', filename='%s_%s' % ( self.display_app_name, site_name ) )
+ base_url = app.config.get( "display_at_callback", base_url )
+ display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
+ % (base_url, url_for( controller='root' ), dataset.id, self.display_app_name) )
+ redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s"
+ % (site_url, dataset.dbkey, chrom, start, stop ) )
+
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( ( site_name, link ) )
+
+ return ret_val
+
+
+class GBrowseDisplayAppLinkGenerator( DisplayAppLinkGenerator ):
+ """Class for UCSC display application link generators.
+
+ This class returns UCSC main and test links for the following datatypes:
+ Interval, Wiggle, Gff, CustomTrack
+ """
+ def __init__( self ):
+ self.display_app_name = 'gbrowse'
+
+ def _link_function_from_datatype( self, datatype ):
+ """Dispatch to proper link generating function based on datatype.
+ """
+ # they're all the same
+ if( ( isinstance( datatype, Gff ) )
+ or ( isinstance( datatype, Wiggle ) ) ):
+ return self.gbrowse_links
+ else:
+ return super( GBrowseDisplayAppLinkGenerator, self )._link_function_from_datatype( datatype )
+
+ def gbrowse_links( self, dataset, app, base_url, url_for=url_for ):
+ """Generate links to GBrowse genome browser sites based on the dbkey
+ and content of dataset.
+ """
+ # when normalized for var names, Gff.gbrowse_links and Wiggle.gbrowse_links are the same
+ # also: almost identical to ucsc_links except for the 'chr' stripping, sites_by_build, config key
+ # could be refactored even more
+ chrom, start, stop = dataset.datatype.get_estimated_display_viewport( dataset )
+ if chrom is None:
+ return []
+ ret_val = []
+ for site_name, site_url in util.get_gbrowse_sites_by_build( dataset.dbkey ):
+ if site_name in app.config.gbrowse_display_sites:
+ # strip chr from seqid
+ if chrom.startswith( 'chr' ) and len ( chrom ) > 3:
+ chrom = chrom[3:]
+ internal_url = url_for( controller='dataset', dataset_id=dataset.id,
+ action='display_at', filename='%s_%s' % ( self.display_app_name, site_name ) )
+ redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, chrom, start, stop ) )
+ base_url = app.config.get( "display_at_callback", base_url )
+ display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
+ % ( base_url, url_for( controller='root' ), dataset.id, self.display_app_name ) )
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( ( site_name, link ) )
+
+ return ret_val
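Both ucsc_links and gbrowse_links assemble the final link the same way: an internal display_at URL carrying two percent-encoded query parameters, redirect_url and display_url. Just that composition, isolated with fabricated inputs:

    import urllib

    # fabricated stand-ins for the values computed in ucsc_links above
    internal_url = '/dataset/display_at/42/ucsc_main'
    display_url = urllib.quote_plus(
        'http://galaxy.example.org/display_as?id=42&display_app=ucsc&authz_method=display_at' )
    redirect_url = urllib.quote_plus(
        'http://genome.example.org/?db=hg19&position=chr1:100-200&hgt.customText=%s' )

    link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
    print link

Note the %%s escape in the code above: it leaves a literal %s inside redirect_url for the display site to substitute its own callback URL into later.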
diff -r 429103684d864f93563cd92c30888bd623fdb80e -r 6714ad95a7f3dc9096e4630633beab30df54cdd8 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -14,6 +14,8 @@
from galaxy.datatypes.display_applications import util
from galaxy.datatypes.metadata import FileParameter
+from galaxy.datatypes.display_applications.link_generator import get_display_app_link_generator
+
import pkg_resources
pkg_resources.require( "Routes" )
import routes
@@ -128,8 +130,6 @@
"""
GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}
Displays information about a history content (dataset).
-
-
"""
hda_dict = {}
try:
@@ -225,7 +225,8 @@
hda_dict[ 'meta_files' ] = meta_files
hda_dict[ 'display_apps' ] = get_display_apps( trans, hda )
- #hda_dict[ 'display_types' ] = get_display_types( trans, hda )
+ hda_dict[ 'display_types' ] = get_old_display_applications( trans, hda )
+
hda_dict[ 'visualizations' ] = hda.get_visualizations()
hda_dict[ 'peek' ] = to_unicode( hda.display_peek() )
@@ -262,21 +263,21 @@
return display_apps
-def get_display_types( trans, hda ):
- #TODO: make more straightforward (somehow)
- #FIXME: need to force a transition to all new-style display applications
+def get_old_display_applications( trans, hda ):
display_apps = []
-
- for display_app in hda.datatype.get_display_types():
+ for display_app_name in hda.datatype.get_display_types():
+ link_generator = get_display_app_link_generator( display_app_name )
+ display_links = link_generator.generate_links( trans, hda )
+
app_links = []
- target_frame, display_links = hda.datatype.get_display_links( hda, display_app, trans.app, trans.request.base )
for display_name, display_link in display_links:
app_links.append({
- 'target' : target_frame,
+ 'target' : '_blank',
'href' : display_link,
'text' : display_name
})
if app_links:
- display_apps.append( dict( label=hda.datatype.get_display_label( display_app ), links=app_links ) )
+ display_apps.append( dict( label=hda.datatype.get_display_label( display_app_name ), links=app_links ) )
return display_apps
+
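For reference, get_old_display_applications returns entries shaped the same as get_display_apps' output; one fabricated example entry:

    # one entry of the returned display_apps list (values fabricated)
    entry = { 'label': 'display at UCSC',
              'links': [ { 'target': '_blank',
                           'href': '/dataset/display_at/42/ucsc_main?redirect_url=...&display_url=...',
                           'text': 'main' } ] }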