galaxy-commits
January 2014
6 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c21482ac06f8/
Changeset: c21482ac06f8
Branch: next-stable
User: natefoo
Date: 2014-01-27 19:37:33
Summary: Remove 4 tools from tool_conf.xml.main that were removed in migration 9.
Affected #: 1 file
diff -r 43d7d4a42cd2a3e31e02d86d0cf7d66dc928c631 -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -139,7 +139,6 @@
<tool file="plotting/histogram2.xml" /><tool file="plotting/scatterplot.xml" /><tool file="plotting/boxplot.xml" />
- <tool file="visualization/GMAJ.xml" /><tool file="visualization/build_ucsc_custom_track.xml" /><tool file="maf/vcf_to_maf_customtrack.xml" /><tool file="mutation/visualize.xml" />
@@ -175,9 +174,6 @@
<tool file="hyphy/hyphy_nj_tree_wrapper.xml" /><!-- <tool file="hyphy/hyphy_dnds_wrapper.xml" /> --></section>
- <section id="motifs" name="Motif Tools">
- <tool file="rgenetics/rgWebLogo3.xml" />
- </section><section id="clustal" name="Multiple Alignments"><tool file="rgenetics/rgClustalw.xml" /></section>
@@ -253,10 +249,6 @@
<tool file="gatk/variant_eval.xml" /><tool file="gatk/variant_combine.xml" /></section>
- <section id="peak_calling" name="NGS: Peak Calling">
- <tool file="peak_calling/macs_wrapper.xml" />
- <tool file="peak_calling/sicer_wrapper.xml" />
- </section><section id="ngs-rna-tools" name="NGS: RNA Analysis"><label id="rna_seq" text="RNA-seq" /><label id="filtering" text="Filtering" />
https://bitbucket.org/galaxy/galaxy-central/commits/401ee23dcf2f/
Changeset: 401ee23dcf2f
Branch: next-stable
User: natefoo
Date: 2014-01-27 19:58:31
Summary: Remove broken and/or obsolete tools.
Affected #: 33 files
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -4,28 +4,6 @@
: ${HOSTTYPE:=`uname -m`}
-# link to HYPHY is arch-dependent
-case "$OSTYPE" in
- linux-gnu)
- kernel=`uname -r | cut -f1,2 -d.`
- HYPHY="/galaxy/software/linux$kernel-$HOSTTYPE/hyphy"
- ;;
- darwin*)
- this_minor=`uname -r | awk -F. '{print ($1-4)}'`
- machine=`machine`
- for minor in `jot - 3 $this_minor 1`; do
- HYPHY="/galaxy/software/macosx10.$minor-$machine/hyphy"
- [ -d "$HYPHY" ] && break
- done
- [ ! -d "$HYPHY" ] && unset HYPHY
- ;;
- solaris2.10)
- # For the psu-production builder which is Solaris, but jobs run on a
- # Linux cluster
- HYPHY="/galaxy/software/linux2.6-x86_64/hyphy"
- ;;
-esac
-
LINKS="
/galaxy/data/location/add_scores.loc
/galaxy/data/location/all_fasta.loc
@@ -121,12 +99,6 @@
ln -sf $link tool-data
done
- if [ -d "$HYPHY" ]; then
- echo "Linking $HYPHY"
- rm -f tool-data/HYPHY
- ln -sf $HYPHY tool-data/HYPHY
- fi
-
if [ -d "$JARS" ]; then
echo "Linking $JARS"
rm -f tool-data/shared/jars
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 doc/source/lib/galaxy.tools.util.rst
--- a/doc/source/lib/galaxy.tools.util.rst
+++ b/doc/source/lib/galaxy.tools.util.rst
@@ -9,14 +9,6 @@
:undoc-members:
:show-inheritance:
-:mod:`hyphy_util` Module
-------------------------
-
-.. automodule:: galaxy.tools.util.hyphy_util
- :members:
- :undoc-members:
- :show-inheritance:
-
:mod:`maf_utilities` Module
---------------------------
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 lib/galaxy/tools/util/hyphy_util.py
--- a/lib/galaxy/tools/util/hyphy_util.py
+++ /dev/null
@@ -1,1163 +0,0 @@
-#Dan Blankenberg
-#Contains file contents and helper methods for HYPHY configurations
-import tempfile, os
-
-def get_filled_temp_filename(contents):
- fh = tempfile.NamedTemporaryFile('w')
- filename = fh.name
- fh.close()
- fh = open(filename, 'w')
- fh.write(contents)
- fh.close()
- return filename
-
-NJ_tree_shared_ibf = """
-COUNT_GAPS_IN_FREQUENCIES = 0;
-methodIndex = 1;
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function InferTreeTopology(verbFlag)
-{
- distanceMatrix = {ds.species,ds.species};
-
- MESSAGE_LOGGING = 0;
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"chooseDistanceFormula.def");
- InitializeDistances (0);
-
- for (i = 0; i<ds.species; i=i+1)
- {
- for (j = i+1; j<ds.species; j = j+1)
- {
- distanceMatrix[i][j] = ComputeDistanceFormula (i,j);
- }
- }
-
- MESSAGE_LOGGING = 1;
- cladesMade = 1;
-
-
- if (ds.species == 2)
- {
- d1 = distanceMatrix[0][1]/2;
- treeNodes = {{0,1,d1__},
- {1,1,d1__},
- {2,0,0}};
-
- cladesInfo = {{2,0}};
- }
- else
- {
- if (ds.species == 3)
- {
- /* generate least squares estimates here */
-
- d1 = (distanceMatrix[0][1]+distanceMatrix[0][2]-distanceMatrix[1][2])/2;
- d2 = (distanceMatrix[0][1]-distanceMatrix[0][2]+distanceMatrix[1][2])/2;
- d3 = (distanceMatrix[1][2]+distanceMatrix[0][2]-distanceMatrix[0][1])/2;
-
- treeNodes = {{0,1,d1__},
- {1,1,d2__},
- {2,1,d3__}
- {3,0,0}};
-
- cladesInfo = {{3,0}};
- }
- else
- {
- njm = (distanceMatrix > methodIndex)>=ds.species;
-
- treeNodes = {2*(ds.species+1),3};
- cladesInfo = {ds.species-1,2};
-
- for (i=Rows(treeNodes)-1; i>=0; i=i-1)
- {
- treeNodes[i][0] = njm[i][0];
- treeNodes[i][1] = njm[i][1];
- treeNodes[i][2] = njm[i][2];
- }
-
- for (i=Rows(cladesInfo)-1; i>=0; i=i-1)
- {
- cladesInfo[i][0] = njm[i][3];
- cladesInfo[i][1] = njm[i][4];
- }
-
- njm = 0;
- }
- }
- return 1.0;
-}
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function TreeMatrix2TreeString (doLengths)
-{
- treeString = "";
- p = 0;
- k = 0;
- m = treeNodes[0][1];
- n = treeNodes[0][0];
- treeString*(Rows(treeNodes)*25);
-
- while (m)
- {
- if (m>p)
- {
- if (p)
- {
- treeString*",";
- }
- for (j=p;j<m;j=j+1)
- {
- treeString*"(";
- }
- }
- else
- {
- if (m<p)
- {
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
- }
- else
- {
- treeString*",";
- }
- }
- if (n<ds.species)
- {
- GetString (nodeName, ds, n);
- if (doLengths != 1)
- {
- treeString*nodeName;
- }
- else
- {
- treeString*taxonNameMap[nodeName];
- }
- }
- if (doLengths>.5)
- {
- nodeName = ":"+treeNodes[k][2];
- treeString*nodeName;
- }
- k=k+1;
- p=m;
- n=treeNodes[k][0];
- m=treeNodes[k][1];
- }
-
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
-
- treeString*0;
- return treeString;
-}
-"""
-
-def get_NJ_tree (filename):
- return """
-DISTANCE_PROMPTS = 1;
-ExecuteAFile ("%s");
-
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-/* do sequence to branch map */
-
-taxonNameMap = {};
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
-}
-
-DataSetFilter filteredData = CreateFilter (ds,1);
-InferTreeTopology (0);
-treeString = TreeMatrix2TreeString (1);
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, treeString);
-fscanf (stdin, "String", ps_file);
-
-if (Abs(ps_file))
-{
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (ps_file, CLEAR_FILE, drawLetter, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
-}
-""" % (filename)
-
-def get_NJ_treeMF (filename):
- return """
-ExecuteAFile ("%s");
-
-VERBOSITY_LEVEL = -1;
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-_linesIn = Columns (inLines);
-isomorphicTreesBySequenceCount = {};
-
-/*---------------------------------------------------------*/
-
-_currentGene = 1;
-_currentState = 0;
-geneSeqs = "";
-geneSeqs * 128;
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, KEEP_OPEN);
-treeOutFile = LAST_FILE_PATH;
-
-fscanf (stdin,"String", ps_file);
-if (Abs(ps_file))
-{
- fprintf (ps_file, CLEAR_FILE, KEEP_OPEN);
-}
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- if (Abs(geneSeqs))
- {
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- }
-}
-
-fprintf (treeOutFile,CLOSE_FILE);
-if (Abs(ps_file))
-{
- fprintf (ps_file,CLOSE_FILE);
-}
-/*---------------------------------------------------------*/
-
-function _processAGene (_geneID, nwk_file, ps_file)
-{
- if (ds.species == 1)
- {
- fprintf (nwk_file, _geneID-1, "\\tNone \\tNone\\n");
- return 0;
-
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- /* do sequence to branch map */
-
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}});
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- DISTANCE_PROMPTS = (_geneID==1);
-
- InferTreeTopology (0);
- baseTree = TreeMatrix2TreeString (0);
- UseModel (USE_NO_MODEL);
-
- Tree baseTop = baseTree;
-
- /* standardize this top */
-
- for (k=0; k<Abs(isomorphicTreesBySequenceCount[filteredData.species]); k=k+1)
- {
- testString = (isomorphicTreesBySequenceCount[filteredData.species])[k];
- Tree testTree = testString;
- if (testTree == baseTop)
- {
- baseTree = testString;
- break;
- }
- }
- if (k==Abs(isomorphicTreesBySequenceCount[filteredData.species]))
- {
- if (k==0)
- {
- isomorphicTreesBySequenceCount[filteredData.species] = {};
- }
- (isomorphicTreesBySequenceCount[filteredData.species])[k] = baseTree;
- }
-
- fprintf (nwk_file, _geneID-1, "\\t", baseTree, "\\t", TreeMatrix2TreeString (1), "\\n");
- if (Abs(ps_file))
- {
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (stdout, _geneID, ":", givenTree,"\\n");
- fprintf (ps_file, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
- }
- return 0;
-}
-""" % (filename)
-
-BranchLengthsMF = """
-VERBOSITY_LEVEL = -1;
-
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-
-
-_linesIn = Columns (inLines);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-_currentGene = 1;
-
-_currentState = 0;
-
-geneSeqs = "";
-
-geneSeqs * 128;
-
-
-
-for (l=0; l<_linesIn; l=l+1)
-
-{
-
- if (Abs(inLines[l]) == 0)
-
- {
-
- if (_currentState == 1)
-
- {
-
- geneSeqs * 0;
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- geneSeqs * 128;
-
- _currentGene = _currentGene + 1;
-
- }
-
- }
-
- else
-
- {
-
- if (_currentState == 0)
-
- {
-
- _currentState = 1;
-
- }
-
- geneSeqs * inLines[l];
-
- geneSeqs * "\\n";
-
- }
-
-}
-
-
-
-if (_currentState == 1)
-
-{
-
- geneSeqs * 0;
-
- if (Abs(geneSeqs))
-
- {
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- }
-
-}
-
-
-
-fprintf (resultFile,CLOSE_FILE);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-function _processAGene (_geneID)
-
-{
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- if (_currentGene == 1)
-
- {
-
- SelectTemplateModel (filteredData);
-
-
-
- SetDialogPrompt ("Tree file");
-
- fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
- fscanf (stdin, "String", resultFile);
-
-
-
- /* do sequence to branch map */
-
-
-
- validNames = {};
-
- taxonNameMap = {};
-
-
-
- for (k=0; k<TipCount(givenTree); k=k+1)
-
- {
-
- validNames[TipName(givenTree,k)&&1] = 1;
-
- }
-
-
-
- for (k=0; k<BranchCount(givenTree); k=k+1)
-
- {
-
- thisName = BranchName(givenTree,k);
-
- taxonNameMap[thisName&&1] = thisName;
-
- }
-
-
-
- storeValidNames = validNames;
-
- fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Block\\tBranch\\tLength\\tLowerBound\\tUpperBound\\n");
-
- }
-
- else
-
- {
-
- HarvestFrequencies (vectorOfFrequencies, filteredData, 1,1,1);
-
- validNames = storeValidNames;
-
- }
-
-
-
- for (k=0; k<ds.species; k=k+1)
-
- {
-
- GetString (thisName, ds,k);
-
- shortName = (thisName^{{"\\\\..+",""}})&&1;
-
- if (validNames[shortName])
-
- {
-
- taxonNameMap[shortName] = thisName;
-
- validNames - (shortName);
-
- SetParameter (ds,k,shortName);
-
- }
-
- else
-
- {
-
- fprintf (resultFile,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree,"\\n");
-
- return 0;
-
- }
-
- }
-
-
-
- /* */
-
-
-
- LikelihoodFunction lf = (filteredData,givenTree);
-
- Optimize (res,lf);
-
-
-
- timer = Time(0)-timer;
-
-
-
- branchNames = BranchName (givenTree,-1);
-
- branchLengths = BranchLength (givenTree,-1);
-
-
-
-
-
- for (k=0; k<Columns(branchNames)-1; k=k+1)
-
- {
-
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- if (k==0)
-
- {
-
- /* compute a scaling factor */
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
-
- scaleFactor = BranchLength (givenTree,0);
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
-
- }
-
- fprintf (resultFile,_geneID,"\\t",taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-
- }
-
-
-
- ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-
- global treeScaler = 1;
-
- ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-
- COVARIANCE_PARAMETER = "treeScaler";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- fprintf (resultFile,_geneID,"\\tTotal Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-
- ClearConstraints (givenTree);
-
- return 0;
-
-}
-"""
-
-BranchLengths = """
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-SelectTemplateModel (filteredData);
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-fscanf (stdin, "String", resultFile);
-
-/* do sequence to branch map */
-
-validNames = {};
-taxonNameMap = {};
-
-for (k=0; k<TipCount(givenTree); k=k+1)
-{
- validNames[TipName(givenTree,k)&&1] = 1;
-}
-
-for (k=0; k<BranchCount(givenTree); k=k+1)
-{
- thisName = BranchName(givenTree,k);
- taxonNameMap[thisName&&1] = thisName;
-}
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- if (validNames[shortName])
- {
- taxonNameMap[shortName] = thisName;
- validNames - (shortName);
- SetParameter (ds,k,shortName);
- }
- else
- {
- fprintf (resultFile,CLEAR_FILE,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree);
- return 0;
- }
-}
-
-/* */
-
-LikelihoodFunction lf = (filteredData,givenTree);
-
-Optimize (res,lf);
-
-timer = Time(0)-timer;
-
-branchNames = BranchName (givenTree,-1);
-branchLengths = BranchLength (givenTree,-1);
-
-fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Branch\\tLength\\tLowerBound\\tUpperBound\\n");
-
-for (k=0; k<Columns(branchNames)-1; k=k+1)
-{
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- if (k==0)
- {
- /* compute a scaling factor */
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
- scaleFactor = BranchLength (givenTree,0);
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
- }
- fprintf (resultFile,taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-}
-
-ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-global treeScaler = 1;
-ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-COVARIANCE_PARAMETER = "treeScaler";
-COVARIANCE_PRECISION = 0.95;
-CovarianceMatrix (cmx,lf);
-ClearConstraints (givenTree);
-fprintf (resultFile,"Total Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-fprintf (resultFile,CLOSE_FILE);
-"""
-
-SimpleLocalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "AC";
- _analysisHeaders[7] = "AT";
- _analysisHeaders[8] = "CG";
- _analysisHeaders[9] = "CT";
- _analysisHeaders[10] = "GT";
- _analysisHeaders[11] = "Tree";
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _analysisHeaders [Abs(_analysisHeaders)] = "length("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dS("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dN("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "omega("+branchName+")";
- }
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Local";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = treeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = treeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _returnMe ["length("+branchName+")"] = (_cL["Total"])[_biterator];
- _returnMe ["dS("+branchName+")"] = (_cL["Syn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["S_sites"]);
- _returnMe ["dN("+branchName+")"] = (_cL["NonSyn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["NS_sites"]);
-
- ExecuteCommands ("_lom = _standardizeRatio(codonTree."+treeBranchNames[_biterator]+".nonSynRate,codonTree."+treeBranchNames[_biterator]+".synRate);");
- _returnMe ["omega("+branchName+")"] = _lom;
- }
-
- return _returnMe;
-}
-
-"""
-
-SimpleGlobalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "omega";
- _analysisHeaders[7] = "omega_range";
- _analysisHeaders[8] = "AC";
- _analysisHeaders[9] = "AT";
- _analysisHeaders[10] = "CG";
- _analysisHeaders[11] = "CT";
- _analysisHeaders[12] = "GT";
- _analysisHeaders[13] = "Tree";
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- fprintf (stdout, "[SimpleGlobalFitter.bf on GENE ", myID, "]\\n");
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- _nucSites = filteredData.sites;
-
- if (Abs(treeString))
- {
- givenTreeString = treeString;
- }
- else
- {
- if (_currentGene==1)
- {
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"NJ.bf");
- }
- givenTreeString = InferTreeTopology (0);
- treeString = "";
- }
-
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Global";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = givenTreeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = givenTreeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["Stop_codons"] = (_nucSites-filteredData.sites*3)$3;
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["omega"] = R;
- COVARIANCE_PARAMETER = "R";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- _returnMe ["omega_range"] = ""+cmx[0]+"-"+cmx[2];
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
-
- return _returnMe;
-}
-"""
-
-FastaReader = """
-fscanf (stdin, "String", _coreAnalysis);
-fscanf (stdin, "String", _outputDriver);
-
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"chooseGeneticCode.def");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"dSdNTreeTools.ibf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"CodonTools.bf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"GrabBag.bf");
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
-treeBranchNames = BranchName (givenTree,-1);
-treeBranchCount = Columns (treeBranchNames)-1;
-treeString = Format (givenTree,1,1);
-
-SetDialogPrompt ("Multiple gene FASTA file");
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-fscanf (stdin, "String", modelSpecString);
-fscanf (stdin, "String", _outPath);
-
-ExecuteAFile (_outputDriver);
-ExecuteAFile (_coreAnalysis);
-
-/*---------------------------------------------------------*/
-
-_linesIn = Columns (inLines);
-_currentGene = 1;
- _currentState = 0;
-/* 0 - waiting for a non-empty line */
-/* 1 - reading files */
-
-geneSeqs = "";
-geneSeqs * 0;
-
-_prepareFileOutput (_outPath);
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
-}
-
-_finishFileOutput (0);
-"""
-
-TabWriter = """
-/*---------------------------------------------------------*/
-function _prepareFileOutput (_outPath)
-{
- _outputFilePath = _outPath;
-
- _returnHeaders = returnResultHeaders(0);
-
- fprintf (_outputFilePath, CLEAR_FILE, KEEP_OPEN, _returnHeaders[0]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",_returnHeaders[_biterator]);
- }
-
-
-
- fprintf (_outputFilePath,"\\n");
- return 0;
-}
-
-/*---------------------------------------------------------*/
-
-function _processAGene (valid, _geneID)
-{
- if (valid)
- {
- returnValue = runAGeneFit (_geneID);
- fprintf (_outputFilePath, returnValue[_returnHeaders[0]]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",returnValue[_returnHeaders[_biterator]]);
- }
- fprintf (_outputFilePath, "\\n");
- }
- /*
- else
- {
- fprintf (_outputFilePath,
- _geneID, ", Incorrect number of sequences\\n");
- }
- */
- _currentState = 0;
- return 0;
-}
-
-/*---------------------------------------------------------*/
-function _finishFileOutput (dummy)
-{
- return 0;
-}
-"""
-
-def get_dnds_config_filename(Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename ):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the DATA READER */
-
-_genomeScreenOptions ["0"] = "%s";
- /* which analysis to run on each gene; */
-_genomeScreenOptions ["1"] = "%s";
- /* what output to produce; */
-_genomeScreenOptions ["2"] = "%s";
- /* genetic code */
-_genomeScreenOptions ["3"] = "%s";
- /* tree file */
-_genomeScreenOptions ["4"] = "%s";
- /* alignment file */
-_genomeScreenOptions ["5"] = "%s";
- /* nucleotide bias string; can define any of the 203 models */
-_genomeScreenOptions ["6"] = "%s";
- /* output csv file */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename )
- return get_filled_temp_filename(contents)
-
-
-def get_branch_lengths_config_filename(input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the NucDataBranchLengths.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "CUSTOM";
- /* use an arbitrary nucleotide model */
-_genomeScreenOptions ["2"] = "%s";
- /* which model to use */
-_genomeScreenOptions ["3"] = "%s";
- /* model options */
-_genomeScreenOptions ["4"] = "Estimated";
- /* rate parameters */
-_genomeScreenOptions ["5"] = "%s";
- /* base frequencies */
-_genomeScreenOptions ["6"] = "%s";
- /* the tree to use; */
-_genomeScreenOptions ["7"] = "%s";
- /* write .csv output to; */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_tree_config_filename(input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTree.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-_genomeScreenOptions ["2"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["3"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_treeMF_config_filename(input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTreeMF.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the multiple alignment file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["2"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-_genomeScreenOptions ["3"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename)
- return get_filled_temp_filename(contents)
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tool-data/shared/ucsc/ucsc_build_sites.txt
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -5,4 +5,3 @@
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
test http://genome-test.cse.ucsc.edu/cgi-bin/hgTracks? anoCar1,ce4,ce3,ce2,ce1,loxAfr1,rn2,eschColi_O157H7_1,rn4,droYak1,heliPylo_J99_1,droYak2,dp3,dp2,caeRem2,caeRem1,oryLat1,eschColi_K12_1,homIni13,homIni14,droAna1,droAna2,oryCun1,sacCer1,heliHepa1,droGri1,sc1,dasNov1,choHof1,tupBel1,mm9,mm8,vibrChol1,mm5,mm4,mm7,mm6,mm3,mm2,rn3,venter1,galGal3,galGal2,ornAna1,equCab1,cioSav2,rheMac2,eutHer13,droPer1,droVir2,droVir1,heliPylo_26695_1,euaGli13,calJac1,campJeju1,droSim1,hg13,hg15,hg16,hg17,monDom1,monDom4,droMoj1,petMar1,droMoj2,vibrChol_MO10_1,vibrPara1,gliRes13,vibrVuln_YJ016_1,braFlo1,cioSav1,lauRas13,dm1,canFam1,canFam2,ci1,echTel1,ci2,caePb1,dm3,ponAbe2,falciparum,xenTro1,xenTro2,nonAfr13,fr2,fr1,gasAcu1,dm2,apiMel1,apiMel2,eschColi_O157H7EDL933_1,priPac1,panTro1,hg18,panTro2,campJeju_RM1221_1,canHg12,vibrChol_O395_1,vibrFisc_ES114_1,danRer5,danRer4,danRer3,danRer2,danRer1,tetNig1,afrOth13,bosTau1,eschColi_CFT073_1,bosTau3,bosTau2,bosTau4,rodEnt13,droEre1,priMat13,vibrVuln_CMCP6_1,cb2,cb3,cb1,borEut13,droSec1,felCat3,strPur1,strPur2,otoGar1,catArr1,anoGam1,triCas2
ucla http://epigenomics.mcdb.ucla.edu/cgi-bin/hgTracks? araTha1
-psu bx main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -4,32 +4,19 @@
<tool file="data_source/upload.xml" /><tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" />
- <tool file="data_source/flymine.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -87,7 +74,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="features" name="Extract Features"><tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
@@ -111,7 +97,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -169,11 +154,6 @@
<tool file="multivariate_stats/kpca.xml" /><tool file="multivariate_stats/kcca.xml" /></section>
- <section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <!-- <tool file="hyphy/hyphy_dnds_wrapper.xml" /> -->
- </section><section id="clustal" name="Multiple Alignments"><tool file="rgenetics/rgClustalw.xml" /></section>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -5,7 +5,6 @@
<tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_test.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/microbial_import.xml" /><tool file="data_source/biomart.xml" />
@@ -13,34 +12,18 @@
<tool file="data_source/cbi_rice_mart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/flymine.xml" />
- <tool file="data_source/flymine_test.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" />
- <tool file="data_source/metabolicmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/wormbase_test.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" />
- <tool file="data_source/epigraph_import_test.xml" /><tool file="data_source/hbvar.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /><tool file="validation/fix_errors.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" />
- <tool file="data_destination/epigraph_test.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -81,7 +64,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="convert" name="Convert Formats"><tool file="filters/axt_to_concat_fasta.xml" />
@@ -124,7 +106,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -189,9 +170,6 @@
<tool file="multivariate_stats/kcca.xml" /></section><section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <tool file="hyphy/hyphy_dnds_wrapper.xml" /><tool file="evolution/codingSnps.xml" /><tool file="evolution/add_scores.xml" /></section>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_destination/epigraph.xml
--- a/tools/data_destination/epigraph.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_export">
- <description> and prediction with EpiGRAPH</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
-
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_destination/epigraph_test.xml
--- a/tools/data_destination/epigraph_test.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_test_export">
- <description> and prediction with EpiGRAPH Test</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH test website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/bx_browser.xml
--- a/tools/data_source/bx_browser.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="BX" id="bx_browser" tool_type="data_source">
- <description>table browser</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://main.genome-browser.bx.psu.edu/cgi-bin/hgTables" check_values="false" method="get">
- <display>go to BX Browser $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
- <param name="tool_id" type="hidden" value="bx_browser" />
- <param name="sendToGalaxy" type="hidden" value="1" />
- <param name="hgta_compressType" type="hidden" value="none" />
- <param name="hgta_outputType" type="hidden" value="bed" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
- <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
- <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
- <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
- <value_translation>
- <value galaxy_value="tabular" remote_value="primaryTable" />
- <value galaxy_value="tabular" remote_value="selectedFields" />
- <value galaxy_value="wig" remote_value="wigData" />
- <value galaxy_value="interval" remote_value="tab" />
- <value galaxy_value="html" remote_value="hyperlinks" />
- <value galaxy_value="fasta" remote_value="sequence" />
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="tabular" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/encode_db.xml
--- a/tools/data_source/encode_db.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0"?>
-
-<tool name="EncodeDB" id="encode_db1">
-
- <description>
- at NHGRI
- </description>
-
- <command interpreter="python">
- fetch.py "$url" $output
- </command>
-
- <inputs action="http://research.nhgri.nih.gov/projects/ENCODEdb/cgi-bin/power_query.cgi" target="_top">
-<!-- <inputs action="http://localhost:9000/prepared"> -->
- <display>go to EncodeDB $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/async/encode_db1" />
- </inputs>
-
- <uihints minwidth="800"/>
-
- <outputs>
- <data format="bed" name="output" />
- </outputs>
-
- <options sanitize="False" refresh="True"/>
-
-</tool>
\ No newline at end of file
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/epigraph_import.xml
--- a/tools/data_source/epigraph_import.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import" tool_type="data_source">
- <description> server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/epigraph_import_test.xml
--- a/tools/data_source/epigraph_import_test.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import_test" tool_type="data_source">
- <description> test server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import_test" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/flymine.xml
--- a/tools/data_source/flymine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine" id="flymine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.flymine.org" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/flymine_test.xml
--- a/tools/data_source/flymine_test.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine test" id="flymine_test" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://preview.flymine.org/preview/begin.do" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/metabolicmine.xml
--- a/tools/data_source/metabolicmine.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<tool name="metabolicMine" id="metabolicmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.metabolicmine.org/beta/begin.do" check_values="false" method="get">
- <display>go to metabolicMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/modmine.xml
--- a/tools/data_source/modmine.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="modENCODE modMine" id="modmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://intermine.modencode.org/" check_values="false" method="get">
- <display>go to modENCODE modMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/mousemine.xml
--- a/tools/data_source/mousemine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="MouseMine" id="mousemine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.mousemine.org/mousemine/begin.do" check_values="false" method="get">
- <display>go to MouseMine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=mousemine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="MouseMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/ratmine.xml
--- a/tools/data_source/ratmine.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Ratmine" id="ratmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://ratmine.mcw.edu/ratmine/begin.do" check_values="false" method="get">
- <display>go to Ratmine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=ratmine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="Ratmine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/data_source/yeastmine.xml
--- a/tools/data_source/yeastmine.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0"?>
-<tool name="YeastMine" id="yeastmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://yeastmine.yeastgenome.org/yeastmine/begin.do" check_values="false" method="get">
- <display>go to yeastMine server $GALAXY_URL</display>
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/encode/gencode_partition.xml
--- a/tools/encode/gencode_partition.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<tool id="gencode_partition1" name="Gencode Partition">
- <description>an interval file</description>
- <command interpreter="python">split_by_partitions.py ${GALAXY_DATA_INDEX_DIR} $input1 $out_file1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Partition"/>
- </inputs>
- <outputs>
- <data name="out_file1" format="bed"/>
- </outputs>
- <tests>
- <test>
- <param name="input1" value="encode_1.bed"/>
- <output name="out_file1" file="gencode_partition_out.bed"/>
- </test>
- </tests>
- <help>
-For detailed information about partitioning, click here_.
-
-.. _here: http://genome.imim.es/gencode/wiki/index.php/Collecting_Feature_Sets_from_A…
-
-Datasets are partitioned according to the protocol below:
-
-A partition scheme has been defined, similar to what has previously been done with TARs/TRANSFRAGs, such that any feature can be classified as falling into one of the following 7 categories:
- 1. **Coding** -- coding exons defined from the GENCODE experimentally verified coding set (coding in any transcript)
- 2. **5UTR** -- 5' UTR exons defined from the GENCODE experimentally verified coding set (5' UTR in some transcript but never coding in any other)
- 3. **3UTR** -- 3' UTR exons defined from the GENCODE experimentally verified coding set (3' UTR in some transcript but never coding in any other)
- 4. **Intronic Proximal** -- intronic and no more than 5kb away from an exon.
- 5. **Intergenic Proximal** -- between genes and no more than 5kb away from an exon.
- 6. **Intronic Distal** -- intronic and greater than 5kb away from an exon.
- 7. **Intergenic Distal** -- between genes and greater than 5kb away from an exon.
-
------
-
-.. class:: infomark
-
-**Note:** Features overlapping more than one partition will take the identity of the lower-numbered partition.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
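The priority rule in the help text above ("features overlapping more than one partition take the identity of the lower-numbered partition") reduces to taking the minimum category index among a feature's overlaps. A sketch of just that rule, with the seven category names taken from the help text (the actual split_by_partitions.py is not shown in this diff):

# Illustrative sketch of the priority rule from the help text above;
# not the actual split_by_partitions.py logic.
PARTITIONS = [
    "Coding", "5UTR", "3UTR",
    "Intronic Proximal", "Intergenic Proximal",
    "Intronic Distal", "Intergenic Distal",
]

def classify(overlapping_indices):
    """A feature overlapping several partitions takes the lowest-numbered one."""
    return PARTITIONS[min(overlapping_indices)]

# Overlapping both 3UTR (index 2) and Intronic Proximal (index 3) -> 3UTR:
assert classify([3, 2]) == "3UTR"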
diff -r c21482ac06f8319cd3879eaebcdb17b6939b0e9e -r 401ee23dcf2f70d4be0e975bb3e00a43ae1dfdd0 tools/encode/random_intervals.xml
--- a/tools/encode/random_intervals.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<tool id="random_intervals1" name="Random Intervals">
-<description>create a random set of intervals</description>
- <command interpreter="python">random_intervals_no_bits.py $regions $input2 $input1 $out_file1 ${input2.metadata.chromCol} ${input2.metadata.startCol} ${input2.metadata.endCol} ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} $use_mask $strand_overlaps ${GALAXY_DATA_INDEX_DIR}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Mimick">
-    <validator type="unspecified_build" message="Unspecified build; this tool works with data from genome builds hg16 or hg17. Click the pencil icon in your history item to set the genome build."/>
- </param>
- <param name="input2" type="data" format="interval" label="Intervals to Mask"/>
- <param name="use_mask" type="select" label="Use mask">
- <option value="no_mask">No</option>
- <option value="use_mask">Yes</option>
- </param>
- <param name="strand_overlaps" type="select" label="Allow overlaps">
- <option value="all">Any</option>
- <option value="strand">Across Strands</option>
- <option value="none">None</option>
- </param>
- <param name="regions" type="select" label="Regions to use">
- <options from_file="regions.loc">
- <column name="name" index="2"/>
- <column name="value" index="1"/>
- <column name="dbkey" index="0"/>
- <filter type="data_meta" ref="input1" key="dbkey" column="0" />
- <validator type="no_options" message="This tool currently only works with ENCODE data from genome builds hg16 or hg17."/>
- </options>
- </param>
- </inputs>
- <outputs>
- <data name="out_file1" format="input"/>
- </outputs>
- <help>
-
-.. class:: warningmark
-
-This tool currently only works with ENCODE data from genome builds hg16 or hg17.
-
------
-
-.. class:: infomark
-
-**Note:** If you do not wish to mask a set of intervals, change the Use Mask option to No; this option overrides any Mask file selected.
-
------
-
-**Syntax**
-
-This tool will attempt to create a random set of intervals that mimic those found within your source file. You may also specify a set of intervals to mask.
-
-**Allow overlaps** options
- * **Across Strands** - random regions are allowed to overlap only if they are on different strands.
- * **Any** - all overlaps are allowed.
- * **None** - no overlapping regions are allowed.
-
-**Regions to use** options
- * Bounding region of interest based on the dataset build.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
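The behaviour the help text above describes -- mimic the lengths of the source intervals inside a bounding region, optionally rejecting overlaps -- can be sketched in a few lines. This is an assumption-laden toy, not random_intervals_no_bits.py itself (which also handles masking, strand-aware overlaps, and the regions.loc bounding regions):

# Toy sketch of length-mimicking random placement; illustrative only.
import random

def random_mimic(lengths, region_start, region_end, allow_overlap=True,
                 max_tries=1000):
    """Place intervals with the given lengths uniformly in one region."""
    placed = []
    for length in lengths:
        for _ in range(max_tries):
            start = random.randint(region_start, region_end - length)
            end = start + length
            # Accept immediately if overlaps are allowed, otherwise only
            # when the candidate is disjoint from everything placed so far.
            if allow_overlap or all(end <= s or start >= e for s, e in placed):
                placed.append((start, end))
                break
        else:
            raise RuntimeError("could not place %d bp without overlap" % length)
    return placed

# Mimic three features inside a 100 kb bounding region, overlaps forbidden:
print(random_mimic([500, 1200, 300], 0, 100000, allow_overlap=False))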
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/275a49bb9e1d/
Changeset: 275a49bb9e1d
User: natefoo
Date: 2014-01-27 19:59:03
Summary: Merge tool removal from next-stable.
Affected #: 33 files
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -4,28 +4,6 @@
: ${HOSTTYPE:=`uname -m`}
-# link to HYPHY is arch-dependent
-case "$OSTYPE" in
- linux-gnu)
- kernel=`uname -r | cut -f1,2 -d.`
- HYPHY="/galaxy/software/linux$kernel-$HOSTTYPE/hyphy"
- ;;
- darwin*)
- this_minor=`uname -r | awk -F. '{print ($1-4)}'`
- machine=`machine`
- for minor in `jot - 3 $this_minor 1`; do
- HYPHY="/galaxy/software/macosx10.$minor-$machine/hyphy"
- [ -d "$HYPHY" ] && break
- done
- [ ! -d "$HYPHY" ] && unset HYPHY
- ;;
- solaris2.10)
- # For the psu-production builder which is Solaris, but jobs run on a
- # Linux cluster
- HYPHY="/galaxy/software/linux2.6-x86_64/hyphy"
- ;;
-esac
-
LINKS="
/galaxy/data/location/add_scores.loc
/galaxy/data/location/all_fasta.loc
@@ -121,12 +99,6 @@
ln -sf $link tool-data
done
- if [ -d "$HYPHY" ]; then
- echo "Linking $HYPHY"
- rm -f tool-data/HYPHY
- ln -sf $HYPHY tool-data/HYPHY
- fi
-
if [ -d "$JARS" ]; then
echo "Linking $JARS"
rm -f tool-data/shared/jars
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b doc/source/lib/galaxy.tools.util.rst
--- a/doc/source/lib/galaxy.tools.util.rst
+++ b/doc/source/lib/galaxy.tools.util.rst
@@ -9,14 +9,6 @@
:undoc-members:
:show-inheritance:
-:mod:`hyphy_util` Module
-------------------------
-
-.. automodule:: galaxy.tools.util.hyphy_util
- :members:
- :undoc-members:
- :show-inheritance:
-
:mod:`maf_utilities` Module
---------------------------
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b lib/galaxy/tools/util/hyphy_util.py
--- a/lib/galaxy/tools/util/hyphy_util.py
+++ /dev/null
@@ -1,1163 +0,0 @@
-#Dan Blankenberg
-#Contains file contents and helper methods for HYPHY configurations
-import tempfile, os
-
-def get_filled_temp_filename(contents):
- fh = tempfile.NamedTemporaryFile('w')
- filename = fh.name
- fh.close()
- fh = open(filename, 'w')
- fh.write(contents)
- fh.close()
- return filename
-
-NJ_tree_shared_ibf = """
-COUNT_GAPS_IN_FREQUENCIES = 0;
-methodIndex = 1;
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function InferTreeTopology(verbFlag)
-{
- distanceMatrix = {ds.species,ds.species};
-
- MESSAGE_LOGGING = 0;
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"chooseDistanceFormula.def");
- InitializeDistances (0);
-
- for (i = 0; i<ds.species; i=i+1)
- {
- for (j = i+1; j<ds.species; j = j+1)
- {
- distanceMatrix[i][j] = ComputeDistanceFormula (i,j);
- }
- }
-
- MESSAGE_LOGGING = 1;
- cladesMade = 1;
-
-
- if (ds.species == 2)
- {
- d1 = distanceMatrix[0][1]/2;
- treeNodes = {{0,1,d1__},
- {1,1,d1__},
- {2,0,0}};
-
- cladesInfo = {{2,0}};
- }
- else
- {
- if (ds.species == 3)
- {
- /* generate least squares estimates here */
-
- d1 = (distanceMatrix[0][1]+distanceMatrix[0][2]-distanceMatrix[1][2])/2;
- d2 = (distanceMatrix[0][1]-distanceMatrix[0][2]+distanceMatrix[1][2])/2;
- d3 = (distanceMatrix[1][2]+distanceMatrix[0][2]-distanceMatrix[0][1])/2;
-
- treeNodes = {{0,1,d1__},
- {1,1,d2__},
- {2,1,d3__}
- {3,0,0}};
-
- cladesInfo = {{3,0}};
- }
- else
- {
- njm = (distanceMatrix > methodIndex)>=ds.species;
-
- treeNodes = {2*(ds.species+1),3};
- cladesInfo = {ds.species-1,2};
-
- for (i=Rows(treeNodes)-1; i>=0; i=i-1)
- {
- treeNodes[i][0] = njm[i][0];
- treeNodes[i][1] = njm[i][1];
- treeNodes[i][2] = njm[i][2];
- }
-
- for (i=Rows(cladesInfo)-1; i>=0; i=i-1)
- {
- cladesInfo[i][0] = njm[i][3];
- cladesInfo[i][1] = njm[i][4];
- }
-
- njm = 0;
- }
- }
- return 1.0;
-}
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function TreeMatrix2TreeString (doLengths)
-{
- treeString = "";
- p = 0;
- k = 0;
- m = treeNodes[0][1];
- n = treeNodes[0][0];
- treeString*(Rows(treeNodes)*25);
-
- while (m)
- {
- if (m>p)
- {
- if (p)
- {
- treeString*",";
- }
- for (j=p;j<m;j=j+1)
- {
- treeString*"(";
- }
- }
- else
- {
- if (m<p)
- {
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
- }
- else
- {
- treeString*",";
- }
- }
- if (n<ds.species)
- {
- GetString (nodeName, ds, n);
- if (doLengths != 1)
- {
- treeString*nodeName;
- }
- else
- {
- treeString*taxonNameMap[nodeName];
- }
- }
- if (doLengths>.5)
- {
- nodeName = ":"+treeNodes[k][2];
- treeString*nodeName;
- }
- k=k+1;
- p=m;
- n=treeNodes[k][0];
- m=treeNodes[k][1];
- }
-
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
-
- treeString*0;
- return treeString;
-}
-"""
-
-def get_NJ_tree (filename):
- return """
-DISTANCE_PROMPTS = 1;
-ExecuteAFile ("%s");
-
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-/* do sequence to branch map */
-
-taxonNameMap = {};
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
-}
-
-DataSetFilter filteredData = CreateFilter (ds,1);
-InferTreeTopology (0);
-treeString = TreeMatrix2TreeString (1);
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, treeString);
-fscanf (stdin, "String", ps_file);
-
-if (Abs(ps_file))
-{
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (ps_file, CLEAR_FILE, drawLetter, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
-}
-""" % (filename)
-
-def get_NJ_treeMF (filename):
- return """
-ExecuteAFile ("%s");
-
-VERBOSITY_LEVEL = -1;
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-_linesIn = Columns (inLines);
-isomorphicTreesBySequenceCount = {};
-
-/*---------------------------------------------------------*/
-
-_currentGene = 1;
-_currentState = 0;
-geneSeqs = "";
-geneSeqs * 128;
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, KEEP_OPEN);
-treeOutFile = LAST_FILE_PATH;
-
-fscanf (stdin,"String", ps_file);
-if (Abs(ps_file))
-{
- fprintf (ps_file, CLEAR_FILE, KEEP_OPEN);
-}
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- if (Abs(geneSeqs))
- {
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- }
-}
-
-fprintf (treeOutFile,CLOSE_FILE);
-if (Abs(ps_file))
-{
- fprintf (ps_file,CLOSE_FILE);
-}
-/*---------------------------------------------------------*/
-
-function _processAGene (_geneID, nwk_file, ps_file)
-{
- if (ds.species == 1)
- {
- fprintf (nwk_file, _geneID-1, "\\tNone \\tNone\\n");
- return 0;
-
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- /* do sequence to branch map */
-
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}});
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- DISTANCE_PROMPTS = (_geneID==1);
-
- InferTreeTopology (0);
- baseTree = TreeMatrix2TreeString (0);
- UseModel (USE_NO_MODEL);
-
- Tree baseTop = baseTree;
-
- /* standardize this top */
-
- for (k=0; k<Abs(isomorphicTreesBySequenceCount[filteredData.species]); k=k+1)
- {
- testString = (isomorphicTreesBySequenceCount[filteredData.species])[k];
- Tree testTree = testString;
- if (testTree == baseTop)
- {
- baseTree = testString;
- break;
- }
- }
- if (k==Abs(isomorphicTreesBySequenceCount[filteredData.species]))
- {
- if (k==0)
- {
- isomorphicTreesBySequenceCount[filteredData.species] = {};
- }
- (isomorphicTreesBySequenceCount[filteredData.species])[k] = baseTree;
- }
-
- fprintf (nwk_file, _geneID-1, "\\t", baseTree, "\\t", TreeMatrix2TreeString (1), "\\n");
- if (Abs(ps_file))
- {
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (stdout, _geneID, ":", givenTree,"\\n");
- fprintf (ps_file, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
- }
- return 0;
-}
-""" % (filename)
-
-BranchLengthsMF = """
-VERBOSITY_LEVEL = -1;
-
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-
-
-_linesIn = Columns (inLines);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-_currentGene = 1;
-
-_currentState = 0;
-
-geneSeqs = "";
-
-geneSeqs * 128;
-
-
-
-for (l=0; l<_linesIn; l=l+1)
-
-{
-
- if (Abs(inLines[l]) == 0)
-
- {
-
- if (_currentState == 1)
-
- {
-
- geneSeqs * 0;
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- geneSeqs * 128;
-
- _currentGene = _currentGene + 1;
-
- }
-
- }
-
- else
-
- {
-
- if (_currentState == 0)
-
- {
-
- _currentState = 1;
-
- }
-
- geneSeqs * inLines[l];
-
- geneSeqs * "\\n";
-
- }
-
-}
-
-
-
-if (_currentState == 1)
-
-{
-
- geneSeqs * 0;
-
- if (Abs(geneSeqs))
-
- {
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- }
-
-}
-
-
-
-fprintf (resultFile,CLOSE_FILE);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-function _processAGene (_geneID)
-
-{
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- if (_currentGene == 1)
-
- {
-
- SelectTemplateModel (filteredData);
-
-
-
- SetDialogPrompt ("Tree file");
-
- fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
- fscanf (stdin, "String", resultFile);
-
-
-
- /* do sequence to branch map */
-
-
-
- validNames = {};
-
- taxonNameMap = {};
-
-
-
- for (k=0; k<TipCount(givenTree); k=k+1)
-
- {
-
- validNames[TipName(givenTree,k)&&1] = 1;
-
- }
-
-
-
- for (k=0; k<BranchCount(givenTree); k=k+1)
-
- {
-
- thisName = BranchName(givenTree,k);
-
- taxonNameMap[thisName&&1] = thisName;
-
- }
-
-
-
- storeValidNames = validNames;
-
- fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Block\\tBranch\\tLength\\tLowerBound\\tUpperBound\\n");
-
- }
-
- else
-
- {
-
- HarvestFrequencies (vectorOfFrequencies, filteredData, 1,1,1);
-
- validNames = storeValidNames;
-
- }
-
-
-
- for (k=0; k<ds.species; k=k+1)
-
- {
-
- GetString (thisName, ds,k);
-
- shortName = (thisName^{{"\\\\..+",""}})&&1;
-
- if (validNames[shortName])
-
- {
-
- taxonNameMap[shortName] = thisName;
-
- validNames - (shortName);
-
- SetParameter (ds,k,shortName);
-
- }
-
- else
-
- {
-
- fprintf (resultFile,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree,"\\n");
-
- return 0;
-
- }
-
- }
-
-
-
- /* */
-
-
-
- LikelihoodFunction lf = (filteredData,givenTree);
-
- Optimize (res,lf);
-
-
-
- timer = Time(0)-timer;
-
-
-
- branchNames = BranchName (givenTree,-1);
-
- branchLengths = BranchLength (givenTree,-1);
-
-
-
-
-
- for (k=0; k<Columns(branchNames)-1; k=k+1)
-
- {
-
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- if (k==0)
-
- {
-
- /* compute a scaling factor */
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
-
- scaleFactor = BranchLength (givenTree,0);
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
-
- }
-
- fprintf (resultFile,_geneID,"\\t",taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-
- }
-
-
-
- ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-
- global treeScaler = 1;
-
- ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-
- COVARIANCE_PARAMETER = "treeScaler";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- fprintf (resultFile,_geneID,"\\tTotal Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-
- ClearConstraints (givenTree);
-
- return 0;
-
-}
-"""
-
-BranchLengths = """
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-SelectTemplateModel (filteredData);
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-fscanf (stdin, "String", resultFile);
-
-/* do sequence to branch map */
-
-validNames = {};
-taxonNameMap = {};
-
-for (k=0; k<TipCount(givenTree); k=k+1)
-{
- validNames[TipName(givenTree,k)&&1] = 1;
-}
-
-for (k=0; k<BranchCount(givenTree); k=k+1)
-{
- thisName = BranchName(givenTree,k);
- taxonNameMap[thisName&&1] = thisName;
-}
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- if (validNames[shortName])
- {
- taxonNameMap[shortName] = thisName;
- validNames - (shortName);
- SetParameter (ds,k,shortName);
- }
- else
- {
- fprintf (resultFile,CLEAR_FILE,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree);
- return 0;
- }
-}
-
-/* */
-
-LikelihoodFunction lf = (filteredData,givenTree);
-
-Optimize (res,lf);
-
-timer = Time(0)-timer;
-
-branchNames = BranchName (givenTree,-1);
-branchLengths = BranchLength (givenTree,-1);
-
-fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Branch\\tLength\\tLowerBound\\tUpperBound\\n");
-
-for (k=0; k<Columns(branchNames)-1; k=k+1)
-{
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- if (k==0)
- {
- /* compute a scaling factor */
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
- scaleFactor = BranchLength (givenTree,0);
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
- }
- fprintf (resultFile,taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-}
-
-ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-global treeScaler = 1;
-ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-COVARIANCE_PARAMETER = "treeScaler";
-COVARIANCE_PRECISION = 0.95;
-CovarianceMatrix (cmx,lf);
-ClearConstraints (givenTree);
-fprintf (resultFile,"Total Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-fprintf (resultFile,CLOSE_FILE);
-"""
-
-SimpleLocalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "AC";
- _analysisHeaders[7] = "AT";
- _analysisHeaders[8] = "CG";
- _analysisHeaders[9] = "CT";
- _analysisHeaders[10] = "GT";
- _analysisHeaders[11] = "Tree";
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _analysisHeaders [Abs(_analysisHeaders)] = "length("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dS("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dN("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "omega("+branchName+")";
- }
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Local";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = treeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = treeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _returnMe ["length("+branchName+")"] = (_cL["Total"])[_biterator];
- _returnMe ["dS("+branchName+")"] = (_cL["Syn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["S_sites"]);
- _returnMe ["dN("+branchName+")"] = (_cL["NonSyn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["NS_sites"]);
-
- ExecuteCommands ("_lom = _standardizeRatio(codonTree."+treeBranchNames[_biterator]+".nonSynRate,codonTree."+treeBranchNames[_biterator]+".synRate);");
- _returnMe ["omega("+branchName+")"] = _lom;
- }
-
- return _returnMe;
-}
-
-"""
-
-SimpleGlobalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "omega";
- _analysisHeaders[7] = "omega_range";
- _analysisHeaders[8] = "AC";
- _analysisHeaders[9] = "AT";
- _analysisHeaders[10] = "CG";
- _analysisHeaders[11] = "CT";
- _analysisHeaders[12] = "GT";
- _analysisHeaders[13] = "Tree";
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- fprintf (stdout, "[SimpleGlobalFitter.bf on GENE ", myID, "]\\n");
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- _nucSites = filteredData.sites;
-
- if (Abs(treeString))
- {
- givenTreeString = treeString;
- }
- else
- {
- if (_currentGene==1)
- {
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"NJ.bf");
- }
- givenTreeString = InferTreeTopology (0);
- treeString = "";
- }
-
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Global";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = givenTreeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = givenTreeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["Stop_codons"] = (_nucSites-filteredData.sites*3)$3;
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["omega"] = R;
- COVARIANCE_PARAMETER = "R";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- _returnMe ["omega_range"] = ""+cmx[0]+"-"+cmx[2];
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
-
- return _returnMe;
-}
-"""
-
-FastaReader = """
-fscanf (stdin, "String", _coreAnalysis);
-fscanf (stdin, "String", _outputDriver);
-
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"chooseGeneticCode.def");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"dSdNTreeTools.ibf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"CodonTools.bf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"GrabBag.bf");
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
-treeBranchNames = BranchName (givenTree,-1);
-treeBranchCount = Columns (treeBranchNames)-1;
-treeString = Format (givenTree,1,1);
-
-SetDialogPrompt ("Multiple gene FASTA file");
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-fscanf (stdin, "String", modelSpecString);
-fscanf (stdin, "String", _outPath);
-
-ExecuteAFile (_outputDriver);
-ExecuteAFile (_coreAnalysis);
-
-/*---------------------------------------------------------*/
-
-_linesIn = Columns (inLines);
-_currentGene = 1;
- _currentState = 0;
-/* 0 - waiting for a non-empty line */
-/* 1 - reading files */
-
-geneSeqs = "";
-geneSeqs * 0;
-
-_prepareFileOutput (_outPath);
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
-}
-
-_finishFileOutput (0);
-"""
-
-TabWriter = """
-/*---------------------------------------------------------*/
-function _prepareFileOutput (_outPath)
-{
- _outputFilePath = _outPath;
-
- _returnHeaders = returnResultHeaders(0);
-
- fprintf (_outputFilePath, CLEAR_FILE, KEEP_OPEN, _returnHeaders[0]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",_returnHeaders[_biterator]);
- }
-
-
-
- fprintf (_outputFilePath,"\\n");
- return 0;
-}
-
-/*---------------------------------------------------------*/
-
-function _processAGene (valid, _geneID)
-{
- if (valid)
- {
- returnValue = runAGeneFit (_geneID);
- fprintf (_outputFilePath, returnValue[_returnHeaders[0]]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",returnValue[_returnHeaders[_biterator]]);
- }
- fprintf (_outputFilePath, "\\n");
- }
- /*
- else
- {
- fprintf (_outputFilePath,
- _geneID, ", Incorrect number of sequences\\n");
- }
- */
- _currentState = 0;
- return 0;
-}
-
-/*---------------------------------------------------------*/
-function _finishFileOutput (dummy)
-{
- return 0;
-}
-"""
-
-def get_dnds_config_filename(Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename ):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the DATA READER */
-
-_genomeScreenOptions ["0"] = "%s";
- /* which analysis to run on each gene; */
-_genomeScreenOptions ["1"] = "%s";
- /* what output to produce; */
-_genomeScreenOptions ["2"] = "%s";
- /* genetic code */
-_genomeScreenOptions ["3"] = "%s";
- /* tree file */
-_genomeScreenOptions ["4"] = "%s";
- /* alignment file */
-_genomeScreenOptions ["5"] = "%s";
- /* nucleotide bias string; can define any of the 203 models */
-_genomeScreenOptions ["6"] = "%s";
- /* output csv file */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename )
- return get_filled_temp_filename(contents)
-
-
-def get_branch_lengths_config_filename(input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the NucDataBranchLengths.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "CUSTOM";
- /* use an arbitrary nucleotide model */
-_genomeScreenOptions ["2"] = "%s";
- /* which model to use */
-_genomeScreenOptions ["3"] = "%s";
- /* model options */
-_genomeScreenOptions ["4"] = "Estimated";
- /* rate parameters */
-_genomeScreenOptions ["5"] = "%s";
- /* base frequencies */
-_genomeScreenOptions ["6"] = "%s";
- /* the tree to use; */
-_genomeScreenOptions ["7"] = "%s";
- /* write .csv output to; */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_tree_config_filename(input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTree.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-_genomeScreenOptions ["2"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["3"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_treeMF_config_filename(input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTreeMF.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the multiple alignment file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["2"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-_genomeScreenOptions ["3"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename)
- return get_filled_temp_filename(contents)
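Every get_*_config_filename helper in the file removed above follows the same pattern: %-interpolate the option values into a HyPhy _genomeScreenOptions template, write the result to a temp file via get_filled_temp_filename, and return the path for HyPhy to execute. As an aside (not a change made in this commit): get_filled_temp_filename reserves a name by closing a NamedTemporaryFile and reopening it, which is race-prone; a sketch of the same helper using tempfile.mkstemp avoids the window between close and reopen:

# Sketch of the removed helper's pattern using mkstemp, which hands back
# an already-open descriptor instead of reusing a closed temp file's name.
import os
import tempfile

def get_filled_temp_filename(contents):
    """Write `contents` to a fresh temp file and return its path."""
    fd, filename = tempfile.mkstemp(text=True)
    with os.fdopen(fd, "w") as fh:
        fh.write(contents)
    return filename  # caller is responsible for deleting the file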
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tool-data/shared/ucsc/ucsc_build_sites.txt
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -5,4 +5,3 @@
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
test http://genome-test.cse.ucsc.edu/cgi-bin/hgTracks? anoCar1,ce4,ce3,ce2,ce1,loxAfr1,rn2,eschColi_O157H7_1,rn4,droYak1,heliPylo_J99_1,droYak2,dp3,dp2,caeRem2,caeRem1,oryLat1,eschColi_K12_1,homIni13,homIni14,droAna1,droAna2,oryCun1,sacCer1,heliHepa1,droGri1,sc1,dasNov1,choHof1,tupBel1,mm9,mm8,vibrChol1,mm5,mm4,mm7,mm6,mm3,mm2,rn3,venter1,galGal3,galGal2,ornAna1,equCab1,cioSav2,rheMac2,eutHer13,droPer1,droVir2,droVir1,heliPylo_26695_1,euaGli13,calJac1,campJeju1,droSim1,hg13,hg15,hg16,hg17,monDom1,monDom4,droMoj1,petMar1,droMoj2,vibrChol_MO10_1,vibrPara1,gliRes13,vibrVuln_YJ016_1,braFlo1,cioSav1,lauRas13,dm1,canFam1,canFam2,ci1,echTel1,ci2,caePb1,dm3,ponAbe2,falciparum,xenTro1,xenTro2,nonAfr13,fr2,fr1,gasAcu1,dm2,apiMel1,apiMel2,eschColi_O157H7EDL933_1,priPac1,panTro1,hg18,panTro2,campJeju_RM1221_1,canHg12,vibrChol_O395_1,vibrFisc_ES114_1,danRer5,danRer4,danRer3,danRer2,danRer1,tetNig1,afrOth13,bosTau1,eschColi_CFT073_1,bosTau3,bosTau2,bosTau4,rodEnt13,droEre1,priMat13,vibrVuln_CMCP6_1,cb2,cb3,cb1,borEut13,droSec1,felCat3,strPur1,strPur2,otoGar1,catArr1,anoGam1,triCas2
ucla http://epigenomics.mcdb.ucla.edu/cgi-bin/hgTracks? araTha1
-psu bx main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -4,32 +4,19 @@
<tool file="data_source/upload.xml" /><tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" />
- <tool file="data_source/flymine.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -87,7 +74,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="features" name="Extract Features"><tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
@@ -111,7 +97,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -139,7 +124,6 @@
<tool file="plotting/histogram2.xml" /><tool file="plotting/scatterplot.xml" /><tool file="plotting/boxplot.xml" />
- <tool file="visualization/GMAJ.xml" /><tool file="visualization/build_ucsc_custom_track.xml" /><tool file="maf/vcf_to_maf_customtrack.xml" /><tool file="mutation/visualize.xml" />
@@ -170,14 +154,6 @@
<tool file="multivariate_stats/kpca.xml" /><tool file="multivariate_stats/kcca.xml" /></section>
- <section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <!-- <tool file="hyphy/hyphy_dnds_wrapper.xml" /> -->
- </section>
- <section id="motifs" name="Motif Tools">
- <tool file="rgenetics/rgWebLogo3.xml" />
- </section><section id="clustal" name="Multiple Alignments"><tool file="rgenetics/rgClustalw.xml" /></section>
@@ -253,10 +229,6 @@
<tool file="gatk/variant_eval.xml" /><tool file="gatk/variant_combine.xml" /></section>
- <section id="peak_calling" name="NGS: Peak Calling">
- <tool file="peak_calling/macs_wrapper.xml" />
- <tool file="peak_calling/sicer_wrapper.xml" />
- </section><section id="ngs-rna-tools" name="NGS: RNA Analysis"><label id="rna_seq" text="RNA-seq" /><label id="filtering" text="Filtering" />
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -5,7 +5,6 @@
<tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_test.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/microbial_import.xml" /><tool file="data_source/biomart.xml" />
@@ -13,34 +12,18 @@
<tool file="data_source/cbi_rice_mart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/flymine.xml" />
- <tool file="data_source/flymine_test.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" />
- <tool file="data_source/metabolicmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/wormbase_test.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" />
- <tool file="data_source/epigraph_import_test.xml" /><tool file="data_source/hbvar.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /><tool file="validation/fix_errors.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" />
- <tool file="data_destination/epigraph_test.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -81,7 +64,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="convert" name="Convert Formats"><tool file="filters/axt_to_concat_fasta.xml" />
@@ -124,7 +106,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -189,9 +170,6 @@
<tool file="multivariate_stats/kcca.xml" /></section><section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <tool file="hyphy/hyphy_dnds_wrapper.xml" /><tool file="evolution/codingSnps.xml" /><tool file="evolution/add_scores.xml" /></section>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_destination/epigraph.xml
--- a/tools/data_destination/epigraph.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_export">
- <description> and prediction with EpiGRAPH</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
-
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_destination/epigraph_test.xml
--- a/tools/data_destination/epigraph_test.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_test_export">
- <description> and prediction with EpiGRAPH Test</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH test website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/bx_browser.xml
--- a/tools/data_source/bx_browser.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="BX" id="bx_browser" tool_type="data_source">
- <description>table browser</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://main.genome-browser.bx.psu.edu/cgi-bin/hgTables" check_values="false" method="get">
- <display>go to BX Browser $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
- <param name="tool_id" type="hidden" value="bx_browser" />
- <param name="sendToGalaxy" type="hidden" value="1" />
- <param name="hgta_compressType" type="hidden" value="none" />
- <param name="hgta_outputType" type="hidden" value="bed" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
- <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
- <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
- <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
- <value_translation>
- <value galaxy_value="tabular" remote_value="primaryTable" />
- <value galaxy_value="tabular" remote_value="selectedFields" />
- <value galaxy_value="wig" remote_value="wigData" />
- <value galaxy_value="interval" remote_value="tab" />
- <value galaxy_value="html" remote_value="hyperlinks" />
- <value galaxy_value="fasta" remote_value="sequence" />
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="tabular" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/encode_db.xml
--- a/tools/data_source/encode_db.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0"?>
-
-<tool name="EncodeDB" id="encode_db1">
-
- <description>
- at NHGRI
- </description>
-
- <command interpreter="python">
- fetch.py "$url" $output
- </command>
-
- <inputs action="http://research.nhgri.nih.gov/projects/ENCODEdb/cgi-bin/power_query.cgi" target="_top">
-<!-- <inputs action="http://localhost:9000/prepared"> -->
- <display>go to EncodeDB $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/async/encode_db1" />
- </inputs>
-
- <uihints minwidth="800"/>
-
- <outputs>
- <data format="bed" name="output" />
- </outputs>
-
- <options sanitize="False" refresh="True"/>
-
-</tool>
\ No newline at end of file
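Unlike the other removals in this changeset, encode_db1 did not go through data_source.py; its command line was `fetch.py "$url" $output`. The script itself is not shown in this diff, so the following is a hypothetical minimal equivalent (written for modern Python 3 urllib; the 2014-era script would have used urllib/urllib2) that just streams the URL to the output dataset:

# Hypothetical stand-in for fetch.py: download a URL to an output path.
import sys
import urllib.request

def main(url, output_path):
    with urllib.request.urlopen(url) as response, \
         open(output_path, "wb") as out:
        out.write(response.read())

if __name__ == "__main__":
    main(sys.argv[1], sys.argv[2])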
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/epigraph_import.xml
--- a/tools/data_source/epigraph_import.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import" tool_type="data_source">
- <description> server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/epigraph_import_test.xml
--- a/tools/data_source/epigraph_import_test.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import_test" tool_type="data_source">
- <description> test server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import_test" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/flymine.xml
--- a/tools/data_source/flymine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine" id="flymine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.flymine.org" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
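Beyond the plain renames, the FlyMine tool adds a <value_translation>: InterMine always reports data_type as txt, so the literal value txt is rewritten to auto and Galaxy sniffs the real format instead. The same idea as a small sketch (illustrative only):

    VALUE_TRANSLATION = {"data_type": {"txt": "auto"}}  # remote 'txt' -> let Galaxy sniff

    def apply_value_translation(params):
        translated = dict(params)
        for name, mapping in VALUE_TRANSLATION.items():
            if name in translated:
                translated[name] = mapping.get(translated[name], translated[name])
        return translated

    print(apply_value_translation({"data_type": "txt"}))  # {'data_type': 'auto'}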
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/flymine_test.xml
--- a/tools/data_source/flymine_test.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine test" id="flymine_test" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://preview.flymine.org/preview/begin.do" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/metabolicmine.xml
--- a/tools/data_source/metabolicmine.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<tool name="metabolicMine" id="metabolicmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.metabolicmine.org/beta/begin.do" check_values="false" method="get">
- <display>go to metabolicMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/modmine.xml
--- a/tools/data_source/modmine.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="modENCODE modMine" id="modmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://intermine.modencode.org/" check_values="false" method="get">
- <display>go to modENCODE modMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/mousemine.xml
--- a/tools/data_source/mousemine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="MouseMine" id="mousemine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.mousemine.org/mousemine/begin.do" check_values="false" method="get">
- <display>go to MouseMine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=mousemine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="MouseMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/ratmine.xml
--- a/tools/data_source/ratmine.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Ratmine" id="ratmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://ratmine.mcw.edu/ratmine/begin.do" check_values="false" method="get">
- <display>go to Ratmine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=ratmine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="Ratmine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/data_source/yeastmine.xml
--- a/tools/data_source/yeastmine.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0"?>
-<tool name="YeastMine" id="yeastmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://yeastmine.yeastgenome.org/yeastmine/begin.do" check_values="false" method="get">
- <display>go to yeastMine server $GALAXY_URL</display>
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/encode/gencode_partition.xml
--- a/tools/encode/gencode_partition.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<tool id="gencode_partition1" name="Gencode Partition">
- <description>an interval file</description>
- <command interpreter="python">split_by_partitions.py ${GALAXY_DATA_INDEX_DIR} $input1 $out_file1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Partition"/>
- </inputs>
- <outputs>
- <data name="out_file1" format="bed"/>
- </outputs>
- <tests>
- <test>
- <param name="input1" value="encode_1.bed"/>
- <output name="out_file1" file="gencode_partition_out.bed"/>
- </test>
- </tests>
- <help>
-For detailed information about partitioning, click here_.
-
-.. _here: http://genome.imim.es/gencode/wiki/index.php/Collecting_Feature_Sets_from_A…
-
-Datasets are partitioned according to the protocol below:
-
-A partition scheme has been defined, similar to what has previously been done with TARs/TRANSFRAGs, such that any feature can be classified as falling into one of the following 7 categories:
- 1. **Coding** -- coding exons defined from the GENCODE experimentally verified coding set (coding in any transcript)
- 2. **5UTR** -- 5' UTR exons defined from the GENCODE experimentally verified coding set (5' UTR in some transcript but never coding in any other)
- 3. **3UTR** -- 3' UTR exons defined from the GENCODE experimentally verified coding set (3' UTR in some transcript but never coding in any other)
- 4. **Intronic Proximal** -- intronic and no more than 5kb away from an exon.
- 5. **Intergenic Proximal** -- between genes and no more than 5kb away from an exon.
- 6. **Intronic Distal** -- intronic and greater than 5kb away from an exon.
- 7. **Intergenic Distal** -- between genes and greater than 5kb away from an exon.
-
------
-
-.. class:: infomark
-
-**Note:** Features overlapping more than one partition will take the identity of the lower-numbered partition.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
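The infomark note above gives the tie-breaking rule: a feature overlapping several partitions takes the identity of the lowest-numbered one. A toy version of that precedence rule (hypothetical helper; the removed split_by_partitions.py is not shown in this diff):

    # Partitions in precedence order (1 = highest precedence).
    PARTITIONS = ["Coding", "5UTR", "3UTR", "Intronic Proximal",
                  "Intergenic Proximal", "Intronic Distal", "Intergenic Distal"]

    def classify(overlapping):
        # Return the lowest-numbered partition the feature overlaps.
        for partition in PARTITIONS:
            if partition in overlapping:
                return partition
        raise ValueError("feature overlaps no partition")

    print(classify({"Intronic Proximal", "3UTR"}))  # -> '3UTR'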
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 275a49bb9e1d1ae9771d1207b96c68334509b72b tools/encode/random_intervals.xml
--- a/tools/encode/random_intervals.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<tool id="random_intervals1" name="Random Intervals">
-<description>create a random set of intervals</description>
- <command interpreter="python">random_intervals_no_bits.py $regions $input2 $input1 $out_file1 ${input2.metadata.chromCol} ${input2.metadata.startCol} ${input2.metadata.endCol} ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} $use_mask $strand_overlaps ${GALAXY_DATA_INDEX_DIR}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Mimick">
- <validator type="unspecified_build" message="Unspecified build, this tool works with data from genome builds hg16 or hg17. Click the pencil icon in your history item to set the genome build."/>
- </param>
- <param name="input2" type="data" format="interval" label="Intervals to Mask"/>
- <param name="use_mask" type="select" label="Use mask">
- <option value="no_mask">No</option>
- <option value="use_mask">Yes</option>
- </param>
- <param name="strand_overlaps" type="select" label="Allow overlaps">
- <option value="all">Any</option>
- <option value="strand">Across Strands</option>
- <option value="none">None</option>
- </param>
- <param name="regions" type="select" label="Regions to use">
- <options from_file="regions.loc">
- <column name="name" index="2"/>
- <column name="value" index="1"/>
- <column name="dbkey" index="0"/>
- <filter type="data_meta" ref="input1" key="dbkey" column="0" />
- <validator type="no_options" message="This tool currently only works with ENCODE data from genome builds hg16 or hg17."/>
- </options>
- </param>
- </inputs>
- <outputs>
- <data name="out_file1" format="input"/>
- </outputs>
- <help>
-
-.. class:: warningmark
-
-This tool currently only works with ENCODE data from genome builds hg16 or hg17.
-
------
-
-.. class:: infomark
-
-**Note:** If you do not wish to mask a set of intervals, change the Use Mask option to No; this setting overrides any selected Mask files.
-
------
-
-**Syntax**
-
-This tool will attempt to create a random set of intervals that mimic those found within your source file. You may also specify a set of intervals to mask.
-
-**Allow overlaps** options
- * **Across Strands** - random regions are allowed to overlap only if they are on different strands.
- * **Any** - all overlaps are allowed.
- * **None** - no overlapping regions are allowed.
-
-**Regions to use** options
- * Bounding region of interest based on the dataset build.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
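At its core the removed tool draws random intervals whose lengths mimic the source dataset, constrained to a bounding region and optionally avoiding masked intervals and overlaps. A toy sketch of just the length-mimicking step (none of the masking or strand logic that random_intervals_no_bits.py implemented):

    import random

    def random_intervals(source_lengths, region_start, region_end, seed=None):
        rng = random.Random(seed)
        for length in source_lengths:
            # Place an interval of the same length uniformly inside the region.
            start = rng.randrange(region_start, region_end - length + 1)
            yield (start, start + length)

    print(list(random_intervals([500, 1200], 0, 100000, seed=42)))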
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/0c6bf744c5b2/
Changeset: 0c6bf744c5b2
Branch: next-stable
User: natefoo
Date: 2014-01-27 20:00:16
Summary: Merge heads on next-stable
Affected #: 33 files
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -4,28 +4,6 @@
: ${HOSTTYPE:=`uname -m`}
-# link to HYPHY is arch-dependent
-case "$OSTYPE" in
- linux-gnu)
- kernel=`uname -r | cut -f1,2 -d.`
- HYPHY="/galaxy/software/linux$kernel-$HOSTTYPE/hyphy"
- ;;
- darwin*)
- this_minor=`uname -r | awk -F. '{print ($1-4)}'`
- machine=`machine`
- for minor in `jot - 3 $this_minor 1`; do
- HYPHY="/galaxy/software/macosx10.$minor-$machine/hyphy"
- [ -d "$HYPHY" ] && break
- done
- [ ! -d "$HYPHY" ] && unset HYPHY
- ;;
- solaris2.10)
- # For the psu-production builder which is Solaris, but jobs run on a
- # Linux cluster
- HYPHY="/galaxy/software/linux2.6-x86_64/hyphy"
- ;;
-esac
-
LINKS="
/galaxy/data/location/add_scores.loc
/galaxy/data/location/all_fasta.loc
@@ -121,12 +99,6 @@
ln -sf $link tool-data
done
- if [ -d "$HYPHY" ]; then
- echo "Linking $HYPHY"
- rm -f tool-data/HYPHY
- ln -sf $HYPHY tool-data/HYPHY
- fi
-
if [ -d "$JARS" ]; then
echo "Linking $JARS"
rm -f tool-data/shared/jars
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf doc/source/lib/galaxy.tools.util.rst
--- a/doc/source/lib/galaxy.tools.util.rst
+++ b/doc/source/lib/galaxy.tools.util.rst
@@ -9,14 +9,6 @@
:undoc-members:
:show-inheritance:
-:mod:`hyphy_util` Module
-------------------------
-
-.. automodule:: galaxy.tools.util.hyphy_util
- :members:
- :undoc-members:
- :show-inheritance:
-
:mod:`maf_utilities` Module
---------------------------
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf lib/galaxy/tools/util/hyphy_util.py
--- a/lib/galaxy/tools/util/hyphy_util.py
+++ /dev/null
@@ -1,1163 +0,0 @@
-#Dan Blankenberg
-#Contains file contents and helper methods for HYPHY configurations
-import tempfile, os
-
-def get_filled_temp_filename(contents):
- fh = tempfile.NamedTemporaryFile('w')
- filename = fh.name
- fh.close()
- fh = open(filename, 'w')
- fh.write(contents)
- fh.close()
- return filename
-
-NJ_tree_shared_ibf = """
-COUNT_GAPS_IN_FREQUENCIES = 0;
-methodIndex = 1;
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function InferTreeTopology(verbFlag)
-{
- distanceMatrix = {ds.species,ds.species};
-
- MESSAGE_LOGGING = 0;
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"chooseDistanceFormula.def");
- InitializeDistances (0);
-
- for (i = 0; i<ds.species; i=i+1)
- {
- for (j = i+1; j<ds.species; j = j+1)
- {
- distanceMatrix[i][j] = ComputeDistanceFormula (i,j);
- }
- }
-
- MESSAGE_LOGGING = 1;
- cladesMade = 1;
-
-
- if (ds.species == 2)
- {
- d1 = distanceMatrix[0][1]/2;
- treeNodes = {{0,1,d1__},
- {1,1,d1__},
- {2,0,0}};
-
- cladesInfo = {{2,0}};
- }
- else
- {
- if (ds.species == 3)
- {
- /* generate least squares estimates here */
-
- d1 = (distanceMatrix[0][1]+distanceMatrix[0][2]-distanceMatrix[1][2])/2;
- d2 = (distanceMatrix[0][1]-distanceMatrix[0][2]+distanceMatrix[1][2])/2;
- d3 = (distanceMatrix[1][2]+distanceMatrix[0][2]-distanceMatrix[0][1])/2;
-
- treeNodes = {{0,1,d1__},
- {1,1,d2__},
- {2,1,d3__}
- {3,0,0}};
-
- cladesInfo = {{3,0}};
- }
- else
- {
- njm = (distanceMatrix > methodIndex)>=ds.species;
-
- treeNodes = {2*(ds.species+1),3};
- cladesInfo = {ds.species-1,2};
-
- for (i=Rows(treeNodes)-1; i>=0; i=i-1)
- {
- treeNodes[i][0] = njm[i][0];
- treeNodes[i][1] = njm[i][1];
- treeNodes[i][2] = njm[i][2];
- }
-
- for (i=Rows(cladesInfo)-1; i>=0; i=i-1)
- {
- cladesInfo[i][0] = njm[i][3];
- cladesInfo[i][1] = njm[i][4];
- }
-
- njm = 0;
- }
- }
- return 1.0;
-}
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function TreeMatrix2TreeString (doLengths)
-{
- treeString = "";
- p = 0;
- k = 0;
- m = treeNodes[0][1];
- n = treeNodes[0][0];
- treeString*(Rows(treeNodes)*25);
-
- while (m)
- {
- if (m>p)
- {
- if (p)
- {
- treeString*",";
- }
- for (j=p;j<m;j=j+1)
- {
- treeString*"(";
- }
- }
- else
- {
- if (m<p)
- {
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
- }
- else
- {
- treeString*",";
- }
- }
- if (n<ds.species)
- {
- GetString (nodeName, ds, n);
- if (doLengths != 1)
- {
- treeString*nodeName;
- }
- else
- {
- treeString*taxonNameMap[nodeName];
- }
- }
- if (doLengths>.5)
- {
- nodeName = ":"+treeNodes[k][2];
- treeString*nodeName;
- }
- k=k+1;
- p=m;
- n=treeNodes[k][0];
- m=treeNodes[k][1];
- }
-
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
-
- treeString*0;
- return treeString;
-}
-"""
-
-def get_NJ_tree (filename):
- return """
-DISTANCE_PROMPTS = 1;
-ExecuteAFile ("%s");
-
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-/* do sequence to branch map */
-
-taxonNameMap = {};
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
-}
-
-DataSetFilter filteredData = CreateFilter (ds,1);
-InferTreeTopology (0);
-treeString = TreeMatrix2TreeString (1);
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, treeString);
-fscanf (stdin, "String", ps_file);
-
-if (Abs(ps_file))
-{
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (ps_file, CLEAR_FILE, drawLetter, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
-}
-""" % (filename)
-
-def get_NJ_treeMF (filename):
- return """
-ExecuteAFile ("%s");
-
-VERBOSITY_LEVEL = -1;
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-_linesIn = Columns (inLines);
-isomorphicTreesBySequenceCount = {};
-
-/*---------------------------------------------------------*/
-
-_currentGene = 1;
-_currentState = 0;
-geneSeqs = "";
-geneSeqs * 128;
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, KEEP_OPEN);
-treeOutFile = LAST_FILE_PATH;
-
-fscanf (stdin,"String", ps_file);
-if (Abs(ps_file))
-{
- fprintf (ps_file, CLEAR_FILE, KEEP_OPEN);
-}
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- if (Abs(geneSeqs))
- {
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- }
-}
-
-fprintf (treeOutFile,CLOSE_FILE);
-if (Abs(ps_file))
-{
- fprintf (ps_file,CLOSE_FILE);
-}
-/*---------------------------------------------------------*/
-
-function _processAGene (_geneID, nwk_file, ps_file)
-{
- if (ds.species == 1)
- {
- fprintf (nwk_file, _geneID-1, "\\tNone \\tNone\\n");
- return 0;
-
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- /* do sequence to branch map */
-
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}});
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- DISTANCE_PROMPTS = (_geneID==1);
-
- InferTreeTopology (0);
- baseTree = TreeMatrix2TreeString (0);
- UseModel (USE_NO_MODEL);
-
- Tree baseTop = baseTree;
-
- /* standardize this top */
-
- for (k=0; k<Abs(isomorphicTreesBySequenceCount[filteredData.species]); k=k+1)
- {
- testString = (isomorphicTreesBySequenceCount[filteredData.species])[k];
- Tree testTree = testString;
- if (testTree == baseTop)
- {
- baseTree = testString;
- break;
- }
- }
- if (k==Abs(isomorphicTreesBySequenceCount[filteredData.species]))
- {
- if (k==0)
- {
- isomorphicTreesBySequenceCount[filteredData.species] = {};
- }
- (isomorphicTreesBySequenceCount[filteredData.species])[k] = baseTree;
- }
-
- fprintf (nwk_file, _geneID-1, "\\t", baseTree, "\\t", TreeMatrix2TreeString (1), "\\n");
- if (Abs(ps_file))
- {
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (stdout, _geneID, ":", givenTree,"\\n");
- fprintf (ps_file, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
- }
- return 0;
-}
-""" % (filename)
-
-BranchLengthsMF = """
-VERBOSITY_LEVEL = -1;
-
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-
-
-_linesIn = Columns (inLines);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-_currentGene = 1;
-
-_currentState = 0;
-
-geneSeqs = "";
-
-geneSeqs * 128;
-
-
-
-for (l=0; l<_linesIn; l=l+1)
-
-{
-
- if (Abs(inLines[l]) == 0)
-
- {
-
- if (_currentState == 1)
-
- {
-
- geneSeqs * 0;
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- geneSeqs * 128;
-
- _currentGene = _currentGene + 1;
-
- }
-
- }
-
- else
-
- {
-
- if (_currentState == 0)
-
- {
-
- _currentState = 1;
-
- }
-
- geneSeqs * inLines[l];
-
- geneSeqs * "\\n";
-
- }
-
-}
-
-
-
-if (_currentState == 1)
-
-{
-
- geneSeqs * 0;
-
- if (Abs(geneSeqs))
-
- {
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- }
-
-}
-
-
-
-fprintf (resultFile,CLOSE_FILE);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-function _processAGene (_geneID)
-
-{
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- if (_currentGene == 1)
-
- {
-
- SelectTemplateModel (filteredData);
-
-
-
- SetDialogPrompt ("Tree file");
-
- fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
- fscanf (stdin, "String", resultFile);
-
-
-
- /* do sequence to branch map */
-
-
-
- validNames = {};
-
- taxonNameMap = {};
-
-
-
- for (k=0; k<TipCount(givenTree); k=k+1)
-
- {
-
- validNames[TipName(givenTree,k)&&1] = 1;
-
- }
-
-
-
- for (k=0; k<BranchCount(givenTree); k=k+1)
-
- {
-
- thisName = BranchName(givenTree,k);
-
- taxonNameMap[thisName&&1] = thisName;
-
- }
-
-
-
- storeValidNames = validNames;
-
- fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Block\\tBranch\\tLength\\tLowerBound\\tUpperBound\\n");
-
- }
-
- else
-
- {
-
- HarvestFrequencies (vectorOfFrequencies, filteredData, 1,1,1);
-
- validNames = storeValidNames;
-
- }
-
-
-
- for (k=0; k<ds.species; k=k+1)
-
- {
-
- GetString (thisName, ds,k);
-
- shortName = (thisName^{{"\\\\..+",""}})&&1;
-
- if (validNames[shortName])
-
- {
-
- taxonNameMap[shortName] = thisName;
-
- validNames - (shortName);
-
- SetParameter (ds,k,shortName);
-
- }
-
- else
-
- {
-
- fprintf (resultFile,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree,"\\n");
-
- return 0;
-
- }
-
- }
-
-
-
- /* */
-
-
-
- LikelihoodFunction lf = (filteredData,givenTree);
-
- Optimize (res,lf);
-
-
-
- timer = Time(0)-timer;
-
-
-
- branchNames = BranchName (givenTree,-1);
-
- branchLengths = BranchLength (givenTree,-1);
-
-
-
-
-
- for (k=0; k<Columns(branchNames)-1; k=k+1)
-
- {
-
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- if (k==0)
-
- {
-
- /* compute a scaling factor */
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
-
- scaleFactor = BranchLength (givenTree,0);
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
-
- }
-
- fprintf (resultFile,_geneID,"\\t",taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-
- }
-
-
-
- ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-
- global treeScaler = 1;
-
- ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-
- COVARIANCE_PARAMETER = "treeScaler";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- fprintf (resultFile,_geneID,"\\tTotal Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-
- ClearConstraints (givenTree);
-
- return 0;
-
-}
-"""
-
-BranchLengths = """
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-SelectTemplateModel (filteredData);
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-fscanf (stdin, "String", resultFile);
-
-/* do sequence to branch map */
-
-validNames = {};
-taxonNameMap = {};
-
-for (k=0; k<TipCount(givenTree); k=k+1)
-{
- validNames[TipName(givenTree,k)&&1] = 1;
-}
-
-for (k=0; k<BranchCount(givenTree); k=k+1)
-{
- thisName = BranchName(givenTree,k);
- taxonNameMap[thisName&&1] = thisName;
-}
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- if (validNames[shortName])
- {
- taxonNameMap[shortName] = thisName;
- validNames - (shortName);
- SetParameter (ds,k,shortName);
- }
- else
- {
- fprintf (resultFile,CLEAR_FILE,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree);
- return 0;
- }
-}
-
-/* */
-
-LikelihoodFunction lf = (filteredData,givenTree);
-
-Optimize (res,lf);
-
-timer = Time(0)-timer;
-
-branchNames = BranchName (givenTree,-1);
-branchLengths = BranchLength (givenTree,-1);
-
-fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Branch\\tLength\\tLowerBound\\tUpperBound\\n");
-
-for (k=0; k<Columns(branchNames)-1; k=k+1)
-{
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- if (k==0)
- {
- /* compute a scaling factor */
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
- scaleFactor = BranchLength (givenTree,0);
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
- }
- fprintf (resultFile,taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-}
-
-ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-global treeScaler = 1;
-ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-COVARIANCE_PARAMETER = "treeScaler";
-COVARIANCE_PRECISION = 0.95;
-CovarianceMatrix (cmx,lf);
-ClearConstraints (givenTree);
-fprintf (resultFile,"Total Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-fprintf (resultFile,CLOSE_FILE);
-"""
-
-SimpleLocalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "AC";
- _analysisHeaders[7] = "AT";
- _analysisHeaders[8] = "CG";
- _analysisHeaders[9] = "CT";
- _analysisHeaders[10] = "GT";
- _analysisHeaders[11] = "Tree";
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _analysisHeaders [Abs(_analysisHeaders)] = "length("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dS("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dN("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "omega("+branchName+")";
- }
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Local";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = treeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = treeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _returnMe ["length("+branchName+")"] = (_cL["Total"])[_biterator];
- _returnMe ["dS("+branchName+")"] = (_cL["Syn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["S_sites"]);
- _returnMe ["dN("+branchName+")"] = (_cL["NonSyn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["NS_sites"]);
-
- ExecuteCommands ("_lom = _standardizeRatio(codonTree."+treeBranchNames[_biterator]+".nonSynRate,codonTree."+treeBranchNames[_biterator]+".synRate);");
- _returnMe ["omega("+branchName+")"] = _lom;
- }
-
- return _returnMe;
-}
-
-"""
-
-SimpleGlobalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "omega";
- _analysisHeaders[7] = "omega_range";
- _analysisHeaders[8] = "AC";
- _analysisHeaders[9] = "AT";
- _analysisHeaders[10] = "CG";
- _analysisHeaders[11] = "CT";
- _analysisHeaders[12] = "GT";
- _analysisHeaders[13] = "Tree";
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- fprintf (stdout, "[SimpleGlobalFitter.bf on GENE ", myID, "]\\n");
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- _nucSites = filteredData.sites;
-
- if (Abs(treeString))
- {
- givenTreeString = treeString;
- }
- else
- {
- if (_currentGene==1)
- {
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"NJ.bf");
- }
- givenTreeString = InferTreeTopology (0);
- treeString = "";
- }
-
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Global";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = givenTreeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = givenTreeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["Stop_codons"] = (_nucSites-filteredData.sites*3)$3;
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["omega"] = R;
- COVARIANCE_PARAMETER = "R";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- _returnMe ["omega_range"] = ""+cmx[0]+"-"+cmx[2];
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
-
- return _returnMe;
-}
-"""
-
-FastaReader = """
-fscanf (stdin, "String", _coreAnalysis);
-fscanf (stdin, "String", _outputDriver);
-
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"chooseGeneticCode.def");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"dSdNTreeTools.ibf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"CodonTools.bf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"GrabBag.bf");
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
-treeBranchNames = BranchName (givenTree,-1);
-treeBranchCount = Columns (treeBranchNames)-1;
-treeString = Format (givenTree,1,1);
-
-SetDialogPrompt ("Multiple gene FASTA file");
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-fscanf (stdin, "String", modelSpecString);
-fscanf (stdin, "String", _outPath);
-
-ExecuteAFile (_outputDriver);
-ExecuteAFile (_coreAnalysis);
-
-/*---------------------------------------------------------*/
-
-_linesIn = Columns (inLines);
-_currentGene = 1;
- _currentState = 0;
-/* 0 - waiting for a non-empty line */
-/* 1 - reading files */
-
-geneSeqs = "";
-geneSeqs * 0;
-
-_prepareFileOutput (_outPath);
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
-}
-
-_finishFileOutput (0);
-"""
-
-TabWriter = """
-/*---------------------------------------------------------*/
-function _prepareFileOutput (_outPath)
-{
- _outputFilePath = _outPath;
-
- _returnHeaders = returnResultHeaders(0);
-
- fprintf (_outputFilePath, CLEAR_FILE, KEEP_OPEN, _returnHeaders[0]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",_returnHeaders[_biterator]);
- }
-
-
-
- fprintf (_outputFilePath,"\\n");
- return 0;
-}
-
-/*---------------------------------------------------------*/
-
-function _processAGene (valid, _geneID)
-{
- if (valid)
- {
- returnValue = runAGeneFit (_geneID);
- fprintf (_outputFilePath, returnValue[_returnHeaders[0]]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",returnValue[_returnHeaders[_biterator]]);
- }
- fprintf (_outputFilePath, "\\n");
- }
- /*
- else
- {
- fprintf (_outputFilePath,
- _geneID, ", Incorrect number of sequences\\n");
- }
- */
- _currentState = 0;
- return 0;
-}
-
-/*---------------------------------------------------------*/
-function _finishFileOutput (dummy)
-{
- return 0;
-}
-"""
-
-def get_dnds_config_filename(Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename ):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the DATA READER */
-
-_genomeScreenOptions ["0"] = "%s";
- /* which analysis to run on each gene; */
-_genomeScreenOptions ["1"] = "%s";
- /* what output to produce; */
-_genomeScreenOptions ["2"] = "%s";
- /* genetic code */
-_genomeScreenOptions ["3"] = "%s";
- /* tree file */
-_genomeScreenOptions ["4"] = "%s";
- /* alignment file */
-_genomeScreenOptions ["5"] = "%s";
- /* nucleotide bias string; can define any of the 203 models */
-_genomeScreenOptions ["6"] = "%s";
- /* output csv file */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename )
- return get_filled_temp_filename(contents)
-
-
-def get_branch_lengths_config_filename(input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the NucDataBranchLengths.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "CUSTOM";
- /* use an arbitrary nucleotide model */
-_genomeScreenOptions ["2"] = "%s";
- /* which model to use */
-_genomeScreenOptions ["3"] = "%s";
- /* model options */
-_genomeScreenOptions ["4"] = "Estimated";
- /* rate parameters */
-_genomeScreenOptions ["5"] = "%s";
- /* base frequencies */
-_genomeScreenOptions ["6"] = "%s";
- /* the tree to use; */
-_genomeScreenOptions ["7"] = "%s";
- /* write .csv output to; */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_tree_config_filename(input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTree.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-_genomeScreenOptions ["2"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["3"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_treeMF_config_filename(input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTreeMF.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the multiple alignment file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["2"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-_genomeScreenOptions ["3"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename)
- return get_filled_temp_filename(contents)
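One detail of the removed module worth recording: get_filled_temp_filename closed a NamedTemporaryFile (which deletes it) and then re-created a file at the now-free path, a small race between name allocation and reuse. A sketch of the same helper built on tempfile.mkstemp, which keeps the descriptor open instead (assumption: callers still clean the file up themselves, as the HyPhy wrappers did):

    import os
    import tempfile

    def get_filled_temp_filename(contents):
        # mkstemp returns an open descriptor, so the path cannot be
        # recycled between creation and writing.
        fd, filename = tempfile.mkstemp(text=True)
        with os.fdopen(fd, "w") as fh:
            fh.write(contents)
        return filename  # caller removes the file when done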
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tool-data/shared/ucsc/ucsc_build_sites.txt
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -5,4 +5,3 @@
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
test http://genome-test.cse.ucsc.edu/cgi-bin/hgTracks? anoCar1,ce4,ce3,ce2,ce1,loxAfr1,rn2,eschColi_O157H7_1,rn4,droYak1,heliPylo_J99_1,droYak2,dp3,dp2,caeRem2,caeRem1,oryLat1,eschColi_K12_1,homIni13,homIni14,droAna1,droAna2,oryCun1,sacCer1,heliHepa1,droGri1,sc1,dasNov1,choHof1,tupBel1,mm9,mm8,vibrChol1,mm5,mm4,mm7,mm6,mm3,mm2,rn3,venter1,galGal3,galGal2,ornAna1,equCab1,cioSav2,rheMac2,eutHer13,droPer1,droVir2,droVir1,heliPylo_26695_1,euaGli13,calJac1,campJeju1,droSim1,hg13,hg15,hg16,hg17,monDom1,monDom4,droMoj1,petMar1,droMoj2,vibrChol_MO10_1,vibrPara1,gliRes13,vibrVuln_YJ016_1,braFlo1,cioSav1,lauRas13,dm1,canFam1,canFam2,ci1,echTel1,ci2,caePb1,dm3,ponAbe2,falciparum,xenTro1,xenTro2,nonAfr13,fr2,fr1,gasAcu1,dm2,apiMel1,apiMel2,eschColi_O157H7EDL933_1,priPac1,panTro1,hg18,panTro2,campJeju_RM1221_1,canHg12,vibrChol_O395_1,vibrFisc_ES114_1,danRer5,danRer4,danRer3,danRer2,danRer1,tetNig1,afrOth13,bosTau1,eschColi_CFT073_1,bosTau3,bosTau2,bosTau4,rodEnt13,droEre1,priMat13,vibrVuln_CMCP6_1,cb2,cb3,cb1,borEut13,droSec1,felCat3,strPur1,strPur2,otoGar1,catArr1,anoGam1,triCas2
ucla http://epigenomics.mcdb.ucla.edu/cgi-bin/hgTracks? araTha1
-psu bx main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -4,32 +4,19 @@
<tool file="data_source/upload.xml" /><tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" />
- <tool file="data_source/flymine.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -87,7 +74,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="features" name="Extract Features"><tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
@@ -111,7 +97,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -139,7 +124,6 @@
<tool file="plotting/histogram2.xml" /><tool file="plotting/scatterplot.xml" /><tool file="plotting/boxplot.xml" />
- <tool file="visualization/GMAJ.xml" /><tool file="visualization/build_ucsc_custom_track.xml" /><tool file="maf/vcf_to_maf_customtrack.xml" /><tool file="mutation/visualize.xml" />
@@ -170,14 +154,6 @@
<tool file="multivariate_stats/kpca.xml" /><tool file="multivariate_stats/kcca.xml" /></section>
- <section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <!-- <tool file="hyphy/hyphy_dnds_wrapper.xml" /> -->
- </section>
- <section id="motifs" name="Motif Tools">
- <tool file="rgenetics/rgWebLogo3.xml" />
- </section><section id="clustal" name="Multiple Alignments"><tool file="rgenetics/rgClustalw.xml" /></section>
@@ -253,10 +229,6 @@
<tool file="gatk/variant_eval.xml" /><tool file="gatk/variant_combine.xml" /></section>
- <section id="peak_calling" name="NGS: Peak Calling">
- <tool file="peak_calling/macs_wrapper.xml" />
- <tool file="peak_calling/sicer_wrapper.xml" />
- </section><section id="ngs-rna-tools" name="NGS: RNA Analysis"><label id="rna_seq" text="RNA-seq" /><label id="filtering" text="Filtering" />
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -5,7 +5,6 @@
<tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_test.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/microbial_import.xml" /><tool file="data_source/biomart.xml" />
@@ -13,34 +12,18 @@
<tool file="data_source/cbi_rice_mart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/flymine.xml" />
- <tool file="data_source/flymine_test.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" />
- <tool file="data_source/metabolicmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/wormbase_test.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" />
- <tool file="data_source/epigraph_import_test.xml" /><tool file="data_source/hbvar.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /><tool file="validation/fix_errors.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" />
- <tool file="data_destination/epigraph_test.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -81,7 +64,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="convert" name="Convert Formats"><tool file="filters/axt_to_concat_fasta.xml" />
@@ -124,7 +106,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -189,9 +170,6 @@
<tool file="multivariate_stats/kcca.xml" /></section><section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <tool file="hyphy/hyphy_dnds_wrapper.xml" /><tool file="evolution/codingSnps.xml" /><tool file="evolution/add_scores.xml" /></section>
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_destination/epigraph.xml
--- a/tools/data_destination/epigraph.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_export">
- <description> and prediction with EpiGRAPH</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
-
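The <redirect_url_params> line above templates dataset metadata into the outgoing redirect: Galaxy fills GENOME, NAME and INFO from the selected dataset and appends them to REDIRECT_URL. A simplified sketch of that substitution (the Dataset class here is a stand-in, not Galaxy's actual model):

    from dataclasses import dataclass
    from urllib.parse import urlencode

    @dataclass
    class Dataset:  # stand-in for the metadata Galaxy reads off input1
        dbkey: str
        name: str
        info: str

    def build_redirect(redirect_url, d):
        return redirect_url + "?" + urlencode(
            {"GENOME": d.dbkey, "NAME": d.name, "INFO": d.info})

    print(build_redirect(
        "http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/DataImport.jsp",
        Dataset("hg18", "my regions", "uploaded BED")))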
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_destination/epigraph_test.xml
--- a/tools/data_destination/epigraph_test.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_test_export">
- <description> and prediction with EpiGRAPH Test</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH test website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/bx_browser.xml
--- a/tools/data_source/bx_browser.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="BX" id="bx_browser" tool_type="data_source">
- <description>table browser</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://main.genome-browser.bx.psu.edu/cgi-bin/hgTables" check_values="false" method="get">
- <display>go to BX Browser $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
- <param name="tool_id" type="hidden" value="bx_browser" />
- <param name="sendToGalaxy" type="hidden" value="1" />
- <param name="hgta_compressType" type="hidden" value="none" />
- <param name="hgta_outputType" type="hidden" value="bed" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
- <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
- <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
- <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
- <value_translation>
- <value galaxy_value="tabular" remote_value="primaryTable" />
- <value galaxy_value="tabular" remote_value="selectedFields" />
- <value galaxy_value="wig" remote_value="wigData" />
- <value galaxy_value="interval" remote_value="tab" />
- <value galaxy_value="html" remote_value="hyperlinks" />
- <value galaxy_value="fasta" remote_value="sequence" />
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="tabular" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
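The <request_param_translation> block in the tool above maps the remote site's parameter names onto Galaxy's, fills in the 'missing' default when a parameter is absent, and optionally remaps values through a <value_translation> table. A hedged Python sketch of those semantics (not Galaxy's actual implementation):

# Each rule renames a remote parameter, applies the 'missing' default,
# and optionally passes the value through a translation table.
def translate_params(remote_params, rules):
    galaxy_params = {}
    for rule in rules:
        value = remote_params.get(rule["remote_name"], rule["missing"])
        value = rule.get("value_translation", {}).get(value, value)
        galaxy_params[rule["galaxy_name"]] = value
    return galaxy_params

rules = [
    {"galaxy_name": "dbkey", "remote_name": "db", "missing": "?"},
    {"galaxy_name": "data_type", "remote_name": "hgta_outputType",
     "missing": "tabular",
     "value_translation": {"primaryTable": "tabular", "wigData": "wig",
                           "tab": "interval", "hyperlinks": "html",
                           "sequence": "fasta"}},
]
print(translate_params({"db": "hg18", "hgta_outputType": "wigData"}, rules))
# {'dbkey': 'hg18', 'data_type': 'wig'}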
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/encode_db.xml
--- a/tools/data_source/encode_db.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0"?>
-
-<tool name="EncodeDB" id="encode_db1">
-
- <description>
- at NHGRI
- </description>
-
- <command interpreter="python">
- fetch.py "$url" $output
- </command>
-
- <inputs action="http://research.nhgri.nih.gov/projects/ENCODEdb/cgi-bin/power_query.cgi" target="_top">
-<!-- <inputs action="http://localhost:9000/prepared"> -->
- <display>go to EncodeDB $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/async/encode_db1" />
- </inputs>
-
- <uihints minwidth="800"/>
-
- <outputs>
- <data format="bed" name="output" />
- </outputs>
-
- <options sanitize="False" refresh="True"/>
-
-</tool>
\ No newline at end of file
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/epigraph_import.xml
--- a/tools/data_source/epigraph_import.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import" tool_type="data_source">
- <description> server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/epigraph_import_test.xml
--- a/tools/data_source/epigraph_import_test.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import_test" tool_type="data_source">
- <description> test server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import_test" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/flymine.xml
--- a/tools/data_source/flymine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine" id="flymine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.flymine.org" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/flymine_test.xml
--- a/tools/data_source/flymine_test.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine test" id="flymine_test" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://preview.flymine.org/preview/begin.do" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/metabolicmine.xml
--- a/tools/data_source/metabolicmine.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<tool name="metabolicMine" id="metabolicmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.metabolicmine.org/beta/begin.do" check_values="false" method="get">
- <display>go to metabolicMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/modmine.xml
--- a/tools/data_source/modmine.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="modENCODE modMine" id="modmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://intermine.modencode.org/" check_values="false" method="get">
- <display>go to modENCODE modMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/mousemine.xml
--- a/tools/data_source/mousemine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="MouseMine" id="mousemine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.mousemine.org/mousemine/begin.do" check_values="false" method="get">
- <display>go to MouseMine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=mousemine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="MouseMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/ratmine.xml
--- a/tools/data_source/ratmine.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Ratmine" id="ratmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://ratmine.mcw.edu/ratmine/begin.do" check_values="false" method="get">
- <display>go to Ratmine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=ratmine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="Ratmine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/data_source/yeastmine.xml
--- a/tools/data_source/yeastmine.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0"?>
-<tool name="YeastMine" id="yeastmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://yeastmine.yeastgenome.org/yeastmine/begin.do" check_values="false" method="get">
- <display>go to yeastMine server $GALAXY_URL</display>
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
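All of the *mine and EpiGRAPH import tools above share the same <command>: data_source.py receives the output dataset path and the configured size limit, then downloads whatever URL the remote site posts back. A simplified sketch of that fetch loop (the real script reads the URL and related parameters from the job's parameter file; this is not Galaxy's actual code):

import sys
import urllib.request

# Stream a remote URL into the output dataset, enforcing a byte limit.
def fetch(url, output_path, size_limit=None):
    total = 0
    with urllib.request.urlopen(url) as src, open(output_path, "wb") as dst:
        while True:
            chunk = src.read(64 * 1024)
            if not chunk:
                break
            total += len(chunk)
            if size_limit and total > size_limit:
                sys.exit("Downloaded dataset exceeds %d byte limit" % size_limit)
            dst.write(chunk)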
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/encode/gencode_partition.xml
--- a/tools/encode/gencode_partition.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<tool id="gencode_partition1" name="Gencode Partition">
- <description>an interval file</description>
- <command interpreter="python">split_by_partitions.py ${GALAXY_DATA_INDEX_DIR} $input1 $out_file1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Partition"/>
- </inputs>
- <outputs>
- <data name="out_file1" format="bed"/>
- </outputs>
- <tests>
- <test>
- <param name="input1" value="encode_1.bed"/>
- <output name="out_file1" file="gencode_partition_out.bed"/>
- </test>
- </tests>
- <help>
-For detailed information about partitioning, click here_.
-
-.. _here: http://genome.imim.es/gencode/wiki/index.php/Collecting_Feature_Sets_from_A…
-
-Datasets are partitioned according to the protocol below:
-
-A partition scheme has been defined that is similar to what has previously been done with TARs/TRANSFRAGs such that any feature can be classified as falling into one of the following 7 categories:
- 1. **Coding** -- coding exons defined from the GENCODE experimentally verified coding set (coding in any transcript)
- 2. **5UTR** -- 5' UTR exons defined from the GENCODE experimentally verified coding set (5' UTR in some transcript but never coding in any other)
- 3. **3UTR** -- 3' UTR exons defined from the GENCODE experimentally verified coding set (3' UTR in some transcript but never coding in any other)
- 4. **Intronic Proximal** -- intronic and no more than 5kb away from an exon.
- 5. **Intergenic Proximal** -- between genes and no more than 5kb away from an exon.
- 6. **Intronic Distal** -- intronic and greater than 5kb away from an exon.
- 7. **Intergenic Distal** -- between genes and greater than 5kb away from an exon.
-
------
-
-.. class:: infomark
-
-**Note:** Features overlapping more than one partition will take the identity of the lower-numbered partition.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
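The precedence note in the help text above (a feature overlapping several partitions takes the lowest-numbered one) amounts to checking the partitions in order and returning the first hit. An illustrative sketch, not the split_by_partitions.py implementation:

# Partitions are checked in order, so the first (lowest-numbered)
# overlapping partition wins; overlap test uses half-open intervals.
PARTITIONS = ["Coding", "5UTR", "3UTR", "Intronic Proximal",
              "Intergenic Proximal", "Intronic Distal", "Intergenic Distal"]

def classify(feature, partition_intervals):
    """feature: (start, end); partition_intervals: name -> [(start, end)]."""
    start, end = feature
    for name in PARTITIONS:
        if any(start < p_end and p_start < end
               for p_start, p_end in partition_intervals.get(name, [])):
            return name
    return None

print(classify((100, 200), {"3UTR": [(150, 400)],
                            "Intronic Distal": [(90, 210)]}))  # -> 3UTR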
diff -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 -r 0c6bf744c5b25377145df3994198f994193ba0bf tools/encode/random_intervals.xml
--- a/tools/encode/random_intervals.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<tool id="random_intervals1" name="Random Intervals">
-<description>create a random set of intervals</description>
- <command interpreter="python">random_intervals_no_bits.py $regions $input2 $input1 $out_file1 ${input2.metadata.chromCol} ${input2.metadata.startCol} ${input2.metadata.endCol} ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} $use_mask $strand_overlaps ${GALAXY_DATA_INDEX_DIR}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Mimick">
- <validator type="unspecified_build" message="Unspecified build, this tool works with data from genome builds hg16 or hg17. Click the pencil icon in your history item to set the genome build."/>
- </param>
- <param name="input2" type="data" format="interval" label="Intervals to Mask"/>
- <param name="use_mask" type="select" label="Use mask">
- <option value="no_mask">No</option>
- <option value="use_mask">Yes</option>
- </param>
- <param name="strand_overlaps" type="select" label="Allow overlaps">
- <option value="all">Any</option>
- <option value="strand">Across Strands</option>
- <option value="none">None</option>
- </param>
- <param name="regions" type="select" label="Regions to use">
- <options from_file="regions.loc">
- <column name="name" index="2"/>
- <column name="value" index="1"/>
- <column name="dbkey" index="0"/>
- <filter type="data_meta" ref="input1" key="dbkey" column="0" />
- <validator type="no_options" message="This tool currently only works with ENCODE data from genome builds hg16 or hg17."/>
- </options>
- </param>
- </inputs>
- <outputs>
- <data name="out_file1" format="input"/>
- </outputs>
- <help>
-
-.. class:: warningmark
-
-This tool currently only works with ENCODE data from genome builds hg16 or hg17.
-
------
-
-.. class:: infomark
-
-**Note:** If you do not wish to mask a set of intervals, change the Use Mask option to No; this option overrides any Mask files selected.
-
------
-
-**Syntax**
-
-This tool will attempt to create a random set of intervals that mimic those found within your source file. You may also specify a set of intervals to mask.
-
-**Allow overlaps** options
- * **Across Strands** - random regions are allowed to overlap only if they are on different strands.
- * **Any** - all overlaps are allowed.
- * **None** - no overlapping regions are allowed.
-
-**Regions to use** options
- * Bounding region of interest based on the dataset build.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
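The "mimic" behaviour described above, random intervals with the same lengths as the source file and a configurable overlap policy, can be pictured as rejection sampling inside the bounding region. An illustrative sketch under that assumption (random_intervals_no_bits.py's actual algorithm differs, e.g. it also handles masks and strands):

import random

# Place each source interval's length at a random position in the
# bounding region, rejecting candidates that overlap already-placed
# intervals when overlaps are disallowed ("None" policy).
def random_intervals(lengths, region, allow_overlaps=False, max_tries=1000):
    placed = []
    lo, hi = region
    for length in lengths:
        for _ in range(max_tries):
            start = random.randint(lo, hi - length)
            cand = (start, start + length)
            if allow_overlaps or all(
                    cand[1] <= s or e <= cand[0] for s, e in placed):
                placed.append(cand)
                break
        else:
            raise RuntimeError("could not place interval of length %d" % length)
    return placed

print(random_intervals([500, 1000], (0, 100000)))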
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/2210b9de666e/
Changeset: 2210b9de666e
User: natefoo
Date: 2014-01-27 20:00:52
Summary: Merge heads in default.
Affected #: 33 files
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -4,28 +4,6 @@
: ${HOSTTYPE:=`uname -m`}
-# link to HYPHY is arch-dependent
-case "$OSTYPE" in
- linux-gnu)
- kernel=`uname -r | cut -f1,2 -d.`
- HYPHY="/galaxy/software/linux$kernel-$HOSTTYPE/hyphy"
- ;;
- darwin*)
- this_minor=`uname -r | awk -F. '{print ($1-4)}'`
- machine=`machine`
- for minor in `jot - 3 $this_minor 1`; do
- HYPHY="/galaxy/software/macosx10.$minor-$machine/hyphy"
- [ -d "$HYPHY" ] && break
- done
- [ ! -d "$HYPHY" ] && unset HYPHY
- ;;
- solaris2.10)
- # For the psu-production builder which is Solaris, but jobs run on a
- # Linux cluster
- HYPHY="/galaxy/software/linux2.6-x86_64/hyphy"
- ;;
-esac
-
LINKS="
/galaxy/data/location/add_scores.loc
/galaxy/data/location/all_fasta.loc
@@ -121,12 +99,6 @@
ln -sf $link tool-data
done
- if [ -d "$HYPHY" ]; then
- echo "Linking $HYPHY"
- rm -f tool-data/HYPHY
- ln -sf $HYPHY tool-data/HYPHY
- fi
-
if [ -d "$JARS" ]; then
echo "Linking $JARS"
rm -f tool-data/shared/jars
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e doc/source/lib/galaxy.tools.util.rst
--- a/doc/source/lib/galaxy.tools.util.rst
+++ b/doc/source/lib/galaxy.tools.util.rst
@@ -9,14 +9,6 @@
:undoc-members:
:show-inheritance:
-:mod:`hyphy_util` Module
-------------------------
-
-.. automodule:: galaxy.tools.util.hyphy_util
- :members:
- :undoc-members:
- :show-inheritance:
-
:mod:`maf_utilities` Module
---------------------------
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e lib/galaxy/tools/util/hyphy_util.py
--- a/lib/galaxy/tools/util/hyphy_util.py
+++ /dev/null
@@ -1,1163 +0,0 @@
-#Dan Blankenberg
-#Contains file contents and helper methods for HYPHY configurations
-import tempfile, os
-
-def get_filled_temp_filename(contents):
- fh = tempfile.NamedTemporaryFile('w')
- filename = fh.name
- fh.close()
- fh = open(filename, 'w')
- fh.write(contents)
- fh.close()
- return filename
-
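One note on the helper above: closing a NamedTemporaryFile deletes it, so reopening the freed name leaves a brief window where another process could claim it. A race-free equivalent (a sketch, not part of the removed file) would use tempfile.mkstemp:

import os
import tempfile

# mkstemp creates and opens the file atomically, avoiding the
# close-then-reopen window in get_filled_temp_filename() above.
def get_filled_temp_filename(contents):
    fd, filename = tempfile.mkstemp(text=True)
    with os.fdopen(fd, 'w') as fh:
        fh.write(contents)
    return filename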
-NJ_tree_shared_ibf = """
-COUNT_GAPS_IN_FREQUENCIES = 0;
-methodIndex = 1;
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function InferTreeTopology(verbFlag)
-{
- distanceMatrix = {ds.species,ds.species};
-
- MESSAGE_LOGGING = 0;
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"chooseDistanceFormula.def");
- InitializeDistances (0);
-
- for (i = 0; i<ds.species; i=i+1)
- {
- for (j = i+1; j<ds.species; j = j+1)
- {
- distanceMatrix[i][j] = ComputeDistanceFormula (i,j);
- }
- }
-
- MESSAGE_LOGGING = 1;
- cladesMade = 1;
-
-
- if (ds.species == 2)
- {
- d1 = distanceMatrix[0][1]/2;
- treeNodes = {{0,1,d1__},
- {1,1,d1__},
- {2,0,0}};
-
- cladesInfo = {{2,0}};
- }
- else
- {
- if (ds.species == 3)
- {
- /* generate least squares estimates here */
-
- d1 = (distanceMatrix[0][1]+distanceMatrix[0][2]-distanceMatrix[1][2])/2;
- d2 = (distanceMatrix[0][1]-distanceMatrix[0][2]+distanceMatrix[1][2])/2;
- d3 = (distanceMatrix[1][2]+distanceMatrix[0][2]-distanceMatrix[0][1])/2;
-
- treeNodes = {{0,1,d1__},
- {1,1,d2__},
- {2,1,d3__}
- {3,0,0}};
-
- cladesInfo = {{3,0}};
- }
- else
- {
- njm = (distanceMatrix > methodIndex)>=ds.species;
-
- treeNodes = {2*(ds.species+1),3};
- cladesInfo = {ds.species-1,2};
-
- for (i=Rows(treeNodes)-1; i>=0; i=i-1)
- {
- treeNodes[i][0] = njm[i][0];
- treeNodes[i][1] = njm[i][1];
- treeNodes[i][2] = njm[i][2];
- }
-
- for (i=Rows(cladesInfo)-1; i>=0; i=i-1)
- {
- cladesInfo[i][0] = njm[i][3];
- cladesInfo[i][1] = njm[i][4];
- }
-
- njm = 0;
- }
- }
- return 1.0;
-}
-
-/*-----------------------------------------------------------------------------------------------------------------------------------------*/
-
-function TreeMatrix2TreeString (doLengths)
-{
- treeString = "";
- p = 0;
- k = 0;
- m = treeNodes[0][1];
- n = treeNodes[0][0];
- treeString*(Rows(treeNodes)*25);
-
- while (m)
- {
- if (m>p)
- {
- if (p)
- {
- treeString*",";
- }
- for (j=p;j<m;j=j+1)
- {
- treeString*"(";
- }
- }
- else
- {
- if (m<p)
- {
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
- }
- else
- {
- treeString*",";
- }
- }
- if (n<ds.species)
- {
- GetString (nodeName, ds, n);
- if (doLengths != 1)
- {
- treeString*nodeName;
- }
- else
- {
- treeString*taxonNameMap[nodeName];
- }
- }
- if (doLengths>.5)
- {
- nodeName = ":"+treeNodes[k][2];
- treeString*nodeName;
- }
- k=k+1;
- p=m;
- n=treeNodes[k][0];
- m=treeNodes[k][1];
- }
-
- for (j=m;j<p;j=j+1)
- {
- treeString*")";
- }
-
- treeString*0;
- return treeString;
-}
-"""
-
-def get_NJ_tree (filename):
- return """
-DISTANCE_PROMPTS = 1;
-ExecuteAFile ("%s");
-
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-/* do sequence to branch map */
-
-taxonNameMap = {};
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
-}
-
-DataSetFilter filteredData = CreateFilter (ds,1);
-InferTreeTopology (0);
-treeString = TreeMatrix2TreeString (1);
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, treeString);
-fscanf (stdin, "String", ps_file);
-
-if (Abs(ps_file))
-{
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (ps_file, CLEAR_FILE, drawLetter, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
-}
-""" % (filename)
-
-def get_NJ_treeMF (filename):
- return """
-ExecuteAFile ("%s");
-
-VERBOSITY_LEVEL = -1;
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-_linesIn = Columns (inLines);
-isomorphicTreesBySequenceCount = {};
-
-/*---------------------------------------------------------*/
-
-_currentGene = 1;
-_currentState = 0;
-geneSeqs = "";
-geneSeqs * 128;
-
-fprintf (PROMPT_FOR_FILE, CLEAR_FILE, KEEP_OPEN);
-treeOutFile = LAST_FILE_PATH;
-
-fscanf (stdin,"String", ps_file);
-if (Abs(ps_file))
-{
- fprintf (ps_file, CLEAR_FILE, KEEP_OPEN);
-}
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- if (Abs(geneSeqs))
- {
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (_currentGene,treeOutFile,ps_file);
- }
-}
-
-fprintf (treeOutFile,CLOSE_FILE);
-if (Abs(ps_file))
-{
- fprintf (ps_file,CLOSE_FILE);
-}
-/*---------------------------------------------------------*/
-
-function _processAGene (_geneID, nwk_file, ps_file)
-{
- if (ds.species == 1)
- {
- fprintf (nwk_file, _geneID-1, "\\tNone \\tNone\\n");
- return 0;
-
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- /* do sequence to branch map */
-
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}});
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- DISTANCE_PROMPTS = (_geneID==1);
-
- InferTreeTopology (0);
- baseTree = TreeMatrix2TreeString (0);
- UseModel (USE_NO_MODEL);
-
- Tree baseTop = baseTree;
-
- /* standardize this top */
-
- for (k=0; k<Abs(isomorphicTreesBySequenceCount[filteredData.species]); k=k+1)
- {
- testString = (isomorphicTreesBySequenceCount[filteredData.species])[k];
- Tree testTree = testString;
- if (testTree == baseTop)
- {
- baseTree = testString;
- break;
- }
- }
- if (k==Abs(isomorphicTreesBySequenceCount[filteredData.species]))
- {
- if (k==0)
- {
- isomorphicTreesBySequenceCount[filteredData.species] = {};
- }
- (isomorphicTreesBySequenceCount[filteredData.species])[k] = baseTree;
- }
-
- fprintf (nwk_file, _geneID-1, "\\t", baseTree, "\\t", TreeMatrix2TreeString (1), "\\n");
- if (Abs(ps_file))
- {
- treeString = TreeMatrix2TreeString (2);
- UseModel (USE_NO_MODEL);
- Tree givenTree = treeString;
- baseHeight = TipCount (givenTree)*28;
- TREE_OUTPUT_OPTIONS = {};
- TREE_OUTPUT_OPTIONS["__FONT_SIZE__"] = 14;
- baseWidth = 0;
- treeAVL = givenTree^0;
- drawLetter = "/drawletter {"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$4+" -"+TREE_OUTPUT_OPTIONS["__FONT_SIZE__"]$2+ " show} def\\n";
- for (k3 = 1; k3 < Abs(treeAVL); k3=k3+1)
- {
- nodeName = (treeAVL[k3])["Name"];
- if(Abs((treeAVL[k3])["Children"]) == 0)
- {
- mySpecs = {};
- mySpecs ["TREE_OUTPUT_BRANCH_LABEL"] = "(" + taxonNameMap[nodeName] + ") drawLetter";
- baseWidth = Max (baseWidth, (treeAVL[k3])["Depth"]);
- }
- }
- baseWidth = 40*baseWidth;
-
- fprintf (stdout, _geneID, ":", givenTree,"\\n");
- fprintf (ps_file, PSTreeString (givenTree, "STRING_SUPPLIED_LENGTHS",{{baseWidth,baseHeight}}));
- }
- return 0;
-}
-""" % (filename)
-
-BranchLengthsMF = """
-VERBOSITY_LEVEL = -1;
-
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-
-
-
-_linesIn = Columns (inLines);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-_currentGene = 1;
-
-_currentState = 0;
-
-geneSeqs = "";
-
-geneSeqs * 128;
-
-
-
-for (l=0; l<_linesIn; l=l+1)
-
-{
-
- if (Abs(inLines[l]) == 0)
-
- {
-
- if (_currentState == 1)
-
- {
-
- geneSeqs * 0;
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- geneSeqs * 128;
-
- _currentGene = _currentGene + 1;
-
- }
-
- }
-
- else
-
- {
-
- if (_currentState == 0)
-
- {
-
- _currentState = 1;
-
- }
-
- geneSeqs * inLines[l];
-
- geneSeqs * "\\n";
-
- }
-
-}
-
-
-
-if (_currentState == 1)
-
-{
-
- geneSeqs * 0;
-
- if (Abs(geneSeqs))
-
- {
-
- DataSet ds = ReadFromString (geneSeqs);
-
- _processAGene (_currentGene);
-
- }
-
-}
-
-
-
-fprintf (resultFile,CLOSE_FILE);
-
-
-
-/*---------------------------------------------------------*/
-
-
-
-function _processAGene (_geneID)
-
-{
-
- DataSetFilter filteredData = CreateFilter (ds,1);
-
- if (_currentGene == 1)
-
- {
-
- SelectTemplateModel (filteredData);
-
-
-
- SetDialogPrompt ("Tree file");
-
- fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
- fscanf (stdin, "String", resultFile);
-
-
-
- /* do sequence to branch map */
-
-
-
- validNames = {};
-
- taxonNameMap = {};
-
-
-
- for (k=0; k<TipCount(givenTree); k=k+1)
-
- {
-
- validNames[TipName(givenTree,k)&&1] = 1;
-
- }
-
-
-
- for (k=0; k<BranchCount(givenTree); k=k+1)
-
- {
-
- thisName = BranchName(givenTree,k);
-
- taxonNameMap[thisName&&1] = thisName;
-
- }
-
-
-
- storeValidNames = validNames;
-
- fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Block\\tBranch\\tLength\\tLowerBound\\tUpperBound\\n");
-
- }
-
- else
-
- {
-
- HarvestFrequencies (vectorOfFrequencies, filteredData, 1,1,1);
-
- validNames = storeValidNames;
-
- }
-
-
-
- for (k=0; k<ds.species; k=k+1)
-
- {
-
- GetString (thisName, ds,k);
-
- shortName = (thisName^{{"\\\\..+",""}})&&1;
-
- if (validNames[shortName])
-
- {
-
- taxonNameMap[shortName] = thisName;
-
- validNames - (shortName);
-
- SetParameter (ds,k,shortName);
-
- }
-
- else
-
- {
-
- fprintf (resultFile,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree,"\\n");
-
- return 0;
-
- }
-
- }
-
-
-
- /* */
-
-
-
- LikelihoodFunction lf = (filteredData,givenTree);
-
- Optimize (res,lf);
-
-
-
- timer = Time(0)-timer;
-
-
-
- branchNames = BranchName (givenTree,-1);
-
- branchLengths = BranchLength (givenTree,-1);
-
-
-
-
-
- for (k=0; k<Columns(branchNames)-1; k=k+1)
-
- {
-
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- if (k==0)
-
- {
-
- /* compute a scaling factor */
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
-
- scaleFactor = BranchLength (givenTree,0);
-
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
-
- }
-
- fprintf (resultFile,_geneID,"\\t",taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-
- }
-
-
-
- ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-
- global treeScaler = 1;
-
- ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-
- COVARIANCE_PARAMETER = "treeScaler";
-
- COVARIANCE_PRECISION = 0.95;
-
- CovarianceMatrix (cmx,lf);
-
- fprintf (resultFile,_geneID,"\\tTotal Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-
- ClearConstraints (givenTree);
-
- return 0;
-
-}
-"""
-
-BranchLengths = """
-DataSet ds = ReadDataFile (PROMPT_FOR_FILE);
-DataSetFilter filteredData = CreateFilter (ds,1);
-
-SelectTemplateModel (filteredData);
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-fscanf (stdin, "String", resultFile);
-
-/* do sequence to branch map */
-
-validNames = {};
-taxonNameMap = {};
-
-for (k=0; k<TipCount(givenTree); k=k+1)
-{
- validNames[TipName(givenTree,k)&&1] = 1;
-}
-
-for (k=0; k<BranchCount(givenTree); k=k+1)
-{
- thisName = BranchName(givenTree,k);
- taxonNameMap[thisName&&1] = thisName;
-}
-
-for (k=0; k<ds.species; k=k+1)
-{
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- if (validNames[shortName])
- {
- taxonNameMap[shortName] = thisName;
- validNames - (shortName);
- SetParameter (ds,k,shortName);
- }
- else
- {
- fprintf (resultFile,CLEAR_FILE,"ERROR:", thisName, " could not be matched to any of the leaves in tree ", givenTree);
- return 0;
- }
-}
-
-/* */
-
-LikelihoodFunction lf = (filteredData,givenTree);
-
-Optimize (res,lf);
-
-timer = Time(0)-timer;
-
-branchNames = BranchName (givenTree,-1);
-branchLengths = BranchLength (givenTree,-1);
-
-fprintf (resultFile,CLEAR_FILE,KEEP_OPEN,"Branch\\tLength\\tLowerBound\\tUpperBound\\n");
-
-for (k=0; k<Columns(branchNames)-1; k=k+1)
-{
- COVARIANCE_PARAMETER = "givenTree."+branchNames[k]+".t";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- if (k==0)
- {
- /* compute a scaling factor */
- ExecuteCommands ("givenTree."+branchNames[0]+".t=1");
- scaleFactor = BranchLength (givenTree,0);
- ExecuteCommands ("givenTree."+branchNames[0]+".t="+cmx[0][1]);
- }
- fprintf (resultFile,taxonNameMap[branchNames[k]&&1],"\\t",branchLengths[k],"\\t",scaleFactor*cmx[0][0],"\\t",scaleFactor*cmx[0][2],"\\n");
-}
-
-ttl = (branchLengths*(Transpose(branchLengths["1"])))[0];
-global treeScaler = 1;
-ReplicateConstraint ("this1.?.t:=treeScaler*this2.?.t__",givenTree,givenTree);
-COVARIANCE_PARAMETER = "treeScaler";
-COVARIANCE_PRECISION = 0.95;
-CovarianceMatrix (cmx,lf);
-ClearConstraints (givenTree);
-fprintf (resultFile,"Total Tree\\t",ttl,"\\t",ttl*cmx[0][0],"\\t",ttl*cmx[0][2],"\\n");
-fprintf (resultFile,CLOSE_FILE);
-"""
-
-SimpleLocalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "AC";
- _analysisHeaders[7] = "AT";
- _analysisHeaders[8] = "CG";
- _analysisHeaders[9] = "CT";
- _analysisHeaders[10] = "GT";
- _analysisHeaders[11] = "Tree";
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _analysisHeaders [Abs(_analysisHeaders)] = "length("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dS("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "dN("+branchName+")";
- _analysisHeaders [Abs(_analysisHeaders)] = "omega("+branchName+")";
- }
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Local";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = treeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = treeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
- for (_biterator = 0; _biterator < treeBranchCount; _biterator = _biterator + 1)
- {
- branchName = treeBranchNames[_biterator];
-
- _returnMe ["length("+branchName+")"] = (_cL["Total"])[_biterator];
- _returnMe ["dS("+branchName+")"] = (_cL["Syn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["S_sites"]);
- _returnMe ["dN("+branchName+")"] = (_cL["NonSyn"])[_biterator]*(_returnMe ["BP"]/_returnMe ["NS_sites"]);
-
- ExecuteCommands ("_lom = _standardizeRatio(codonTree."+treeBranchNames[_biterator]+".nonSynRate,codonTree."+treeBranchNames[_biterator]+".synRate);");
- _returnMe ["omega("+branchName+")"] = _lom;
- }
-
- return _returnMe;
-}
-
-"""
-
-SimpleGlobalFitter = """
-VERBOSITY_LEVEL = -1;
-COUNT_GAPS_IN_FREQUENCIES = 0;
-
-/*---------------------------------------------------------*/
-
-function returnResultHeaders (dummy)
-{
- _analysisHeaders = {};
- _analysisHeaders[0] = "BLOCK";
- _analysisHeaders[1] = "BP";
- _analysisHeaders[2] = "S_sites";
- _analysisHeaders[3] = "NS_sites";
- _analysisHeaders[4] = "Stop_codons";
- _analysisHeaders[5] = "LogL";
- _analysisHeaders[6] = "omega";
- _analysisHeaders[7] = "omega_range";
- _analysisHeaders[8] = "AC";
- _analysisHeaders[9] = "AT";
- _analysisHeaders[10] = "CG";
- _analysisHeaders[11] = "CT";
- _analysisHeaders[12] = "GT";
- _analysisHeaders[13] = "Tree";
-
- return _analysisHeaders;
-}
-
-/*---------------------------------------------------------*/
-
-function runAGeneFit (myID)
-{
- fprintf (stdout, "[SimpleGlobalFitter.bf on GENE ", myID, "]\\n");
- taxonNameMap = {};
-
- for (k=0; k<ds.species; k=k+1)
- {
- GetString (thisName, ds,k);
- shortName = (thisName^{{"\\\\..+",""}})&&1;
- taxonNameMap[shortName] = thisName;
- SetParameter (ds,k,shortName);
- }
-
- DataSetFilter filteredData = CreateFilter (ds,1);
- _nucSites = filteredData.sites;
-
- if (Abs(treeString))
- {
- givenTreeString = treeString;
- }
- else
- {
- if (_currentGene==1)
- {
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"NJ.bf");
- }
- givenTreeString = InferTreeTopology (0);
- treeString = "";
- }
-
- DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
-
- if (_currentGene==1)
- {
- _MG94stdinOverload = {};
- _MG94stdinOverload ["0"] = "Global";
- _MG94stdinOverload ["1"] = modelSpecString;
-
- ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"MG94custom.mdl",
- _MG94stdinOverload);
-
- Tree codonTree = givenTreeString;
- }
- else
- {
- HarvestFrequencies (observedFreq,filteredData,3,1,1);
- MULTIPLY_BY_FREQS = PopulateModelMatrix ("MG94custom", observedFreq);
- vectorOfFrequencies = BuildCodonFrequencies (observedFreq);
- Model MG94customModel = (MG94custom,vectorOfFrequencies,0);
-
- Tree codonTree = givenTreeString;
- }
-
- LikelihoodFunction lf = (filteredData,codonTree);
-
- Optimize (res,lf);
-
- _snsAVL = _computeSNSSites ("filteredData", _Genetic_Code, vectorOfFrequencies, 0);
- _cL = ReturnVectorsOfCodonLengths (ComputeScalingStencils (0), "codonTree");
-
-
- _returnMe = {};
- _returnMe ["BLOCK"] = myID;
- _returnMe ["LogL"] = res[1][0];
- _returnMe ["BP"] = _snsAVL ["Sites"];
- _returnMe ["S_sites"] = _snsAVL ["SSites"];
- _returnMe ["NS_sites"] = _snsAVL ["NSSites"];
- _returnMe ["Stop_codons"] = (_nucSites-filteredData.sites*3)$3;
- _returnMe ["AC"] = AC;
- _returnMe ["AT"] = AT;
- _returnMe ["CG"] = CG;
- _returnMe ["CT"] = CT;
- _returnMe ["GT"] = GT;
- _returnMe ["omega"] = R;
- COVARIANCE_PARAMETER = "R";
- COVARIANCE_PRECISION = 0.95;
- CovarianceMatrix (cmx,lf);
- _returnMe ["omega_range"] = ""+cmx[0]+"-"+cmx[2];
- _returnMe ["Tree"] = Format(codonTree,0,1);
-
-
- return _returnMe;
-}
-"""
-
-FastaReader = """
-fscanf (stdin, "String", _coreAnalysis);
-fscanf (stdin, "String", _outputDriver);
-
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"TemplateModels"+DIRECTORY_SEPARATOR+"chooseGeneticCode.def");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"dSdNTreeTools.ibf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"CodonTools.bf");
-ExecuteAFile (HYPHY_BASE_DIRECTORY+"TemplateBatchFiles"+DIRECTORY_SEPARATOR+"Utility"+DIRECTORY_SEPARATOR+"GrabBag.bf");
-
-SetDialogPrompt ("Tree file");
-fscanf (PROMPT_FOR_FILE, "Tree", givenTree);
-
-treeBranchNames = BranchName (givenTree,-1);
-treeBranchCount = Columns (treeBranchNames)-1;
-treeString = Format (givenTree,1,1);
-
-SetDialogPrompt ("Multiple gene FASTA file");
-fscanf (PROMPT_FOR_FILE, "Lines", inLines);
-fscanf (stdin, "String", modelSpecString);
-fscanf (stdin, "String", _outPath);
-
-ExecuteAFile (_outputDriver);
-ExecuteAFile (_coreAnalysis);
-
-/*---------------------------------------------------------*/
-
-_linesIn = Columns (inLines);
-_currentGene = 1;
- _currentState = 0;
-/* 0 - waiting for a non-empty line */
-/* 1 - reading files */
-
-geneSeqs = "";
-geneSeqs * 0;
-
-_prepareFileOutput (_outPath);
-
-for (l=0; l<_linesIn; l=l+1)
-{
- if (Abs(inLines[l]) == 0)
- {
- if (_currentState == 1)
- {
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
- geneSeqs * 128;
- _currentGene = _currentGene + 1;
- }
- }
- else
- {
- if (_currentState == 0)
- {
- _currentState = 1;
- }
- geneSeqs * inLines[l];
- geneSeqs * "\\n";
- }
-}
-
-if (_currentState == 1)
-{
- geneSeqs * 0;
- DataSet ds = ReadFromString (geneSeqs);
- _processAGene (ds.species == treeBranchCount,_currentGene);
-}
-
-_finishFileOutput (0);
-"""
-
-TabWriter = """
-/*---------------------------------------------------------*/
-function _prepareFileOutput (_outPath)
-{
- _outputFilePath = _outPath;
-
- _returnHeaders = returnResultHeaders(0);
-
- fprintf (_outputFilePath, CLEAR_FILE, KEEP_OPEN, _returnHeaders[0]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",_returnHeaders[_biterator]);
- }
-
-
-
- fprintf (_outputFilePath,"\\n");
- return 0;
-}
-
-/*---------------------------------------------------------*/
-
-function _processAGene (valid, _geneID)
-{
- if (valid)
- {
- returnValue = runAGeneFit (_geneID);
- fprintf (_outputFilePath, returnValue[_returnHeaders[0]]);
- for (_biterator = 1; _biterator < Abs(_returnHeaders); _biterator = _biterator + 1)
- {
- fprintf (_outputFilePath,"\\t",returnValue[_returnHeaders[_biterator]]);
- }
- fprintf (_outputFilePath, "\\n");
- }
- /*
- else
- {
- fprintf (_outputFilePath,
- _geneID, ", Incorrect number of sequences\\n");
- }
- */
- _currentState = 0;
- return 0;
-}
-
-/*---------------------------------------------------------*/
-function _finishFileOutput (dummy)
-{
- return 0;
-}
-"""
-
-def get_dnds_config_filename(Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename ):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the DATA READER */
-
-_genomeScreenOptions ["0"] = "%s";
- /* which analysis to run on each gene; */
-_genomeScreenOptions ["1"] = "%s";
- /* what output to produce; */
-_genomeScreenOptions ["2"] = "%s";
- /* genetic code */
-_genomeScreenOptions ["3"] = "%s";
- /* tree file */
-_genomeScreenOptions ["4"] = "%s";
- /* alignment file */
-_genomeScreenOptions ["5"] = "%s";
- /* nucleotide bias string; can define any of the 203 models */
-_genomeScreenOptions ["6"] = "%s";
- /* output csv file */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (Fitter_filename, TabWriter_filename, genetic_code, tree_filename, input_filename, nuc_model, output_filename, FastaReader_filename )
- return get_filled_temp_filename(contents)
-
-
-def get_branch_lengths_config_filename(input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the NucDataBranchLengths.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "CUSTOM";
- /* use an arbitrary nucleotide model */
-_genomeScreenOptions ["2"] = "%s";
- /* which model to use */
-_genomeScreenOptions ["3"] = "%s";
- /* model options */
-_genomeScreenOptions ["4"] = "Estimated";
- /* rate parameters */
-_genomeScreenOptions ["5"] = "%s";
- /* base frequencies */
-_genomeScreenOptions ["6"] = "%s";
- /* the tree to use; */
-_genomeScreenOptions ["7"] = "%s";
- /* write .csv output to; */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, nuc_model, model_options, base_freq, tree_filename, output_filename, BranchLengths_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_tree_config_filename(input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTree.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-_genomeScreenOptions ["2"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["3"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, distance_metric, output_filename1, output_filename2, NJ_tree_filename)
- return get_filled_temp_filename(contents)
-
-
-def get_nj_treeMF_config_filename(input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename):
- contents = """
-_genomeScreenOptions = {};
-
-/* all paths are either absolute or relative
-to the BuildNJTreeMF.bf */
-
-_genomeScreenOptions ["0"] = "%s";
- /* the multiple alignment file to analyze; */
-_genomeScreenOptions ["1"] = "%s";
- /* write Newick tree output to; */
-_genomeScreenOptions ["2"] = "%s";
- /* write a postscript tree file to this file; leave blank to not write a tree */
-_genomeScreenOptions ["3"] = "%s";
- /* pick which distance metric to use; TN93 is a good default */
-
-ExecuteAFile ("%s", _genomeScreenOptions);
-""" % (input_filename, output_filename1, output_filename2, distance_metric, NJ_tree_filename)
- return get_filled_temp_filename(contents)
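Each of the removed get_*_config_filename helpers wrote a temporary HyPhy Batch Language options file and returned its path; the Galaxy wrappers then handed that file to the HyPhy executable, which runs the ExecuteAFile line at the bottom of the config. A hedged usage sketch (the HYPHYMP executable name and the BuildNJTree.bf path are assumptions, not taken from this commit):

import subprocess
from galaxy.tools.util.hyphy_util import get_nj_tree_config_filename

# Illustrative use of the module this commit removes.
config = get_nj_tree_config_filename(
    input_filename="alignment.fasta",
    distance_metric="TN93",       # noted as a good default in the comments above
    output_filename1="tree.nwk",  # Newick tree output
    output_filename2="tree.ps",   # PostScript drawing; blank string to skip
    NJ_tree_filename="/opt/hyphy/TemplateBatchFiles/BuildNJTree.bf",
)
subprocess.check_call(["HYPHYMP", config])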
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tool-data/shared/ucsc/ucsc_build_sites.txt
--- a/tool-data/shared/ucsc/ucsc_build_sites.txt
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt
@@ -5,4 +5,3 @@
#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
test http://genome-test.cse.ucsc.edu/cgi-bin/hgTracks? anoCar1,ce4,ce3,ce2,ce1,loxAfr1,rn2,eschColi_O157H7_1,rn4,droYak1,heliPylo_J99_1,droYak2,dp3,dp2,caeRem2,caeRem1,oryLat1,eschColi_K12_1,homIni13,homIni14,droAna1,droAna2,oryCun1,sacCer1,heliHepa1,droGri1,sc1,dasNov1,choHof1,tupBel1,mm9,mm8,vibrChol1,mm5,mm4,mm7,mm6,mm3,mm2,rn3,venter1,galGal3,galGal2,ornAna1,equCab1,cioSav2,rheMac2,eutHer13,droPer1,droVir2,droVir1,heliPylo_26695_1,euaGli13,calJac1,campJeju1,droSim1,hg13,hg15,hg16,hg17,monDom1,monDom4,droMoj1,petMar1,droMoj2,vibrChol_MO10_1,vibrPara1,gliRes13,vibrVuln_YJ016_1,braFlo1,cioSav1,lauRas13,dm1,canFam1,canFam2,ci1,echTel1,ci2,caePb1,dm3,ponAbe2,falciparum,xenTro1,xenTro2,nonAfr13,fr2,fr1,gasAcu1,dm2,apiMel1,apiMel2,eschColi_O157H7EDL933_1,priPac1,panTro1,hg18,panTro2,campJeju_RM1221_1,canHg12,vibrChol_O395_1,vibrFisc_ES114_1,danRer5,danRer4,danRer3,danRer2,danRer1,tetNig1,afrOth13,bosTau1,eschColi_CFT073_1,bosTau3,bosTau2,bosTau4,rodEnt13,droEre1,priMat13,vibrVuln_CMCP6_1,cb2,cb3,cb1,borEut13,droSec1,felCat3,strPur1,strPur2,otoGar1,catArr1,anoGam1,triCas2
ucla http://epigenomics.mcdb.ucla.edu/cgi-bin/hgTracks? araTha1
-psu bx main http://main.genome-browser.bx.psu.edu/cgi-bin/hgTracks? hg18,hg19,mm8,mm9
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -4,32 +4,19 @@
<tool file="data_source/upload.xml" /><tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" />
- <tool file="data_source/flymine.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -87,7 +74,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="features" name="Extract Features"><tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
@@ -111,7 +97,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -139,7 +124,6 @@
<tool file="plotting/histogram2.xml" /><tool file="plotting/scatterplot.xml" /><tool file="plotting/boxplot.xml" />
- <tool file="visualization/GMAJ.xml" /><tool file="visualization/build_ucsc_custom_track.xml" /><tool file="maf/vcf_to_maf_customtrack.xml" /><tool file="mutation/visualize.xml" />
@@ -170,14 +154,6 @@
<tool file="multivariate_stats/kpca.xml" /><tool file="multivariate_stats/kcca.xml" /></section>
- <section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <!-- <tool file="hyphy/hyphy_dnds_wrapper.xml" /> -->
- </section>
- <section id="motifs" name="Motif Tools">
- <tool file="rgenetics/rgWebLogo3.xml" />
- </section><section id="clustal" name="Multiple Alignments"><tool file="rgenetics/rgClustalw.xml" /></section>
@@ -253,10 +229,6 @@
<tool file="gatk/variant_eval.xml" /><tool file="gatk/variant_combine.xml" /></section>
- <section id="peak_calling" name="NGS: Peak Calling">
- <tool file="peak_calling/macs_wrapper.xml" />
- <tool file="peak_calling/sicer_wrapper.xml" />
- </section><section id="ngs-rna-tools" name="NGS: RNA Analysis"><label id="rna_seq" text="RNA-seq" /><label id="filtering" text="Filtering" />
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -5,7 +5,6 @@
<tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_test.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" />
- <tool file="data_source/bx_browser.xml" /><tool file="data_source/ebi_sra.xml" /><tool file="data_source/microbial_import.xml" /><tool file="data_source/biomart.xml" />
@@ -13,34 +12,18 @@
<tool file="data_source/cbi_rice_mart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/fly_modencode.xml" />
- <tool file="data_source/flymine.xml" />
- <tool file="data_source/flymine_test.xml" />
- <tool file="data_source/modmine.xml" />
- <tool file="data_source/mousemine.xml" />
- <tool file="data_source/ratmine.xml" />
- <tool file="data_source/yeastmine.xml" />
- <tool file="data_source/metabolicmine.xml" /><tool file="data_source/worm_modencode.xml" /><tool file="data_source/wormbase.xml" /><tool file="data_source/wormbase_test.xml" /><tool file="data_source/eupathdb.xml" />
- <tool file="data_source/encode_db.xml" />
- <tool file="data_source/epigraph_import.xml" />
- <tool file="data_source/epigraph_import_test.xml" /><tool file="data_source/hbvar.xml" /><tool file="genomespace/genomespace_file_browser_prod.xml" /><tool file="genomespace/genomespace_importer.xml" /><tool file="validation/fix_errors.xml" /></section><section id="send" name="Send Data">
- <tool file="data_destination/epigraph.xml" />
- <tool file="data_destination/epigraph_test.xml" /><tool file="genomespace/genomespace_exporter.xml" /></section>
- <section id="EncodeTools" name="ENCODE Tools">
- <tool file="encode/gencode_partition.xml" />
- <tool file="encode/random_intervals.xml" />
- </section><section id="liftOver" name="Lift-Over"><tool file="extract/liftOver_wrapper.xml" /></section>
@@ -81,7 +64,6 @@
<tool file="filters/compare.xml" /><tool file="new_operations/subtract_query.xml" /><tool file="stats/grouping.xml" />
- <tool file="new_operations/column_join.xml" /></section><section id="convert" name="Convert Formats"><tool file="filters/axt_to_concat_fasta.xml" />
@@ -124,7 +106,6 @@
<section id="scores" name="Get Genomic Scores"><tool file="stats/wiggle_to_simple.xml" /><tool file="stats/aggregate_binned_scores_in_intervals.xml" />
- <tool file="extract/phastOdds/phastOdds_tool.xml" /></section><section id="bxops" name="Operate on Genomic Intervals"><tool file="new_operations/intersect.xml" />
@@ -189,9 +170,6 @@
<tool file="multivariate_stats/kcca.xml" /></section><section id="hyphy" name="Evolution">
- <tool file="hyphy/hyphy_branch_lengths_wrapper.xml" />
- <tool file="hyphy/hyphy_nj_tree_wrapper.xml" />
- <tool file="hyphy/hyphy_dnds_wrapper.xml" /><tool file="evolution/codingSnps.xml" /><tool file="evolution/add_scores.xml" /></section>
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_destination/epigraph.xml
--- a/tools/data_destination/epigraph.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_export">
- <description> and prediction with EpiGRAPH</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
-
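The data_destination tool removed above works by redirecting the browser: the rendered redirect_url_params (GENOME, NAME, INFO) and the baseurl-typed params (DATA_URL, GALAXY_URL) are appended to REDIRECT_URL. The sketch below is illustrative only; build_redirect_url is a hypothetical helper and the parameter values are made up, not Galaxy code.

try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode       # Python 2, the codebase's era

def build_redirect_url(redirect_base, params):
    # Combine the tool's REDIRECT_URL with the rendered parameters
    # before sending the user's browser to the external service.
    return '%s?%s' % (redirect_base, urlencode(params))

print(build_redirect_url(
    'http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/DataImport.jsp',
    {'GENOME': 'hg18', 'NAME': 'example.bed',
     'DATA_URL': 'http://galaxy.example.org/datasets'},
))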
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_destination/epigraph_test.xml
--- a/tools/data_destination/epigraph_test.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0"?>
-<tool name="Perform genome analysis" id="epigraph_test_export">
- <description> and prediction with EpiGRAPH Test</description>
- <redirect_url_params>GENOME=${input1.dbkey} NAME=${input1.name} INFO=${input1.info}</redirect_url_params>
- <inputs>
- <param format="bed" name="input1" type="data" label="Send this dataset to EpiGRAPH">
- <validator type="unspecified_build" />
- </param>
- <param name="REDIRECT_URL" type="hidden" value="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/DataImport.jsp" />
- <param name="DATA_URL" type="baseurl" value="/datasets" />
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <outputs/>
- <help>
-
-.. class:: warningmark
-
-After clicking the **Execute** button, you will be redirected to the EpiGRAPH test website. Please be patient while the dataset is being imported. Inside EpiGRAPH, buttons are available to send the results of the EpiGRAPH analysis back to Galaxy. In addition, you can always abandon an EpiGRAPH session and return to Galaxy by directing your browser to your current Galaxy instance.
-
------
-
-.. class:: infomark
-
-**What it does**
-
-This tool sends the selected dataset to EpiGRAPH in order to perform an in-depth analysis with statistical and machine learning methods.
-
------
-
-.. class:: infomark
-
-**EpiGRAPH outline**
-
-The EpiGRAPH_ web service enables biologists to uncover hidden associations in vertebrate genome and epigenome datasets. Users can upload or import sets of genomic regions and EpiGRAPH will test a wide range of attributes (including DNA sequence and structure, gene density, chromatin modifications and evolutionary conservation) for enrichment or depletion among these regions. Furthermore, EpiGRAPH learns to predictively identify genomic regions that exhibit similar properties.
-
-.. _EpiGRAPH: http://epigraph.mpi-inf.mpg.de/
-
- </help>
-</tool>
-
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/bx_browser.xml
--- a/tools/data_source/bx_browser.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="BX" id="bx_browser" tool_type="data_source">
- <description>table browser</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://main.genome-browser.bx.psu.edu/cgi-bin/hgTables" check_values="false" method="get">
- <display>go to BX Browser $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
- <param name="tool_id" type="hidden" value="bx_browser" />
- <param name="sendToGalaxy" type="hidden" value="1" />
- <param name="hgta_compressType" type="hidden" value="none" />
- <param name="hgta_outputType" type="hidden" value="bed" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
- <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
- <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
- <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
- <value_translation>
- <value galaxy_value="tabular" remote_value="primaryTable" />
- <value galaxy_value="tabular" remote_value="selectedFields" />
- <value galaxy_value="wig" remote_value="wigData" />
- <value galaxy_value="interval" remote_value="tab" />
- <value galaxy_value="html" remote_value="hyperlinks" />
- <value galaxy_value="fasta" remote_value="sequence" />
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="tabular" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
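The request_param_translation block in the removed tool above declares how a data_source tool maps remote parameter names (db, org, hgta_outputType) and values (for example wigData to wig) onto Galaxy-side names, with a fallback when a parameter is missing from the response. The following is a minimal sketch of those semantics, not Galaxy's actual implementation; translate_params and the rules structure are invented for illustration.

def translate_params(remote_params, rules):
    # Apply each declared rule: take the remote value (or the
    # 'missing' fallback), then run it through the optional
    # value_translation table.
    galaxy_params = {}
    for rule in rules:
        raw = remote_params.get(rule['remote_name'], rule['missing'])
        values = rule.get('values', {})
        galaxy_params[rule['galaxy_name']] = values.get(raw, raw)
    return galaxy_params

rules = [
    {'galaxy_name': 'dbkey', 'remote_name': 'db', 'missing': '?'},
    {'galaxy_name': 'data_type', 'remote_name': 'hgta_outputType',
     'missing': 'tabular', 'values': {'wigData': 'wig', 'sequence': 'fasta'}},
]

print(translate_params({'db': 'hg18', 'hgta_outputType': 'wigData'}, rules))
# -> {'dbkey': 'hg18', 'data_type': 'wig'}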
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/encode_db.xml
--- a/tools/data_source/encode_db.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0"?>
-
-<tool name="EncodeDB" id="encode_db1">
-
- <description>
- at NHGRI
- </description>
-
- <command interpreter="python">
- fetch.py "$url" $output
- </command>
-
- <inputs action="http://research.nhgri.nih.gov/projects/ENCODEdb/cgi-bin/power_query.cgi" target="_top">
-<!-- <inputs action="http://localhost:9000/prepared"> -->
- <display>go to EncodeDB $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/async/encode_db1" />
- </inputs>
-
- <uihints minwidth="800"/>
-
- <outputs>
- <data format="bed" name="output" />
- </outputs>
-
- <options sanitize="False" refresh="True"/>
-
-</tool>
\ No newline at end of file
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/epigraph_import.xml
--- a/tools/data_source/epigraph_import.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import" tool_type="data_source">
- <description> server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/epigraph_import_test.xml
--- a/tools/data_source/epigraph_import_test.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="EpiGRAPH" id="epigraph_import_test" tool_type="data_source">
- <description> test server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://epigraph.mpi-inf.mpg.de/WebGRAPH_Public_Test/faces/Login.jsp" check_values="false" method="get">
- <display>go to EpiGRAPH server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=epigraph_import_test" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="GENOME" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="NAME" missing="EpiGRAPH query" />
- <request_param galaxy_name="info" remote_name="INFO" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/flymine.xml
--- a/tools/data_source/flymine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine" id="flymine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.flymine.org" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/flymine_test.xml
--- a/tools/data_source/flymine_test.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Flymine test" id="flymine_test" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://preview.flymine.org/preview/begin.do" check_values="false" method="get">
- <display>go to Flymine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/metabolicmine.xml
--- a/tools/data_source/metabolicmine.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0"?>
-<tool name="metabolicMine" id="metabolicmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.metabolicmine.org/beta/begin.do" check_values="false" method="get">
- <display>go to metabolicMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/modmine.xml
--- a/tools/data_source/modmine.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="modENCODE modMine" id="modmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://intermine.modencode.org/" check_values="false" method="get">
- <display>go to modENCODE modMine server $GALAXY_URL</display>
- </inputs>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/mousemine.xml
--- a/tools/data_source/mousemine.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="MouseMine" id="mousemine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://www.mousemine.org/mousemine/begin.do" check_values="false" method="get">
- <display>go to MouseMine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=mousemine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="MouseMine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
-
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/ratmine.xml
--- a/tools/data_source/ratmine.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<!--
- If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
- the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
- initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
--->
-<tool name="Ratmine" id="ratmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://ratmine.mcw.edu/ratmine/begin.do" check_values="false" method="get">
- <display>go to Ratmine server $GALAXY_URL</display>
- <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=ratmine" />
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
- <request_param galaxy_name="URL" remote_name="URL" missing="" />
- <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
- <request_param galaxy_name="organism" remote_name="organism" missing="" />
- <request_param galaxy_name="table" remote_name="table" missing="" />
- <request_param galaxy_name="description" remote_name="description" missing="" />
- <request_param galaxy_name="name" remote_name="name" missing="Ratmine query" />
- <request_param galaxy_name="info" remote_name="info" missing="" />
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/data_source/yeastmine.xml
--- a/tools/data_source/yeastmine.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0"?>
-<tool name="YeastMine" id="yeastmine" tool_type="data_source">
- <description>server</description>
- <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <inputs action="http://yeastmine.yeastgenome.org/yeastmine/begin.do" check_values="false" method="get">
- <display>go to yeastMine server $GALAXY_URL</display>
- </inputs>
- <request_param_translation>
- <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
- <value_translation>
- <value galaxy_value="auto" remote_value="txt" /><!-- intermine currently always provides 'txt', make this auto detect -->
- </value_translation>
- </request_param>
- </request_param_translation>
- <uihints minwidth="800"/>
- <outputs>
- <data name="output" format="txt" />
- </outputs>
- <options sanitize="False" refresh="True"/>
-</tool>
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/encode/gencode_partition.xml
--- a/tools/encode/gencode_partition.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<tool id="gencode_partition1" name="Gencode Partition">
- <description>an interval file</description>
- <command interpreter="python">split_by_partitions.py ${GALAXY_DATA_INDEX_DIR} $input1 $out_file1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Partition"/>
- </inputs>
- <outputs>
- <data name="out_file1" format="bed"/>
- </outputs>
- <tests>
- <test>
- <param name="input1" value="encode_1.bed"/>
- <output name="out_file1" file="gencode_partition_out.bed"/>
- </test>
- </tests>
- <help>
-For detailed information about partitioning, click here_.
-
-.. _here: http://genome.imim.es/gencode/wiki/index.php/Collecting_Feature_Sets_from_A…
-
-Datasets are partitioned according to the protocol below:
-
-A partition scheme has been defined that is similar to what has previously been done with TARs/TRANSFRAGs such that any feature can be classified as falling into one of the following 6 categories:
- 1. **Coding** -- coding exons defined from the GENCODE experimentally verified coding set (coding in any transcript)
- 2. **5UTR** -- 5' UTR exons defined from the GENCODE experimentally verified coding set (5' UTR in some transcript but never coding in any other)
- 3. **3UTR** -- 3' UTR exons defined from the GENCODE experimentally verified coding set (3' UTR in some transcript but never coding in any other)
- 4. **Intronic Proximal** -- intronic and no more than 5kb away from an exon.
- 5. **Intergenic Proximal** -- between genes and no more than 5kb away from an exon.
- 6. **Intronic Distal** -- intronic and greater than 5kb away from an exon.
- 7. **Intergenic Distal** -- between genes and greater than 5kb away from an exon.
-
------
-
-.. class:: infomark
-
-**Note:** Features overlapping more than one partition will take the identity of the lower-numbered partition.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
diff -r 37cf56c3f0e4605e15f5988764f147bcb186f40f -r 2210b9de666ed9baa48304de39ba745e081b329e tools/encode/random_intervals.xml
--- a/tools/encode/random_intervals.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<tool id="random_intervals1" name="Random Intervals">
-<description>create a random set of intervals</description>
- <command interpreter="python">random_intervals_no_bits.py $regions $input2 $input1 $out_file1 ${input2.metadata.chromCol} ${input2.metadata.startCol} ${input2.metadata.endCol} ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} $use_mask $strand_overlaps ${GALAXY_DATA_INDEX_DIR}</command>
- <inputs>
- <param name="input1" type="data" format="interval" label="File to Mimick">
- <validator type="unspecified_build" message="Unspecified build, this tool works with data from genome builds hg16 or hg17. Click the pencil icon in your history item to set the genome build."/>
- </param>
- <param name="input2" type="data" format="interval" label="Intervals to Mask"/>
- <param name="use_mask" type="select" label="Use mask">
- <option value="no_mask">No</option>
- <option value="use_mask">Yes</option>
- </param>
- <param name="strand_overlaps" type="select" label="Allow overlaps">
- <option value="all">Any</option>
- <option value="strand">Across Strands</option>
- <option value="none">None</option>
- </param>
- <param name="regions" type="select" label="Regions to use">
- <options from_file="regions.loc">
- <column name="name" index="2"/>
- <column name="value" index="1"/>
- <column name="dbkey" index="0"/>
- <filter type="data_meta" ref="input1" key="dbkey" column="0" />
- <validator type="no_options" message="This tool currently only works with ENCODE data from genome builds hg16 or hg17."/>
- </options>
- </param>
- </inputs>
- <outputs>
- <data name="out_file1" format="input"/>
- </outputs>
- <help>
-
-.. class:: warningmark
-
-This tool currently only works with ENCODE data from genome builds hg16 or hg17.
-
------
-
-.. class:: infomark
-
-**Note:** If you do not wish to mask a set of intervals, change the Use Mask option to No, this option will override any Mask files selected.
-
------
-
-**Syntax**
-
-This tool will attempt to create a random set of intervals that mimic those found within your source file. You may also specify a set of intervals to mask.
-
-**Allow overlaps** options
- * **Across Strands** - random regions are allowed to overlap only if they are on different strands.
- * **Any** - all overlaps are allowed.
- * **None** - no overlapping regions are allowed.
-
-**Regions to use** options
- * Bounding region of interest based on the dataset build.
-
-------
-
-**Citation**
-
-If you use this tool, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_
-
- </help>
-</tool>
\ No newline at end of file
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/8ea87f7bc63a/
Changeset: 8ea87f7bc63a
User: natefoo
Date: 2014-01-27 20:01:21
Summary: Merge from next-stable to default
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3fdf673bdfc9/
Changeset: 3fdf673bdfc9
Branch: next-stable
User: inithello
Date: 2014-01-27 19:50:42
Summary: Use the same path for tool shed repositories when testing both tool dependencies and repositories with tools in the same test run.
Affected #: 3 files
diff -r 43d7d4a42cd2a3e31e02d86d0cf7d66dc928c631 -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 install_and_test_tool_shed_repositories.sh
--- a/install_and_test_tool_shed_repositories.sh
+++ b/install_and_test_tool_shed_repositories.sh
@@ -48,6 +48,14 @@
fi
fi
+if [ -z $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] ; then
+ export GALAXY_INSTALL_TEST_SHED_TOOL_PATH='/tmp/shed_tools'
+fi
+
+if [ ! -d $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] ; then
+ mkdir -p $GALAXY_INSTALL_TEST_SHED_TOOL_PATH
+fi
+
test_tool_dependency_definitions () {
# Test installation of repositories of type tool_dependency_definition.
if [ -f $GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR/stage_1_complete ] ; then
diff -r 43d7d4a42cd2a3e31e02d86d0cf7d66dc928c631 -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -342,6 +342,8 @@
if not os.path.isdir( galaxy_test_tmp_dir ):
os.mkdir( galaxy_test_tmp_dir )
# Set up the configuration files for the Galaxy instance.
+ galaxy_shed_tool_path = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_PATH',
+ tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' ) )
shed_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF',
os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_data_table_conf.xml' ) )
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF',
@@ -377,7 +379,6 @@
galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
# Set up the tool dependency path for the Galaxy instance.
tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
diff -r 43d7d4a42cd2a3e31e02d86d0cf7d66dc928c631 -r 3fdf673bdfc9af7653695ced36f274d65748c7f1 test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
@@ -230,6 +230,8 @@
if not os.path.isdir( galaxy_test_tmp_dir ):
os.mkdir( galaxy_test_tmp_dir )
# Set up the configuration files for the Galaxy instance.
+ galaxy_shed_tool_path = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_PATH',
+ tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' ) )
shed_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF',
os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_data_table_conf.xml' ) )
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF',
@@ -265,7 +267,6 @@
galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
# Set up the tool dependency path for the Galaxy instance.
tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
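One detail of the Python side of this change is worth noting: os.environ.get() always evaluates its second argument, so the expression in the diff still calls tempfile.mkdtemp() and creates a directory even when GALAXY_INSTALL_TEST_SHED_TOOL_PATH is set. A lazy variant of the same default logic, shown only as a sketch, avoids that:

import os
import tempfile

def get_shed_tool_path(galaxy_test_tmp_dir):
    # Same default behavior as the diff, but the fallback directory
    # is only created when the environment variable is actually unset.
    path = os.environ.get('GALAXY_INSTALL_TEST_SHED_TOOL_PATH')
    if path is None:
        path = tempfile.mkdtemp(dir=galaxy_test_tmp_dir, prefix='shed_tools')
    return path

On the shell side, the unquoted test [ -z $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] works here only because the value never contains whitespace; quoting the expansion would be more robust.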
https://bitbucket.org/galaxy/galaxy-central/commits/37cf56c3f0e4/
Changeset: 37cf56c3f0e4
User: inithello
Date: 2014-01-27 19:51:17
Summary: Merge bugfix from next-stable.
Affected #: 3 files
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 37cf56c3f0e4605e15f5988764f147bcb186f40f install_and_test_tool_shed_repositories.sh
--- a/install_and_test_tool_shed_repositories.sh
+++ b/install_and_test_tool_shed_repositories.sh
@@ -48,6 +48,14 @@
fi
fi
+if [ -z $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] ; then
+ export GALAXY_INSTALL_TEST_SHED_TOOL_PATH='/tmp/shed_tools'
+fi
+
+if [ ! -d $GALAXY_INSTALL_TEST_SHED_TOOL_PATH ] ; then
+ mkdir -p $GALAXY_INSTALL_TEST_SHED_TOOL_PATH
+fi
+
test_tool_dependency_definitions () {
# Test installation of repositories of type tool_dependency_definition.
if [ -f $GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR/stage_1_complete ] ; then
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 37cf56c3f0e4605e15f5988764f147bcb186f40f test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py
@@ -342,6 +342,8 @@
if not os.path.isdir( galaxy_test_tmp_dir ):
os.mkdir( galaxy_test_tmp_dir )
# Set up the configuration files for the Galaxy instance.
+ galaxy_shed_tool_path = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_PATH',
+ tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' ) )
shed_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF',
os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_data_table_conf.xml' ) )
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF',
@@ -377,7 +379,6 @@
galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
# Set up the tool dependency path for the Galaxy instance.
tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
diff -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d -r 37cf56c3f0e4605e15f5988764f147bcb186f40f test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/functional_tests.py
@@ -230,6 +230,8 @@
if not os.path.isdir( galaxy_test_tmp_dir ):
os.mkdir( galaxy_test_tmp_dir )
# Set up the configuration files for the Galaxy instance.
+ galaxy_shed_tool_path = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_PATH',
+ tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' ) )
shed_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF',
os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_data_table_conf.xml' ) )
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF',
@@ -265,7 +267,6 @@
galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
# Set up the tool dependency path for the Galaxy instance.
tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: inithello: Fix description for find_diag_hits.
by commits-noreply@bitbucket.org 27 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ad9cb3bde1ee/
Changeset: ad9cb3bde1ee
User: inithello
Date: 2014-01-27 16:44:24
Summary: Fix description for find_diag_hits.
Affected #: 1 file
diff -r 74392b32685f35aa146cd5b66da098eb78eb77be -r ad9cb3bde1ee3aace42ed16dca5e40ac537eee6d scripts/migrate_tools/0009_tools.xml
--- a/scripts/migrate_tools/0009_tools.xml
+++ b/scripts/migrate_tools/0009_tools.xml
@@ -75,7 +75,7 @@
<repository owner="devteam" changeset_revision="31154ff9f5e1" name="filter_transcripts_via_tracking" description="Filter Combined Transcripts"><tool id="filter_combined_via_tracking" version="0.1" file="filter_transcripts_via_tracking.xml" /></repository>
- <repository owner="devteam" changeset_revision="acf51ff24c7d" name="find_diag_hits" description="find_diag_hits">
+ <repository owner="devteam" changeset_revision="acf51ff24c7d" name="find_diag_hits" description="Identify sequence reads corresponding to a particular taxonomic group"><tool id="find_diag_hits" version="1.0.0" file="find_diag_hits.xml" /></repository><repository owner="devteam" changeset_revision="33a0e6aca936" name="freebayes_wrapper" description="Call SNPS with Freebayes">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/74392b32685f/
Changeset: 74392b32685f
User: dan
Date: 2014-01-27 16:41:07
Summary: merge stable
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dan: Fix for aggregate_scores_in_intervals.py broken in 95bf71620d50c6d81248eef2001a7dc156ae1088.
by commits-noreply@bitbucket.org 27 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6c5913a4b701/
Changeset: 6c5913a4b701
Branch: stable
User: dan
Date: 2014-01-27 15:54:34
Summary: Fix for aggregate_scores_in_intervals.py broken in 95bf71620d50c6d81248eef2001a7dc156ae1088.
Affected #: 1 file
diff -r 03f7d79212ecf626af4940296e663c376b9cd2ee -r 6c5913a4b701813e823638125fff8bf9fda7354b tools/stats/aggregate_scores_in_intervals.py
--- a/tools/stats/aggregate_scores_in_intervals.py
+++ b/tools/stats/aggregate_scores_in_intervals.py
@@ -201,7 +201,7 @@
continue
# Get the score, only count if not 'nan'
score = scores_by_chrom[chrom][j]
- if not isNaN( score ):
+ if not isnan( score ):
total += score
count += 1
max_score = max( score, max_score )
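The one-character fix above corrects a NameError: Python's math module spells the predicate isnan, while isNaN is undefined. Assuming the module imports it from math, the repaired aggregation loop behaves like this self-contained sketch:

from math import isnan

def aggregate(scores):
    # Skip NaN placeholders while accumulating per-interval
    # statistics, mirroring the fixed loop in
    # aggregate_scores_in_intervals.py.
    total, count, max_score = 0.0, 0, float('-inf')
    for score in scores:
        if not isnan(score):
            total += score
            count += 1
            max_score = max(score, max_score)
    return total, count, max_score

print(aggregate([1.0, float('nan'), 3.0]))
# -> (4.0, 2, 3.0)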
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e9fc9e7adc63/
Changeset: e9fc9e7adc63
User: inithello
Date: 2014-01-27 15:53:14
Summary: Migrate 43 tools from the distribution to the tool shed.
Affected #: 296 files
diff -r 04bce95d8fd21ae597ba839936dd644870450051 -r e9fc9e7adc63554b6f8a5604bce8be6b37d35d55 lib/tool_shed/galaxy_install/migrate/versions/0009_tools.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0009_tools.py
@@ -0,0 +1,103 @@
+"""
+The following tools have been eliminated from the distribution:
+
+1: Bowtie2
+2: Control-based ChIP-seq Analysis Tool
+3: ClustalW multiple sequence alignment program for DNA or proteins
+4: Compute P-values and Correlation Coefficients for Feature Occurrences
+5: Compute P-values and Correlation Coefficients for Occurrences of Two Set of Features
+6: Compute P-values and Second Moments for Feature Occurrences
+7: Compute P-values and Max Variances for Feature Occurrences
+8: Wavelet variance using Discrete Wavelet Transfoms
+9: Quantify the abundances of a set of target sequences from sampled subsequences
+10: Read QC reports using FastQC
+11: Combine FASTA and QUAL into FASTQ.
+12: Filter FASTQ reads by quality score and length
+13: Convert between various FASTQ quality formats.
+14: Manipulate FASTQ reads on various attributes.
+15: FASTQ Masker by quality score
+16: FASTQ de-interlacer on paired end reads.
+17: FASTQ interlacer on paired end reads
+18: FASTQ joiner on paired end reads
+19: FASTQ splitter on joined paired end reads
+20: FASTQ Summary Statistics by column
+21: FASTQ to FASTA converter
+22: FASTQ to Tabular converter
+23: FASTQ Trimmer by quality
+24: FASTQ Quality Trimmer by sliding window
+25: Filter Combined Transcripts
+26: find_diag_hits
+27: Call SNPS with Freebayes
+28: Fetch taxonomic representation
+29: GMAJ Multiple Alignment Viewer
+30: Find lowest diagnostic rank
+31: Model-based Analysis of ChIP-Seq
+32: Poisson two-sample test
+33: Statistical approach for the Identification of ChIP-Enriched Regions
+34: Draw phylogeny
+35: Summarize taxonomy
+36: Tabular to FASTQ converter
+37: Find splice junctions using RNA-seq data
+38: Gapped-read mapper for RNA-seq data
+39: Annotate a VCF file (dbSNP, hapmap)
+40: Extract reads from a specified region
+41: Filter a VCF file
+42: Generate the intersection of two VCF files
+43: Sequence Logo generator for fasta (eg Clustal alignments)
+
+The tools are now available in the repositories respectively:
+
+1: bowtie2
+2: ccat
+3: clustalw
+4: dwt_cor_ava_perclass
+5: dwt_cor_avb_all
+6: dwt_ivc_all
+7: dwt_var_perclass
+8: dwt_var_perfeature
+9: express
+10: fastqc
+11: fastq_combiner
+12: fastq_filter
+13: fastq_groomer
+14: fastq_manipulation
+15: fastq_masker_by_quality
+16: fastq_paired_end_deinterlacer
+17: fastq_paired_end_interlacer
+18: fastq_paired_end_joiner
+19: fastq_paired_end_splitter
+20: fastq_stats
+21: fastqtofasta
+22: fastq_to_tabular
+23: fastq_trimmer
+24: fastq_trimmer_by_quality
+25: filter_transcripts_via_tracking
+26: find_diag_hits
+27: freebayes_wrapper
+28: gi2taxonomy
+29: gmaj
+30: lca_wrapper
+31: macs
+32: poisson2test
+33: sicer
+34: t2ps
+35: t2t_report
+36: tabular_to_fastq
+37: tophat
+38: tophat2
+39: vcf_annotate
+40: vcf_extract
+41: vcf_filter
+42: vcf_intersect
+43: weblogo3
+
+from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+and will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+
+def upgrade( migrate_engine ):
+ print __doc__
+
+def downgrade( migrate_engine ):
+ pass
diff -r 04bce95d8fd21ae597ba839936dd644870450051 -r e9fc9e7adc63554b6f8a5604bce8be6b37d35d55 scripts/migrate_tools/0009_tools.sh
--- /dev/null
+++ b/scripts/migrate_tools/0009_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0009_tools.xml $@
diff -r 04bce95d8fd21ae597ba839936dd644870450051 -r e9fc9e7adc63554b6f8a5604bce8be6b37d35d55 scripts/migrate_tools/0009_tools.xml
--- /dev/null
+++ b/scripts/migrate_tools/0009_tools.xml
@@ -0,0 +1,132 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+ <repository owner="devteam" changeset_revision="96d2e31a3938" name="bowtie2" description="Bowtie2">
+ <tool id="bowtie2" version="0.2" file="bowtie2_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="a0c8dc671a23" name="ccat" description="Control-based ChIP-seq Analysis Tool">
+ <tool id="peakcalling_ccat" version="0.0.1" file="ccat_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="7cc64024fe92" name="clustalw" description="ClustalW multiple sequence alignment program for DNA or proteins">
+ <tool id="clustalw" version="0.1" file="rgClustalw.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="6708501767b6" name="dwt_cor_ava_perclass" description="Compute P-values and Correlation Coefficients for Feature Occurrences">
+ <tool id="compute_p-values_correlation_coefficients_feature_occurrences_between_two_datasets_using_discrete_wavelet_transfom" version="1.0.0" file="execute_dwt_cor_aVa_perClass.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="0f2eda4ea8dc" name="dwt_cor_avb_all" description="Compute P-values and Correlation Coefficients for Occurrences of Two Set of Features">
+ <tool id="compute_p-values_correlation_coefficients_featureA_featureB_occurrences_between_two_datasets_using_discrete_wavelet_transfom" version="1.0.0" file="execute_dwt_cor_aVb_all.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="0b89b03ad760" name="dwt_ivc_all" description="Compute P-values and Second Moments for Feature Occurrences">
+ <tool id="compute_p-values_second_moments_feature_occurrences_between_two_datasets_using_discrete_wavelet_transfom" version="1.0.0" file="execute_dwt_IvC_all.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="cb422b6f49d2" name="dwt_var_perclass" description="Compute P-values and Max Variances for Feature Occurrences">
+ <tool id="compute_p-values_max_variances_feature_occurrences_in_one_dataset_using_discrete_wavelet_transfom" version="1.0.0" file="execute_dwt_var_perClass.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="d56c5d2e1a29" name="dwt_var_perfeature" description="Wavelet variance using Discrete Wavelet Transfoms">
+ <tool id="dwt_var1" version="1.0.0" file="execute_dwt_var_perFeature.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="7b0708761d05" name="express" description="Quantify the abundances of a set of target sequences from sampled subsequences">
+ <tool id="express" version="1.1.1" file="express_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="e28c965eeed4" name="fastqc" description="Read QC reports using FastQC">
+ <tool id="fastqc" version="1.0.0" file="rgFastQC.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="94306bdd58f7" name="fastq_combiner" description="Combine FASTA and QUAL into FASTQ.">
+ <tool id="fastq_combiner" version="1.0.1" file="fastq_combiner.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="30d9ece6c752" name="fastq_filter" description="Filter FASTQ reads by quality score and length">
+ <tool id="fastq_filter" version="1.0.0" file="fastq_filter.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="1298445c852b" name="fastq_groomer" description="Convert between various FASTQ quality formats.">
+ <tool id="fastq_groomer" version="1.0.4" file="fastq_groomer.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="5d1e9e13e8db" name="fastq_manipulation" description="Manipulate FASTQ reads on various attributes.">
+ <tool id="fastq_manipulation" version="1.0.1" file="fastq_manipulation.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="5a7b5751617b" name="fastq_masker_by_quality" description="FASTQ Masker by quality score">
+ <tool id="fastq_masker_by_quality" version="1.0.0" file="fastq_masker_by_quality.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="f0949bc49926" name="fastq_paired_end_deinterlacer" description="FASTQ de-interlacer on paired end reads.">
+ <tool id="fastq_paired_end_deinterlacer" version="1.1" file="fastq_paired_end_deinterlacer.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="b89bdf6acb6c" name="fastq_paired_end_interlacer" description="FASTQ interlacer on paired end reads">
+ <tool id="fastq_paired_end_interlacer" version="1.1" file="fastq_paired_end_interlacer.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="2793d1d765b9" name="fastq_paired_end_joiner" description="FASTQ joiner on paired end reads">
+ <tool id="fastq_paired_end_joiner" version="1.0.0" file="fastq_paired_end_joiner.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="c549e99026db" name="fastq_paired_end_splitter" description="FASTQ splitter on joined paired end reads">
+ <tool id="fastq_paired_end_splitter" version="1.0.0" file="fastq_paired_end_splitter.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="9b7b4e0ca9db" name="fastq_stats" description="FASTQ Summary Statistics by column">
+ <tool id="fastq_stats" version="1.0.0" file="fastq_stats.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="3571553aeb20" name="fastqtofasta" description="FASTQ to FASTA converter">
+ <tool id="fastq_to_fasta_python" version="1.0.0" file="fastq_to_fasta.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="bc9269529e88" name="fastq_to_tabular" description="FASTQ to Tabular converter">
+ <tool id="fastq_to_tabular" version="1.1.0" file="fastq_to_tabular.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="0b9feb0ed628" name="fastq_trimmer" description="FASTQ Trimmer by quality">
+ <tool id="fastq_trimmer" version="1.0.0" file="fastq_trimmer.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="1cdcaf5fc1da" name="fastq_trimmer_by_quality" description="FASTQ Quality Trimmer by sliding window">
+ <tool id="fastq_quality_trimmer" version="1.0.0" file="fastq_trimmer_by_quality.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="31154ff9f5e1" name="filter_transcripts_via_tracking" description="Filter Combined Transcripts">
+ <tool id="filter_combined_via_tracking" version="0.1" file="filter_transcripts_via_tracking.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="acf51ff24c7d" name="find_diag_hits" description="find_diag_hits">
+ <tool id="find_diag_hits" version="1.0.0" file="find_diag_hits.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="33a0e6aca936" name="freebayes_wrapper" description="Call SNPS with Freebayes">
+ <tool id="freebayes_wrapper" version="0.5.0" file="freebayes.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="7b1b03c4465d" name="gi2taxonomy" description="Fetch taxonomic representation">
+ <tool id="Fetch Taxonomic Ranks" version="1.1.0" file="gi2taxonomy.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="2cd5ee197ec7" name="gmaj" description="GMAJ Multiple Alignment Viewer">
+ <tool id="gmaj_1" version="2.0.1" file="GMAJ.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="33e8ed5a4601" name="lca_wrapper" description="Find lowest diagnostic rank">
+ <tool id="lca1" version="1.0.1" file="lca.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="ae2ec275332a" name="macs" description="Model-based Analysis of ChIP-Seq">
+ <tool id="peakcalling_macs" version="1.0.1" file="macs_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="8cd5945559b8" name="poisson2test" description="Poisson two-sample test">
+ <tool id="poisson2test" version="1.0.0" file="poisson2test.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="82a8234e03f2" name="sicer" description="Statistical approach for the Identification of ChIP-Enriched Regions">
+ <tool id="peakcalling_sicer" version="0.0.1" file="sicer_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="9e2b9ca7f33a" name="t2ps" description="Draw phylogeny">
+ <tool id="Draw_phylogram" version="1.0.0" file="t2ps_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="592acb9505fc" name="t2t_report" description="Summarize taxonomy">
+ <tool id="t2t_report" version="1.0.0" file="t2t_report.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="b334cd1095ea" name="tabular_to_fastq" description="Tabular to FASTQ converter">
+ <tool id="tabular_to_fastq" version="1.0.0" file="tabular_to_fastq.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="1030acbecce6" name="tophat" description="Find splice junctions using RNA-seq data">
+ <tool id="tophat" version="1.5.0" file="tophat_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="ffa30bedbee3" name="tophat2" description="Gapped-read mapper for RNA-seq data">
+ <tool id="tophat2" version="0.6" file="tophat2_wrapper.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="b001b50f2009" name="vcf_annotate" description="Annotate a VCF file (dbSNP, hapmap)">
+ <tool id="vcf_annotate" version="1.0.0" file="annotate.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="76ad0b7865b9" name="vcf_extract" description="Extract reads from a specified region">
+ <tool id="vcf_extract" version="1.0.0" file="extract.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="da1a6f33b504" name="vcf_filter" description="Filter a VCF file">
+ <tool id="vcf_filter" version="1.0.0" file="filter.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="9d162bde4113" name="vcf_intersect" description="Generate the intersection of two VCF files">
+ <tool id="vcf_intersect" version="1.0.0" file="vcf_tools/intersect.xml" />
+ </repository>
+ <repository owner="devteam" changeset_revision="66253fc0a69b" name="weblogo3" description="Sequence Logo generator for fasta (eg Clustal alignments)">
+ <tool id="rgweblogo3" version="0.4" file="rgWebLogo3.xml" />
+ </repository>
+</toolshed>
\ No newline at end of file
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/galaxy/galaxy-central/commits/8207135c2509/
Changeset: 8207135c2509
User: inithello
Date: 2014-01-27 15:53:51
Summary: Remove migrated tools from tool_conf.xml.main
Affected #: 1 file
diff -r e9fc9e7adc63554b6f8a5604bce8be6b37d35d55 -r 8207135c2509db9d875becf0fded05212b3d87e0 tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -55,7 +55,6 @@
<section id="convert" name="Convert Formats"><tool file="filters/bed2gff.xml" /><tool file="fasta_tools/fasta_to_tabular.xml" />
- <tool file="fastq/fastq_to_fasta.xml" /><tool file="filters/gff2bed.xml" /><tool file="maf/maf_to_bed.xml" /><tool file="maf/maf_to_interval.xml" />
@@ -182,14 +181,6 @@
<section id="clustal" name="Multiple Alignments"><tool file="rgenetics/rgClustalw.xml" /></section>
- <section id="tax_manipulation" name="Metagenomic analyses">
- <tool file="taxonomy/gi2taxonomy.xml" />
- <tool file="taxonomy/t2t_report.xml" />
- <tool file="taxonomy/t2ps_wrapper.xml" />
- <tool file="taxonomy/find_diag_hits.xml" />
- <tool file="taxonomy/lca.xml" />
- <tool file="taxonomy/poisson2test.xml" />
- </section><section id="hgv" name="Phenotype Association"><tool file="evolution/codingSnps.xml" /><tool file="evolution/add_scores.xml" />
@@ -202,41 +193,26 @@
<tool file="phenotype_association/gpass.xml" /><tool file="phenotype_association/beam.xml" /><tool file="phenotype_association/lps.xml" />
- <tool file="phenotype_association/freebayes.xml" /><tool file="phenotype_association/master2pg.xml" /><tool file="phenotype_association/vcf2pgSnp.xml" /></section><label id="ngs" text="NGS Toolbox Beta" /><section id="cshl_library_information" name="NGS: QC and manipulation"><label id="illumina" text="Illumina data" />
- <tool file="fastq/fastq_groomer.xml" />
- <tool file="fastq/fastq_paired_end_splitter.xml" />
- <tool file="fastq/fastq_paired_end_joiner.xml" />
- <tool file="fastq/fastq_stats.xml" /><label id="454" text="Roche-454 data" /><tool file="metag_tools/short_reads_figure_score.xml" /><tool file="metag_tools/short_reads_trim_seq.xml" />
- <tool file="fastq/fastq_combiner.xml" /><label id="solid" text="AB-SOLiD data" /><tool file="next_gen_conversion/solid2fastq.xml" /><tool file="solid_tools/solid_qual_stats.xml" /><tool file="solid_tools/solid_qual_boxplot.xml" /><label id="generic_fastq" text="Generic FASTQ manipulation" />
- <tool file="fastq/fastq_filter.xml" />
- <tool file="fastq/fastq_trimmer.xml" />
- <tool file="fastq/fastq_trimmer_by_quality.xml" />
- <tool file="fastq/fastq_masker_by_quality.xml" />
- <tool file="fastq/fastq_manipulation.xml" />
- <tool file="fastq/fastq_to_fasta.xml" />
- <tool file="fastq/fastq_to_tabular.xml" />
- <tool file="fastq/tabular_to_fastq.xml" /><label id="fastx_toolkit_fastq" text="FASTX-Toolkit for FASTQ data" /><label id="fastq_qc" text="FASTQ QC" /><tool file="rgenetics/rgFastQC.xml" /></section><section id="ngs_mapping" name="NGS: Mapping"><label id="illumina" text="Illumina" />
- <tool file="sr_mapping/bowtie2_wrapper.xml" /><label id="roche_454" text="Roche-454" /><tool file="metag_tools/megablast_wrapper.xml" /><tool file="metag_tools/megablast_xml_parser.xml" />
@@ -283,10 +259,6 @@
</section><section id="ngs-rna-tools" name="NGS: RNA Analysis"><label id="rna_seq" text="RNA-seq" />
- <tool file="ngs_rna/tophat_wrapper.xml" />
- <tool file="ngs_rna/tophat2_wrapper.xml" />
- <tool file="ngs_rna/express_wrapper.xml" /><label id="filtering" text="Filtering" />
- <tool file="ngs_rna/filter_transcripts_via_tracking.xml" /></section></toolbox>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for aggregate_scores_in_intervals.py broken in 95bf71620d50c6d81248eef2001a7dc156ae1088.
by commits-noreply@bitbucket.org 27 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/04bce95d8fd2/
Changeset: 04bce95d8fd2
User: dan
Date: 2014-01-27 15:54:34
Summary: Fix for aggregate_scores_in_intervals.py broken in 95bf71620d50c6d81248eef2001a7dc156ae1088.
Affected #: 1 file
diff -r bbd852d8ed7c125844aac9f38a85d5246bbd845b -r 04bce95d8fd21ae597ba839936dd644870450051 tools/stats/aggregate_scores_in_intervals.py
--- a/tools/stats/aggregate_scores_in_intervals.py
+++ b/tools/stats/aggregate_scores_in_intervals.py
@@ -201,7 +201,7 @@
continue
# Get the score, only count if not 'nan'
score = scores_by_chrom[chrom][j]
- if not isNaN( score ):
+ if not isnan( score ):
total += score
count += 1
max_score = max( score, max_score )
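For the record, the one-character change above fixes a NameError: Python's math module provides isnan (all lowercase), so the camel-cased isNaN in the old line would not resolve and the script presumably failed whenever a score was checked. A minimal standalone sketch of the same NaN-skipping guard follows; the aggregate() helper and its inputs are illustrative, not Galaxy's actual code.

    from math import isnan

    def aggregate(scores):
        # Accumulate total, count, and max while skipping NaN entries,
        # mirroring the guard in aggregate_scores_in_intervals.py.
        total, count, max_score = 0.0, 0, float('-inf')
        for score in scores:
            if not isnan(score):
                total += score
                count += 1
                max_score = max(score, max_score)
        return total, count, max_score

    print(aggregate([1.0, float('nan'), 3.0]))  # -> (4.0, 2, 3.0)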
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ec858cad8c51/
Changeset: ec858cad8c51
User: jmchilton
Date: 2014-01-27 05:17:01
Summary: PEP-8 fixes for workflow controllers.
Affected #: 2 files
diff -r a7ac443f9c9b7b5383284149db4bcc6a99e4b124 -r ec858cad8c51d5190237eaacb31c51841e2e2843 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -18,7 +18,9 @@
log = logging.getLogger(__name__)
+
class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin):
+
@web.expose_api
def index(self, trans, **kwd):
"""
@@ -34,10 +36,10 @@
filter1 = ( trans.app.model.StoredWorkflow.user == trans.user )
if show_published:
filter1 = or_( filter1, ( trans.app.model.StoredWorkflow.published == True ) )
- for wf in trans.sa_session.query(trans.app.model.StoredWorkflow).filter(
+ for wf in trans.sa_session.query( trans.app.model.StoredWorkflow ).filter(
filter1, trans.app.model.StoredWorkflow.table.c.deleted == False ).order_by(
- desc(trans.app.model.StoredWorkflow.table.c.update_time)).all():
- item = wf.to_dict(value_mapper={'id':trans.security.encode_id})
+ desc( trans.app.model.StoredWorkflow.table.c.update_time ) ).all():
+ item = wf.to_dict( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id(wf.id)
item['url'] = url_for('workflow', id=encoded_id)
rval.append(item)
@@ -45,9 +47,9 @@
user=trans.user ).join( 'stored_workflow' ).filter(
trans.app.model.StoredWorkflow.deleted == False ).order_by(
desc( trans.app.model.StoredWorkflow.update_time ) ).all():
- item = wf_sa.stored_workflow.to_dict(value_mapper={'id':trans.security.encode_id})
+ item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id })
encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
- item['url'] = url_for('workflow', id=encoded_id)
+ item['url'] = url_for( 'workflow', id=encoded_id )
rval.append(item)
return rval
@@ -73,16 +75,16 @@
except:
trans.response.status = 400
return "That workflow does not exist."
- item = stored_workflow.to_dict(view='element', value_mapper={'id':trans.security.encode_id})
+ item = stored_workflow.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id } )
item['url'] = url_for('workflow', id=workflow_id)
latest_workflow = stored_workflow.latest_workflow
inputs = {}
for step in latest_workflow.steps:
if step.type == 'data_input':
if step.tool_inputs and "name" in step.tool_inputs:
- inputs[step.id] = {'label':step.tool_inputs['name'], 'value':""}
+ inputs[step.id] = {'label': step.tool_inputs['name'], 'value': ""}
else:
- inputs[step.id] = {'label':"Input Dataset", 'value':""}
+ inputs[step.id] = {'label': "Input Dataset", 'value': ""}
else:
pass
# Eventually, allow regular tool parameters to be inserted and modified at runtime.
@@ -220,12 +222,12 @@
if 'param' in param_dict and 'value' in param_dict:
param_dict[ param_dict['param'] ] = param_dict['value']
- # Update step if there's no step id (i.e. all steps with tool are
+ # Update step if there's no step id (i.e. all steps with tool are
# updated) or update if step ids match.
if not step_id or ( step_id and int( step_id ) == step.id ):
for name, value in param_dict.items():
step.state.inputs[ name ] = value
-
+
if step.tool_errors:
trans.response.status = 400
return "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
@@ -251,6 +253,7 @@
job = None
if step.type == 'tool' or step.type is None:
tool = self.app.toolbox.get_tool( step.tool_id )
+
def callback( input, value, prefixed_name, prefixed_label ):
if isinstance( input, DataToolParameter ):
if prefixed_name in step.input_connections_by_name:
@@ -347,6 +350,7 @@
"""
data = payload['workflow']
+
workflow, missing_tool_tups = self._workflow_from_dict( trans, data, source="API" )
# galaxy workflow newly created id
@@ -357,7 +361,7 @@
# return list
rval = []
- item = workflow.to_dict(value_mapper={'id':trans.security.encode_id})
+ item = workflow.to_dict(value_mapper={'id': trans.security.encode_id})
item['url'] = url_for('workflow', id=encoded_id)
rval.append(item)
@@ -388,7 +392,7 @@
elif stored_workflow.deleted:
raise exceptions.MessageException( "You can't import this workflow because it has been deleted." )
imported_workflow = self._import_shared_workflow( trans, stored_workflow )
- item = imported_workflow.to_dict(value_mapper={'id':trans.security.encode_id})
+ item = imported_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
encoded_id = trans.security.encode_id(imported_workflow.id)
item['url'] = url_for('workflow', id=encoded_id)
return item
diff -r a7ac443f9c9b7b5383284149db4bcc6a99e4b124 -r ec858cad8c51d5190237eaacb31c51841e2e2843 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -68,6 +68,7 @@
grids.GridOperation( "Sharing", condition=( lambda item: not item.deleted ), async_compatible=False ),
grids.GridOperation( "Delete", condition=( lambda item: item.deleted ), async_compatible=True ),
]
+
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter_by( user=trans.user, deleted=False )
@@ -109,7 +110,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
# A public workflow is published, has a slug, and is not deleted.
- return query.filter( self.model_class.published==True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
+ return query.filter( self.model_class.published == True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
# Simple SGML parser to get all content in a single tag.
@@ -183,8 +184,8 @@
trans.sa_session.flush()
return trans.fill_template( "workflow/list.mako",
- workflows = workflows,
- shared_by_others = shared_by_others )
+ workflows=workflows,
+ shared_by_others=shared_by_others )
@web.expose
@web.require_login( "use Galaxy workflows" )
@@ -205,8 +206,8 @@
.order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
.all()
return trans.fill_template( "workflow/list_for_run.mako",
- workflows = workflows,
- shared_by_others = shared_by_others )
+ workflows=workflows,
+ shared_by_others=shared_by_others )
@web.expose
def list_published( self, trans, **kwargs ):
@@ -237,7 +238,7 @@
def display(self, trans, stored_workflow):
""" Base workflow display """
if stored_workflow is None:
- raise web.httpexceptions.HTTPNotFound()
+ raise web.httpexceptions.HTTPNotFound()
# Security check raises error if user cannot access workflow.
self.security_check( trans, stored_workflow, False, True )
# Get data for workflow's steps.
@@ -256,7 +257,7 @@
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, stored_workflow )
return trans.fill_template_mako( "workflow/display.mako", item=stored_workflow, item_data=stored_workflow.latest_workflow.steps,
- user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
+ user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
@web.expose
def get_item_content_async( self, trans, id ):
@@ -272,7 +273,7 @@
stored.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, stored )
for step in stored.latest_workflow.steps:
step.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, step )
- return trans.stream_template_mako( "/workflow/item_content.mako", item = stored, item_data = stored.latest_workflow.steps )
+ return trans.stream_template_mako( "/workflow/item_content.mako", item=stored, item_data=stored.latest_workflow.steps )
@web.expose
@web.require_login( "use Galaxy workflows" )
@@ -282,8 +283,8 @@
stored = self.get_stored_workflow( trans, id )
if email:
other = trans.sa_session.query( model.User ) \
- .filter( and_( model.User.table.c.email==email,
- model.User.table.c.deleted==False ) ) \
+ .filter( and_( model.User.table.c.email == email,
+ model.User.table.c.deleted == False ) ) \
.first()
if not other:
mtype = "error"
@@ -306,8 +307,8 @@
trans.set_message( "Workflow '%s' shared with user '%s'" % ( stored.name, other.email ) )
return trans.response.send_redirect( url_for( controller='workflow', action='sharing', id=id ) )
return trans.fill_template( "/ind_share_base.mako",
- message = msg,
- messagetype = mtype,
+ message=msg,
+ messagetype=mtype,
item=stored,
email=email,
use_panels=use_panels )
@@ -414,7 +415,7 @@
stored.latest_workflow.name = san_new_name
trans.sa_session.flush()
# For current workflows grid:
- trans.set_message ( "Workflow renamed to '%s'." % new_name )
+ trans.set_message( "Workflow renamed to '%s'." % new_name )
return self.list( trans )
# For new workflows grid:
#message = "Workflow renamed to '%s'." % new_name
@@ -468,7 +469,7 @@
stored = self.get_stored_workflow( trans, id )
# Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
- importable = accessible in ['True', 'true', 't', 'T'];
+ importable = accessible in ['True', 'true', 't', 'T']
if stored and stored.importable != importable:
if importable:
self._make_item_accessible( trans.sa_session, stored )
@@ -527,7 +528,7 @@
annotation_obj = self.get_item_annotation_obj( trans.sa_session, stored.user, stored )
if annotation_obj:
self.add_item_annotation( trans.sa_session, trans.get_user(), new_stored, annotation_obj.annotation )
- new_stored.copy_tags_from(trans.user,stored)
+ new_stored.copy_tags_from(trans.user, stored)
if not owner:
new_stored.name += " shared by '%s'" % stored.user.email
new_stored.user = user
@@ -617,7 +618,7 @@
} )
module.update_state( incoming )
- if type=='tool':
+ if type == 'tool':
return {
'tool_state': module.get_state(),
'data_inputs': module.get_data_inputs(),
@@ -702,10 +703,10 @@
'data_inputs': [],
'data_outputs': [],
'form_html': invalid_tool_form_html,
- 'annotation' : annotation_str,
- 'input_connections' : {},
- 'post_job_actions' : {},
- 'workflow_outputs' : []
+ 'annotation': annotation_str,
+ 'input_connections': {},
+ 'post_job_actions': {},
+ 'workflow_outputs': []
}
# Position
step_dict['position'] = step.position
@@ -741,10 +742,11 @@
}
# Connections
input_connections = step.input_connections
- multiple_input = {} # Boolean value indicating if this can be multiple
+ multiple_input = {} # Boolean value indicating if this can be multiple
if step.type is None or step.type == 'tool':
# Determine full (prefixed) names of valid input datasets
data_input_names = {}
+
def callback( input, value, prefixed_name, prefixed_label ):
if isinstance( input, DataToolParameter ):
data_input_names[ prefixed_name ] = True
@@ -756,9 +758,11 @@
# post_job_actions
pja_dict = {}
for pja in step.post_job_actions:
- pja_dict[pja.action_type+pja.output_name] = dict(action_type = pja.action_type,
- output_name = pja.output_name,
- action_arguments = pja.action_arguments)
+ pja_dict[pja.action_type + pja.output_name] = dict(
+ action_type=pja.action_type,
+ output_name=pja.output_name,
+ action_arguments=pja.action_arguments
+ )
step_dict['post_job_actions'] = pja_dict
#workflow outputs
outputs = []
@@ -823,9 +827,9 @@
step.position = step_dict['position']
module = module_factory.from_dict( trans, step_dict )
module.save_to_step( step )
- if step_dict.has_key('workflow_outputs'):
+ if 'workflow_outputs' in step_dict:
for output_name in step_dict['workflow_outputs']:
- m = model.WorkflowOutput(workflow_step = step, output_name = output_name)
+ m = model.WorkflowOutput(workflow_step=step, output_name=output_name)
trans.sa_session.add(m)
if step.tool_errors:
# DBTODO Check for conditional inputs here.
@@ -842,7 +846,7 @@
# Input connections
for input_name, conns in step.temp_input_connections.iteritems():
if conns:
- conn_dicts = conns if isinstance(conns,list) else [conns]
+ conn_dicts = conns if isinstance(conns, list) else [ conns ]
for conn_dict in conn_dicts:
conn = model.WorkflowStepConnection()
conn.input_step = step
@@ -1049,7 +1053,7 @@
# This may not be ideal...
workflow_data = uploaded_file.read()
else:
- message= 'You attempted to upload an empty file.'
+ message = 'You attempted to upload an empty file.'
status = 'error'
if workflow_data:
# Convert incoming workflow data from json
@@ -1154,13 +1158,14 @@
ext_to_class_name[k] = c.__module__ + "." + c.__name__
classes.append( c )
class_to_classes = dict()
+
def visit_bases( types, cls ):
for base in cls.__bases__:
if issubclass( base, Data ):
types.add( base.__module__ + "." + base.__name__ )
visit_bases( types, base )
for c in classes:
- n = c.__module__ + "." + c.__name__
+ n = c.__module__ + "." + c.__name__
types = set( [ n ] )
visit_bases( types, c )
class_to_classes[ n ] = dict( ( t, True ) for t in types )
@@ -1179,7 +1184,8 @@
"workflow/build_from_current_history.mako",
jobs=jobs,
warnings=warnings,
- history=history )
+ history=history
+ )
else:
# Ensure job_ids and dataset_ids are lists (possibly empty)
if job_ids is None:
@@ -1212,7 +1218,7 @@
assert job_id in jobs_by_id, "Attempt to create workflow with job not connected to current history"
job = jobs_by_id[ job_id ]
tool = trans.app.toolbox.get_tool( job.tool_id )
- param_values = job.get_param_values( trans.app, ignore_errors=True ) #If a tool was updated and e.g. had a text value changed to an integer, we don't want a traceback here
+ param_values = job.get_param_values( trans.app, ignore_errors=True ) # If a tool was updated and e.g. had a text value changed to an integer, we don't want a traceback here
associations = cleanup_param_values( tool.inputs, param_values )
step = model.WorkflowStep()
step.type = 'tool'
@@ -1247,8 +1253,8 @@
for i, steps_at_level in enumerate( levorder ):
for j, index in enumerate( steps_at_level ):
step = steps[ index ]
- step.position = dict( top = ( base_pos + 120 * j ),
- left = ( base_pos + 220 * i ) )
+ step.position = dict( top=( base_pos + 120 * j ),
+ left=( base_pos + 220 * i ) )
# Store it
stored = model.StoredWorkflow()
stored.user = user
@@ -1288,7 +1294,7 @@
# has_errors = False
saved_history = None
if history_id is not None:
- saved_history = trans.get_history();
+ saved_history = trans.get_history()
try:
decoded_history_id = trans.security.decode_id( history_id )
history = trans.sa_session.query(trans.app.model.History).get(decoded_history_id)
@@ -1300,12 +1306,12 @@
error("Malformed history id ( %s ) specified, unable to decode." % str( history_id ))
except:
error("That history does not exist.")
- try: # use a try/finally block to restore the user's current history
+ try: # use a try/finally block to restore the user's current history
if kwargs:
# If kwargs were provided, the states for each step should have
# been POSTed
# List to gather values for the template
- invocations=[]
+ invocations = []
for (kwargs, multi_input_keys) in _expand_multiple_inputs(kwargs, mode=multiple_input_mode):
for step in workflow.steps:
step.upgrade_messages = {}
@@ -1372,6 +1378,7 @@
job = None
if step.type == 'tool' or step.type is None:
tool = trans.app.toolbox.get_tool( step.tool_id )
+
# Connect up
def callback( input, value, prefixed_name, prefixed_label ):
replacement = None
@@ -1465,7 +1472,7 @@
steps=[],
workflow=stored,
hide_fixed_params=hide_fixed_params,
- missing_tools = missing_tools)
+ missing_tools=missing_tools)
# Render the form
stored.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, stored )
return trans.fill_template(
@@ -1525,7 +1532,7 @@
else:
outputs.remove(existing_output.output_name)
for outputname in outputs:
- m = model.WorkflowOutput(workflow_step_id = int(step.id), output_name = outputname)
+ m = model.WorkflowOutput(workflow_step_id=int(step.id), output_name=outputname)
trans.sa_session.add(m)
# Prepare each step
trans.sa_session.flush()
@@ -1562,7 +1569,8 @@
workflow=stored,
has_upgrade_messages=has_upgrade_messages,
errors=errors,
- incoming=kwargs )
+ incoming=kwargs
+ )
@web.expose
def configure_menu( self, trans, workflow_ids=None ):
@@ -1619,8 +1627,8 @@
in_pos = {}
out_pos = {}
margin = 5
- line_px = 16 # how much spacing between input/outputs
- widths = {} # store px width for boxes of each step
+ line_px = 16 # how much spacing between input/outputs
+ widths = {} # store px width for boxes of each step
max_width, max_x, max_y = 0, 0, 0
for step in workflow.steps:
@@ -1651,7 +1659,7 @@
y += 45
for di in module.get_data_inputs():
- cur_y = y+count*line_px
+ cur_y = y + count * line_px
if step.order_index not in in_pos:
in_pos[step.order_index] = {}
in_pos[step.order_index][di['name']] = (x, cur_y)
@@ -1659,12 +1667,11 @@
count += 1
max_len = max(max_len, len(di['label']))
-
if len(module.get_data_inputs()) > 0:
y += 15
for do in module.get_data_outputs():
- cur_y = y+count*line_px
+ cur_y = y + count * line_px
if step.order_index not in out_pos:
out_pos[step.order_index] = {}
out_pos[step.order_index][do['name']] = (x, cur_y)
@@ -1672,7 +1679,7 @@
count += 1
max_len = max(max_len, len(do['name']))
- widths[step.order_index] = max_len*5.5
+ widths[step.order_index] = max_len * 5.5
max_x = max(max_x, step.position['left'])
max_y = max(max_y, step.position['top'])
max_width = max(max_width, widths[step.order_index])
@@ -1680,7 +1687,7 @@
for step_dict in data:
width = widths[step_dict['id']]
x, y = step_dict['position']['left'], step_dict['position']['top']
- boxes.append( svgfig.Rect(x-margin, y, x+width-margin, y+30, fill="#EBD9B2").SVG() )
+ boxes.append( svgfig.Rect(x - margin, y, x + width - margin, y + 30, fill="#EBD9B2").SVG() )
box_height = (len(step_dict['data_inputs']) + len(step_dict['data_outputs'])) * line_px + margin
# Draw separator line
@@ -1709,8 +1716,8 @@
return canvas
+
## ---- Utility methods -------------------------------------------------------
-
def attach_ordered_steps( workflow, steps ):
ordered_steps = order_workflow_steps( steps )
if ordered_steps:
@@ -1722,6 +1729,7 @@
workflow.has_cycles = True
workflow.steps = steps
+
def edgelist_for_workflow_steps( steps ):
"""
Create a list of tuples representing edges between ``WorkflowSteps`` based
@@ -1735,6 +1743,7 @@
edges.append( ( steps_to_index[conn.output_step], steps_to_index[conn.input_step] ) )
return edges
+
def order_workflow_steps( steps ):
"""
Perform topological sort of the steps, return ordered or None
@@ -1744,7 +1753,7 @@
if not step.position or not 'left' in step.position or not 'top' in step.position:
position_data_available = False
if position_data_available:
- steps.sort(cmp=lambda s1,s2: cmp( math.sqrt(s1.position['left']**2 + s1.position['top']**2), math.sqrt(s2.position['left']**2 + s2.position['top']**2)))
+ steps.sort(cmp=lambda s1, s2: cmp( math.sqrt(s1.position['left'] ** 2 + s1.position['top'] ** 2), math.sqrt(s2.position['left'] ** 2 + s2.position['top'] ** 2)))
try:
edges = edgelist_for_workflow_steps( steps )
node_order = topsort( edges )
@@ -1752,6 +1761,7 @@
except CycleError:
return None
+
def order_workflow_steps_with_levels( steps ):
try:
return topsort_levels( edgelist_for_workflow_steps( steps ) )
@@ -1768,6 +1778,7 @@
self.is_fake = True
self.id = "fake_%s" % dataset.id
+
def get_job_dict( trans ):
"""
Return a dictionary of Job -> [ Dataset ] mappings, for all finished
@@ -1799,6 +1810,7 @@
jobs[ job ] = [ ( assoc.name, dataset ) ]
return jobs, warnings
+
def cleanup_param_values( inputs, values ):
"""
Remove 'Data' values from `param_values`, along with metadata cruft,
@@ -1809,6 +1821,7 @@
if 'dbkey' in values:
del values['dbkey']
root_values = values
+
# Recursively clean data inputs and dynamic selects
def cleanup( prefix, inputs, values ):
for key, input in inputs.items():
@@ -1821,7 +1834,7 @@
# HACK: Nested associations are not yet working, but we
# still need to clean them up so we can serialize
# if not( prefix ):
- if tmp: #this is false for a non-set optional dataset
+ if tmp: # this is false for a non-set optional dataset
if not isinstance(tmp, list):
associations.append( ( tmp.hid, prefix + key ) )
else:
@@ -1846,6 +1859,7 @@
cleanup( "", inputs, values )
return associations
+
def _build_workflow_on_str(instance_ds_names):
# Returns suffix for new histories based on multi input iteration
num_multi_inputs = len(instance_ds_names)
https://bitbucket.org/galaxy/galaxy-central/commits/d283386f6032/
Changeset: d283386f6032
User: jmchilton
Date: 2014-01-27 05:17:01
Summary: API testing helpers to run methods as alternative users.
The previous run_as method has some potential limitations; testing permission/security behavior seems more correct when actually using a normal alternative user's API key.
Affected #: 1 file
diff -r ec858cad8c51d5190237eaacb31c51841e2e2843 -r d283386f60325faafa4b8cb68593c59d32e768bf test/base/api.py
--- a/test/base/api.py
+++ b/test/base/api.py
@@ -1,3 +1,5 @@
+from contextlib import contextmanager
+
# TODO: We don't need all of TwillTestCase, strip down to a common super class
# shared by API and Twill test cases.
from .twilltestcase import TwillTestCase
@@ -11,6 +13,7 @@
TEST_USER = "user@bx.psu.edu"
+DEFAULT_OTHER_USER = "otheruser@bx.psu.edu" # A second user for API testing.
# TODO: Allow these to point at existing Galaxy instances.
@@ -39,6 +42,30 @@
user = [ user for user in users if user["email"] == email ][0]
return user
+ def _setup_user_get_key( self, email ):
+ self.galaxy_interactor.ensure_user_with_email( email )
+ users = self._get( "users", admin=True ).json()
+ user = [ user for user in users if user["email"] == email ][0]
+ return self._post( "users/%s/api_key" % user[ "id" ], admin=True ).json()
+
+ @contextmanager
+ def _different_user( self, email=DEFAULT_OTHER_USER ):
+ """ Use in test cases to switch get/post operations to act as new user,
+
+ with self._different_user( "other_user@bx.psu.edu" ):
+ self._get( "histories" ) # Gets other_user@bx.psu.edu histories.
+ """
+ original_api_key = self.user_api_key
+ original_interactor_key = self.galaxy_interactor.api_key
+ new_key = self._setup_user_get_key( email )
+ try:
+ self.user_api_key = new_key
+ self.galaxy_interactor.api_key = new_key
+ yield
+ finally:
+ self.user_api_key = original_api_key
+ self.galaxy_interactor.api_key = original_interactor_key
+
def _get( self, *args, **kwds ):
return self.galaxy_interactor.get( *args, **kwds )
@@ -51,7 +78,7 @@
try:
body = response.json()
except Exception:
- body = "INVALID JSON RESPONSE"
+ body = "INVALID JSON RESPONSE <%s>" % response.content
assertion_message_template = "Request status code (%d) was not expected value %d. Body was %s"
assertion_message = assertion_message_template % ( response_status_code, expected_status_code, body )
raise AssertionError( assertion_message )
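The _different_user helper added above is a standard key-swapping context manager: it stashes the current API key, installs the alternative user's key, and restores the original in a finally block so a failing test cannot leak the wrong identity into later requests. A minimal self-contained sketch of the pattern, using a hypothetical Client class rather than Galaxy's interactor:

    from contextlib import contextmanager

    class Client(object):
        def __init__(self, api_key):
            self.api_key = api_key

    @contextmanager
    def different_user(client, new_key):
        # Swap in the alternative user's key; the finally clause guarantees
        # the original key is restored even if the with-body raises.
        original = client.api_key
        client.api_key = new_key
        try:
            yield client
        finally:
            client.api_key = original

    client = Client("owner-key")
    with different_user(client, "other-key"):
        assert client.api_key == "other-key"  # requests here act as the other user
    assert client.api_key == "owner-key"      # original identity restored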
https://bitbucket.org/galaxy/galaxy-central/commits/bbd852d8ed7c/
Changeset: bbd852d8ed7c
User: jmchilton
Date: 2014-01-27 05:17:01
Summary: Workflow API tests for deletion functionality.
Various refactoring to reduce code duplication (in this and subsequent changesets).
Affected #: 1 file
diff -r d283386f60325faafa4b8cb68593c59d32e768bf -r bbd852d8ed7c125844aac9f38a85d5246bbd845b test/functional/api/test_workflows.py
--- a/test/functional/api/test_workflows.py
+++ b/test/functional/api/test_workflows.py
@@ -5,6 +5,8 @@
import time
from .helpers import TestsDatasets
+from base.interactor import delete_request # requests like delete
+
workflow_str = resource_string( __name__, "test_workflow_1.ga" )
@@ -15,6 +17,23 @@
# - Much more testing obviously, always more testing.
class WorkflowsApiTestCase( api.ApiTestCase, TestsDatasets ):
+ def test_delete( self ):
+ workflow_id = self._simple_workflow( "test_delete" )
+ workflow_name = "test_delete (imported from API)"
+ self._assert_user_has_workflow_with_name( workflow_name )
+ workflow_url = self._api_url( "workflows/%s" % workflow_id, use_key=True )
+ delete_response = delete_request( workflow_url )
+ self._assert_status_code_is( delete_response, 200 )
+ # Make sure workflow is no longer in index by default.
+ assert workflow_name not in self.__workflow_names()
+
+ def test_other_cannot_delete( self ):
+ workflow_id = self._simple_workflow( "test_other_delete" )
+ with self._different_user():
+ workflow_url = self._api_url( "workflows/%s" % workflow_id, use_key=True )
+ delete_response = delete_request( workflow_url )
+ self._assert_status_code_is( delete_response, 403 )
+
def test_index( self ):
index_response = self._get( "workflows" )
self._assert_status_code_is( index_response, 200 )
@@ -29,7 +48,7 @@
self._assert_user_has_workflow_with_name( "test_import (imported from API)" )
def test_export( self ):
- uploaded_workflow_id = self._create_workflow( self._load_workflow( name="test_for_export" ) )
+ uploaded_workflow_id = self._simple_workflow( "test_for_export" )
download_response = self._get( "workflows/%s/download" % uploaded_workflow_id )
self._assert_status_code_is( download_response, 200 )
downloaded_workflow = download_response.json()
@@ -100,9 +119,14 @@
def _ds_entry( self, hda ):
return dict( src="hda", id=hda[ "id" ] )
- def _create_workflow( self, workflow ):
+ def _simple_workflow( self, name, **create_kwds ):
+ workflow = self._load_workflow( name )
+ return self._create_workflow( workflow, **create_kwds )
+
+ def _create_workflow( self, workflow, **create_kwds ):
data = dict(
workflow=dumps( workflow ),
+ **create_kwds
)
upload_response = self._post( "workflows/upload", data=data )
self._assert_status_code_is( upload_response, 200 )
@@ -110,10 +134,14 @@
return uploaded_workflow_id
def _assert_user_has_workflow_with_name( self, name ):
+ names = self.__workflow_names()
+ assert name in names, "No workflows with name %s in users workflows <%s>" % ( name, names )
+
+ def __workflow_names( self ):
index_response = self._get( "workflows" )
self._assert_status_code_is( index_response, 200 )
names = map( lambda w: w[ "name" ], index_response.json() )
- assert name in names, "No workflows with name %s in users workflows <%s>" % ( name, names )
+ return names
def _load_workflow( self, name, add_pja=False ):
workflow = loads( workflow_str )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/03f7d79212ec/
Changeset: 03f7d79212ec
Branch: stable
User: jmchilton
Date: 2014-01-27 03:47:59
Summary: Bugfix: Disallow shared workflows from being inappropriately deletable...
... by the user the workflow is shared with, via the workflow API.
Affected #: 1 file
diff -r 87b586afb05428a3b0ae37d68c79241994a27021 -r 03f7d79212ecf626af4940296e663c376b9cd2ee lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -296,9 +296,8 @@
# check to see if user has permissions to selected workflow
if stored_workflow.user != trans.user and not trans.user_is_admin():
- if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
- trans.response.status = 403
- return("Workflow is not owned by or shared with current user")
+ trans.response.status = 403
+ return("Workflow is not owned by current user")
#Mark a workflow as deleted
stored_workflow.deleted = True
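The removed branch had allowed anyone the workflow was shared with to delete it; after the change, deletion requires ownership or admin rights. A one-function sketch of the tightened rule, with hypothetical stand-ins for Galaxy's transaction and model objects:

    def can_delete(owner, current_user, is_admin=False):
        # Sharing no longer confers delete rights: only the owner or an admin qualifies.
        return owner == current_user or is_admin

    assert can_delete("alice", "alice")               # owner may delete
    assert can_delete("alice", "bob", is_admin=True)  # admin may delete
    assert not can_delete("alice", "bob")             # shared-with user gets a 403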
https://bitbucket.org/galaxy/galaxy-central/commits/a7ac443f9c9b/
Changeset: a7ac443f9c9b
User: jmchilton
Date: 2014-01-27 03:48:23
Summary: Merge latest stable.
Affected #: 1 file
diff -r e410940a015c5245f2d065b8f7a9eda8da56d966 -r a7ac443f9c9b7b5383284149db4bcc6a99e4b124 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -325,9 +325,8 @@
# check to see if user has permissions to selected workflow
if stored_workflow.user != trans.user and not trans.user_is_admin():
- if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
- trans.response.status = 403
- return("Workflow is not owned by or shared with current user")
+ trans.response.status = 403
+ return("Workflow is not owned by current user")
#Mark a workflow as deleted
stored_workflow.deleted = True
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Timing and statistics improvements in the tool shed's install and test framework.
by commits-noreply@bitbucket.org 26 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e410940a015c/
Changeset: e410940a015c
User: greg
Date: 2014-01-26 23:41:11
Summary: Timing and statistics improvements in the tool shed's install and test framework.
Affected #: 1 file
diff -r d658d4ec711d0c778b9ce07d700c3d3fee9aeca0 -r e410940a015c5245f2d065b8f7a9eda8da56d966 test/install_and_test_tool_shed_repositories/base/util.py
--- a/test/install_and_test_tool_shed_repositories/base/util.py
+++ b/test/install_and_test_tool_shed_repositories/base/util.py
@@ -878,24 +878,25 @@
else:
# The assumption is that the Tool Shed's install and test framework is executed no more than once per 24 hour
# period, so check the required repository's time_last_tested value to see if its tool_test_results column
- # has been updated within the past 24 hours. The RepositoryMetadata class's to_dict() method returns the value
- # of time_last_tested in datetime.isoformat().
+ # has been updated within the past 20 hours to allow for differing test run times (some may be slower than
+ # others). The RepositoryMetadata class's to_dict() method returns the value of time_last_tested in
+ # datetime.isoformat().
time_last_tested, error_message = get_time_last_tested( galaxy_tool_shed_url, required_repository_metadata_id )
print 'Value of time_last_tested: %s' % str( time_last_tested )
if time_last_tested is None:
print 'The time_last_tested column value is None for version %s of repository dependency %s owned by %s.' % \
( changeset_revision, name, owner )
else:
- twenty_four_hours_ago = ( datetime.utcnow() - timedelta( hours=24 ) ).isoformat()
- print 'Value of twenty_four_hours_ago: %s' % str( twenty_four_hours_ago )
+ twenty_hours_ago = ( datetime.utcnow() - timedelta( hours=20 ) ).isoformat()
+ print 'Value of twenty_hours_ago: %s' % str( twenty_hours_ago )
# This is counter intuitive because the following check is on strings like this: '2014-01-21T19:46:06.953741',
- # so if "time_last_tested > twenty_four_hours_ago" is True, then it implies that the time_last_tested column
- # was actually updated less than 24 hours ago, and should not be updated again because we're likely processing
+ # so if "time_last_tested > twenty_hours_ago" is True, then it implies that the time_last_tested column
+ # was actually updated less than 20 hours ago, and should not be updated again because we're likely processing
# another dependent repository, many of which can have the same repository dependency.
try:
# Be very conservative here. Our default behavior will be to assume containers have not been populated
# during the current test run.
- already_populated = time_last_tested > twenty_four_hours_ago
+ already_populated = time_last_tested > twenty_hours_ago
except Exception, e:
log.exception( 'Error attempting to set already_populated: %s' % str( e ) )
already_populated = False
@@ -903,10 +904,10 @@
if already_populated:
print 'The install containers for version %s of repository dependency %s owned by %s have been ' % \
( changeset_revision, name, owner )
- print 'populated within the past 24 hours (likely in this test run), so skipping this check.'
+ print 'populated within the past 20 hours (likely in this test run), so skipping this check.'
continue
else:
- print 'Version %s of repository dependency %s owned by %s was last tested more than 24 hours ago.' % \
+ print 'Version %s of repository dependency %s owned by %s was last tested more than 20 hours ago.' % \
( changeset_revision, name, owner )
# Inspect the tool_test_results_dict for the last test run to see if it has not yet been populated.
if len( tool_test_results_dicts ) == 0:
@@ -996,27 +997,27 @@
print "# Repository revisions processed: %s" % str( total_repositories_processed )
if successful_repository_installations:
print "# ----------------------------------------------------------------------------------"
- print "# The following revisions were successfully installed:"
+ print "# The following %d revisions were successfully installed:" % len( successful_repository_installations )
display_repositories_by_owner( successful_repository_installations )
if repositories_with_installation_error:
print "# ----------------------------------------------------------------------------------"
- print "# The following revisions have installation errors:"
+ print "# The following %d revisions have installation errors:" % len( repositories_with_installation_error )
display_repositories_by_owner( repositories_with_installation_error )
if successful_tool_dependency_installations:
print "# ----------------------------------------------------------------------------------"
- print "# The following tool dependencies were successfully installed:"
+ print "# The following %d tool dependencies were successfully installed:" % len( successful_tool_dependency_installations )
display_tool_dependencies_by_name( successful_tool_dependency_installations )
if tool_dependencies_with_installation_error:
print "# ----------------------------------------------------------------------------------"
- print "# The following tool dependencies have installation errors:"
+ print "# The following %d tool dependencies have installation errors:" % len( tool_dependencies_with_installation_error )
display_tool_dependencies_by_name( tool_dependencies_with_installation_error )
if all_tests_passed:
print '# ----------------------------------------------------------------------------------'
- print "# The following revisions successfully passed all functional tests:"
+ print "# The following %d revisions successfully passed all functional tests:" % len( all_tests_passed )
display_repositories_by_owner( all_tests_passed )
if at_least_one_test_failed:
print '# ----------------------------------------------------------------------------------'
- print "# The following revisions failed at least 1 functional test:"
+ print "# The following %d revisions failed at least 1 functional test:" % len( at_least_one_test_failed )
display_repositories_by_owner( at_least_one_test_failed )
print "####################################################################################"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/