2 // FORESTER -- software libraries and applications
3 // for evolutionary biology research and applications.
5 // Copyright (C) 2008-2009 Christian M. Zmasek
6 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
7 // Copyright (C) 2000-2001 Washington University School of Medicine
8 // and Howard Hughes Medical Institute
11 // This library is free software; you can redistribute it and/or
12 // modify it under the terms of the GNU Lesser General Public
13 // License as published by the Free Software Foundation; either
14 // version 2.1 of the License, or (at your option) any later version.
16 // This library is distributed in the hope that it will be useful,
17 // but WITHOUT ANY WARRANTY; without even the implied warranty of
18 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 // Lesser General Public License for more details.
21 // You should have received a copy of the GNU Lesser General Public
22 // License along with this library; if not, write to the Free Software
23 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
25 // Contact: phylosoft @ gmail . com
26 // WWW: www.phylosoft.org/forester
28 package org.forester.sdi;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.PrintWriter;
34 import java.util.ArrayList;
35 import java.util.Arrays;
36 import java.util.Collections;
37 import java.util.HashMap;
38 import java.util.List;
40 import org.forester.evoinference.matrix.distance.DistanceMatrix;
41 import org.forester.io.parsers.PhylogenyParser;
42 import org.forester.io.parsers.SymmetricalDistanceMatrixParser;
43 import org.forester.io.parsers.nhx.NHXParser;
44 import org.forester.io.parsers.util.ParserUtils;
45 import org.forester.phylogeny.Phylogeny;
46 import org.forester.phylogeny.PhylogenyMethods;
47 import org.forester.phylogeny.PhylogenyNode;
48 import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
49 import org.forester.phylogeny.factories.PhylogenyFactory;
50 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
51 import org.forester.util.ForesterUtil;
54 * @author Christian M. Zmasek
// NOTE(review): this file is a damaged extraction -- original line numbers are
// fused into each line and many interior lines are elided (bodies, braces).
// Comments below describe only what the visible code shows.
// RIO: tallies, over many bootstrap gene trees, how often each sequence is an
// ortholog / super ortholog / subtree neighbor / ultra paralog of a query.
56 public final class RIO {
// Flags controlling how SDIR roots each gene tree before duplication inference.
58 private final static boolean ROOT_BY_MINIMIZING_MAPPING_COST = false;
59 private final static boolean ROOT_BY_MINIMIZING_SUM_OF_DUPS = true;
60 private final static boolean ROOT_BY_MINIMIZING_TREE_HEIGHT = true;
// When true, inferOrthologs records wall-clock time (see getTime()).
61 private final static boolean TIME = false;
// Per-query observation counters: query seq name -> (other seq name -> count).
62 private HashMap<String, HashMap<String, Integer>> _o_hash_maps;
63 private HashMap<String, HashMap<String, Integer>> _so_hash_maps;
64 private HashMap<String, HashMap<String, Integer>> _up_hash_maps;
65 private HashMap<String, HashMap<String, Integer>> _sn_hash_maps; // HashMap of HashMaps
// Optional distance data: matrix (_m) or per-name list (_l); either may be null.
66 private DistanceMatrix _m;
67 private HashMap<String, Double> _l;
// External sequence names of the first gene tree; basis for all iteration.
68 private List<String> _seq_names;
69 private int _bootstraps;
70 private int _ext_nodes_;
74 * Default constructor.
81 * Returns the numbers of trees analyzed.
83 * @return the numbers of trees analyzed
// Body elided in this extraction -- presumably returns _bootstraps (TODO confirm).
85 public final int getBootstraps() {
89 // Helper method for inferredOrthologsToString.
90 // inferredOrthologsToArrayList,
91 // and inferredUltraParalogsToString.
// Converts an observation count for 'name' into a percentage of analyzed trees.
// The branch for a missing key is elided; only the hit path is visible.
92 private final double getBootstrapValueFromHash( final HashMap<String, Integer> h, final String name ) {
93 if ( !h.containsKey( name ) ) {
96 final int i = h.get( name );
97 return ( i * 100.0 / getBootstraps() );
101 * Returns the distance to a sequences/taxa after a distance list file has
102 * been read in with readDistanceList(File). Throws an exception if name is
103 * not found or if no list has been read in.
// Single-name distance lookup against the distance list _l. The guard condition
// preceding the RuntimeException (likely a _l == null check) is elided.
108 public final double getDistance( String name ) {
109 double distance = 0.0;
112 throw new RuntimeException( "Distance list has probably not been read in (successfully)." );
114 if ( _l.get( name ) == null ) {
115 throw new IllegalArgumentException( name + " not found." );
117 distance = ( _l.get( name ) ).doubleValue();
// Pairwise distance lookup against the distance matrix _m; the catch body and
// its return value are elided from this view.
121 public final double getDistance( final String name1, final String name2 ) {
123 return _m.getValue( _m.getIndex( name1 ), _m.getIndex( name2 ) );
125 catch ( final Exception e ) {
131 * Returns the numbers of number of ext nodes in gene trees analyzed (after
134 * @return number of ext nodes in gene trees analyzed (after stripping)
// Body elided -- presumably returns _ext_nodes_ (TODO confirm).
136 public final int getExtNodesOfAnalyzedGeneTrees() {
141 * Returns a HashMap containing the inferred orthologs of the external gene
142 * tree node with the sequence name seq_name. Sequence names are the keys
143 * (String), numbers of observations are the values (Int). Orthologs are to
144 * be inferred by method "inferOrthologs". Throws an exception if seq_name
148 * sequence name of a external node of the gene trees
149 * @return HashMap containing the inferred orthologs
150 * (name(String)->value(Int))
// The action taken when _o_hash_maps is null is elided from this view.
152 public final HashMap<String, Integer> getInferredOrthologs( final String seq_name ) {
153 if ( _o_hash_maps == null ) {
156 return _o_hash_maps.get( seq_name );
// Per-query subtree-neighbor counts; mirrors getInferredOrthologs. The branch
// taken when _sn_hash_maps is null is elided.
159 private final HashMap<String, Integer> getInferredSubtreeNeighbors( final String seq_name ) {
160 if ( _sn_hash_maps == null ) {
163 return _sn_hash_maps.get( seq_name );
167 * Returns a HashMap containing the inferred "super orthologs" of the
168 * external gene tree node with the sequence name seq_name. Sequence names
169 * are the keys (String), numbers of observations are the values (Int).
170 * Super orthologs are to be inferred by method "inferOrthologs". Throws an
171 * exception if seq_name is not found.
174 * sequence name of a external node of the gene trees
175 * @return HashMap containing the inferred super orthologs
176 * (name(String)->value(Int))
// The action taken when _so_hash_maps is null is elided from this view.
178 public final HashMap<String, Integer> getInferredSuperOrthologs( final String seq_name ) {
179 if ( _so_hash_maps == null ) {
182 return _so_hash_maps.get( seq_name );
186 * Returns a HashMap containing the inferred "ultra paralogs" of the
187 * external gene tree node with the sequence name seq_name. Sequence names
188 * are the keys (String), numbers of observations are the values (Int).
189 * "ultra paralogs" are to be inferred by method "inferOrthologs". Throws an
190 * exception if seq_name is not found.
193 * sequence name of a external node of the gene trees
194 * @return HashMap containing the inferred ultra paralogs
195 * (name(String)->value(Int))
// The action taken when _up_hash_maps is null is elided from this view.
197 public final HashMap<String, Integer> getInferredUltraParalogs( final String seq_name ) {
198 if ( _up_hash_maps == null ) {
201 return _up_hash_maps.get( seq_name );
205 * Returns the time (in ms) needed to run "inferOrthologs". Final variable
206 * TIME needs to be set to true.
208 * @return time (in ms) needed to run method "inferOrthologs"
// Body elided -- presumably returns the _time field set in inferOrthologs.
210 public long getTime() {
215 * Infers the orthologs (as well the "super orthologs", the "subtree
216 * neighbors", and the "ultra paralogs") for each external node of the gene
217 * Trees in multiple tree File gene_trees_file (=output of PHYLIP NEIGHBOR,
218 * for example). Tallies how many times each sequence is (super-)
219 * orthologous towards the query. Tallies how many times each sequence is
220 * ultra paralogous towards the query. Tallies how many times each sequence
221 * is a subtree neighbor of the query. Gene duplications are inferred using
222 * SDI. Modifies its argument species_tree. Is a little faster than
223 * "inferOrthologs(File,Phylogeny)" since orthologs are only inferred for
226 * To obtain the results use the methods listed below.
228 * @param gene_trees_file
229 * a File containing gene Trees in NH format, which is the result
230 * of performing a bootstrap analysis in PHYLIP
231 * @param species_tree
232 * a species Phylogeny, which has species names in its species
235 * the sequence name of the squence whose orthologs are to be
237 * @throws SDIException
239 public void inferOrthologs( final File gene_trees_file, final Phylogeny species_tree, final String query )
240 throws IOException, SDIException {
// Timing start (the TIME guard around this is elided from this view).
243 _time = System.currentTimeMillis();
245 // Read in first tree to get its sequence names
246 // and strip species_tree.
247 final PhylogenyFactory factory = ParserBasedPhylogenyFactory.getInstance();
248 final PhylogenyParser p = ParserUtils.createParserDependingOnFileType( gene_trees_file, true );
249 if ( p instanceof NHXParser ) {
250 final NHXParser nhx = ( NHXParser ) p;
251 nhx.setReplaceUnderscores( false );
252 nhx.setIgnoreQuotes( true );
253 nhx.setTaxonomyExtraction( PhylogenyMethods.TAXONOMY_EXTRACTION.YES );
// Parse only the first gene tree here; the full file is parsed again below.
255 final Phylogeny gene_tree = factory.create( gene_trees_file, p )[ 0 ];
// NOTE(review): leftover debug print -- should be removed or routed to a logger.
256 System.out.println( "species " + species_tree.toString() );
257 // Removes from species_tree all species not found in gene_tree.
258 PhylogenyMethods.taxonomyBasedDeletionOfExternalNodes( gene_tree, species_tree );
259 // System.out.println( "gene " + gene_tree.toString() );
260 // System.out.println( "species " + species_tree.toString() );
261 // Removes from gene_tree all species not found in species_tree.
262 // Archaeopteryx.createApplication( gene_tree );
263 // Archaeopteryx.createApplication( species_tree );
265 // Thread.sleep( 40000 );
267 // catch ( InterruptedException e ) {
268 // // TODO Auto-generated catch block
269 // e.printStackTrace();
271 PhylogenyMethods.taxonomyBasedDeletionOfExternalNodes( species_tree, gene_tree );
272 _seq_names = getAllExternalSequenceNames( gene_tree );
273 if ( ( _seq_names == null ) || ( _seq_names.size() < 1 ) ) {
274 throw new IOException( "could not get sequence names" );
// Fresh counter maps for this run, pre-sized for the query entry.
276 _o_hash_maps = new HashMap<String, HashMap<String, Integer>>();
277 _so_hash_maps = new HashMap<String, HashMap<String, Integer>>();
278 _up_hash_maps = new HashMap<String, HashMap<String, Integer>>();
279 _sn_hash_maps = new HashMap<String, HashMap<String, Integer>>();
280 _o_hash_maps.put( query, new HashMap<String, Integer>( _seq_names.size() ) );
281 _so_hash_maps.put( query, new HashMap<String, Integer>( _seq_names.size() ) );
282 _up_hash_maps.put( query, new HashMap<String, Integer>( _seq_names.size() ) );
283 _sn_hash_maps.put( query, new HashMap<String, Integer>( _seq_names.size() ) );
284 // Go through all gene trees in the file.
// NOTE(review): the file is parsed a second time here (first parse at line 255);
// potentially expensive for large bootstrap sets -- consider parsing once.
285 final Phylogeny[] gene_trees = factory.create( gene_trees_file, p );
286 for( final Phylogeny gt : gene_trees ) {
288 // Removes from gene_tree all species not found in species_tree.
289 PhylogenyMethods.taxonomyBasedDeletionOfExternalNodes( species_tree, gt );
290 inferOrthologsHelper( gt, species_tree, query );
291 // System.out.println( bs );
295 _time = ( System.currentTimeMillis() - _time );
// Collects all nodes of phy whose sequence name -- or plain node name when no
// sequence data is attached -- equals seq_name. The add-to-list statements
// inside both if branches are elided from this view.
299 public List<PhylogenyNode> getNodesViaSequenceName( final Phylogeny phy, final String seq_name ) {
300 final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
301 for( final PhylogenyNodeIterator iter = phy.iteratorPreorder(); iter.hasNext(); ) {
302 final PhylogenyNode n = iter.next();
303 if ( n.getNodeData().isHasSequence() && n.getNodeData().getSequence().getName().equals( seq_name ) ) {
306 if ( !n.getNodeData().isHasSequence() && n.getName().equals( seq_name ) ) {
313 // Helper method which performs the actual ortholog inference for
314 // the external node with seqname query.
// For one gene tree: root it via SDIR (using the ROOT_BY_* flags; some infer()
// arguments are elided), locate the unique query node, then tally orthologs,
// super orthologs, subtree neighbors and ultra paralogs into the counter maps.
315 private void inferOrthologsHelper( final Phylogeny gene_tree, final Phylogeny species_tree, final String query )
316 throws SDIException {
317 Phylogeny assigned_tree = null;
318 List<PhylogenyNode> nodes = null;
319 final SDIR sdiunrooted = new SDIR();
320 List<PhylogenyNode> orthologs = null;
321 List<PhylogenyNode> super_orthologs = null;
322 List<PhylogenyNode> ultra_paralogs = null;
323 List<PhylogenyNode> subtree_neighbors = null;
324 assigned_tree = sdiunrooted.infer( gene_tree,
326 RIO.ROOT_BY_MINIMIZING_MAPPING_COST,
327 RIO.ROOT_BY_MINIMIZING_SUM_OF_DUPS,
328 RIO.ROOT_BY_MINIMIZING_TREE_HEIGHT,
331 setExtNodesOfAnalyzedGeneTrees( assigned_tree.getNumberOfExternalNodes() );
332 nodes = getNodesViaSequenceName( assigned_tree, query );
333 if ( nodes.size() > 1 ) {
// NOTE(review): message says "not unique" -- IllegalStateException might fit better,
// but callers may depend on the current type.
334 throw new IllegalArgumentException( "node named [" + query + "] not unique" );
336 else if ( nodes.isEmpty() ) {
337 throw new IllegalArgumentException( "no node containing a sequence named [" + query + "] found" );
339 final PhylogenyNode query_node = nodes.get( 0 );
340 final PhylogenyMethods methods = PhylogenyMethods.getInstance();
341 orthologs = methods.getOrthologousNodes( assigned_tree, query_node );
342 updateHash( _o_hash_maps, query, orthologs );
343 super_orthologs = PhylogenyMethods.getSuperOrthologousNodes( query_node );
344 updateHash( _so_hash_maps, query, super_orthologs );
345 subtree_neighbors = getSubtreeNeighbors( query_node, 2 );
346 updateHash( _sn_hash_maps, query, subtree_neighbors );
347 ultra_paralogs = PhylogenyMethods.getUltraParalogousNodes( query_node );
348 updateHash( _up_hash_maps, query, ultra_paralogs );
352 * Returns an ArrayList containg the names of orthologs of the PhylogenyNode
353 * with seq name seq_name.
356 * sequence name of a external node of the gene trees
357 * @param threshold_orthologs
358 * the minimal number of observations for a a sequence to be
359 * reported as orthologous as percentage (0.0-100.0%)
360 * @return ArrayList containg the names of orthologs of the PhylogenyNode
361 * with seq name seq_name
363 public ArrayList<String> inferredOrthologsToArrayList( final String seq_name, double threshold_orthologs ) {
364 HashMap<String, Integer> o_hashmap = null;
367 final ArrayList<String> arraylist = new ArrayList<String>();
368 if ( _o_hash_maps == null ) {
369 throw new RuntimeException( "Orthologs have not been calculated (successfully)." );
// Clamp the threshold into [0, 100].
371 if ( threshold_orthologs < 0.0 ) {
372 threshold_orthologs = 0.0;
374 else if ( threshold_orthologs > 100.0 ) {
375 threshold_orthologs = 100.0;
377 o_hashmap = getInferredOrthologs( seq_name );
378 if ( o_hashmap == null ) {
379 throw new RuntimeException( "Orthologs for " + seq_name + " were not established." );
// Collect every name (other than the query itself) whose ortholog percentage
// meets the threshold. The continue statements in the skip branches are elided.
381 if ( _seq_names.size() > 0 ) {
382 I: for( int i = 0; i < _seq_names.size(); ++i ) {
383 name = _seq_names.get( i );
384 if ( name.equals( seq_name ) ) {
387 o = getBootstrapValueFromHash( o_hashmap, name );
388 if ( o < threshold_orthologs ) {
391 arraylist.add( name );
398 * Returns a String containg the names of orthologs of the PhylogenyNode
399 * with seq name query_name. The String also contains how many times a
400 * particular ortholog has been observed.
403 * The output order is (per line): Name, Ortholog, Subtree neighbor, Super
407 * The sort priority of this is determined by sort in the following manner:
410 * <li>1 : Ortholog, Super ortholog
411 * <li>2 : Super ortholog, Ortholog
412 * <li>3 : Ortholog, Distance
413 * <li>4 : Distance, Ortholog
414 * <li>5 : Ortholog, Super ortholog, Distance
415 * <li>6 : Ortholog, Distance, Super ortholog
416 * <li>7 : Super ortholog, Ortholog, Distance
417 * <li>8 : Super ortholog, Distance, Ortholog
418 * <li>9 : Distance, Ortholog, Super ortholog
419 * <li>10 : Distance, Super ortholog, Ortholog
420 * <li>11 : Ortholog, Subtree neighbor, Distance
421 * <li>12 : Ortholog, Subtree neighbor, Super ortholog, Distance (default)
422 * <li>13 : Ortholog, Super ortholog, Subtree neighbor, Distance
423 * <li>14 : Subtree neighbor, Ortholog, Super ortholog, Distance
424 * <li>15 : Subtree neighbor, Distance, Ortholog, Super ortholog
425 * <li>16 : Ortholog, Distance, Subtree neighbor, Super ortholog
426 * <li>17 : Ortholog, Subtree neighbor, Distance, Super ortholog
429 * Returns "-" if no putative orthologs have been found (given
430 * threshold_orthologs).
432 * Orthologs are to be inferred by method "inferOrthologs".
434 * (Last modified: 05/08/01)
437 * sequence name of a external node of the gene trees
439 * order and sort priority
440 * @param threshold_orthologs
441 * the minimal number of observations for a a sequence to be
442 * reported as orthologous, in percents (0.0-100.0%)
443 * @param threshold_subtreeneighborings
444 * the minimal number of observations for a a sequence to be
445 * reported as orthologous, in percents (0.0-100.0%)
446 * @return String containing the inferred orthologs, String containing "-"
447 * if no orthologs have been found null in case of error
448 * @see #inferOrthologs(File,Phylogeny,String)
449 * @see #inferOrthologs(Phylogeny[],Phylogeny)
450 * @see #inferOrthologs(File,Phylogeny)
451 * @see #getOrder(int)
// NOTE(review): a parameter line between query_name and threshold_orthologs
// (presumably `int sort`) is elided from this view; `sort` is used below.
453 public StringBuffer inferredOrthologsToString( final String query_name,
455 double threshold_orthologs,
456 double threshold_subtreeneighborings ) {
457 HashMap<String, Integer> o_hashmap = null;
458 HashMap<String, Integer> s_hashmap = null;
459 HashMap<String, Integer> n_hashmap = null;
461 double o = 0.0, // Orthologs.
462 s = 0.0, // Super orthologs.
463 sn = 0.0, // Subtree neighbors.
464 value1 = 0.0, value2 = 0.0, value3 = 0.0, value4 = 0.0, d = 0.0;
465 final ArrayList<Tuplet> nv = new ArrayList<Tuplet>();
466 if ( ( _o_hash_maps == null ) || ( _so_hash_maps == null ) || ( _sn_hash_maps == null ) ) {
467 //TODO ~~~ throw new RuntimeException( "Orthologs have not been calculated (successfully)" );
// Validate sort range; the statement taken when out of range is elided.
469 if ( ( sort < 0 ) || ( sort > 17 ) ) {
// Sort modes > 2 involve distances, so distance data must have been loaded.
472 if ( ( sort > 2 ) && ( _m == null ) && ( _l == null ) ) {
473 throw new RuntimeException( "Distance list or matrix have not been read in (successfully)" );
// Clamp both thresholds into [0, 100].
475 if ( threshold_orthologs < 0.0 ) {
476 threshold_orthologs = 0.0;
478 else if ( threshold_orthologs > 100.0 ) {
479 threshold_orthologs = 100.0;
481 if ( threshold_subtreeneighborings < 0.0 ) {
482 threshold_subtreeneighborings = 0.0;
484 else if ( threshold_subtreeneighborings > 100.0 ) {
485 threshold_subtreeneighborings = 100.0;
487 o_hashmap = getInferredOrthologs( query_name );
488 s_hashmap = getInferredSuperOrthologs( query_name );
489 n_hashmap = getInferredSubtreeNeighbors( query_name );
490 if ( ( o_hashmap == null ) || ( s_hashmap == null ) || ( n_hashmap == null ) ) {
491 //TODO ~~~ throw new RuntimeException( "Orthologs for " + query_name + " were not established" );
493 final StringBuffer orthologs = new StringBuffer();
// Build one Tuplet per qualifying sequence; field order encodes the chosen
// sort mode. The switch/case labels selecting among the nv.add calls below
// are elided from this view.
494 if ( _seq_names.size() > 0 ) {
495 I: for( int i = 0; i < _seq_names.size(); ++i ) {
496 name = _seq_names.get( i );
497 if ( name.equals( query_name ) ) {
500 o = getBootstrapValueFromHash( o_hashmap, name );
501 if ( o < threshold_orthologs ) {
504 sn = getBootstrapValueFromHash( n_hashmap, name );
505 if ( sn < threshold_subtreeneighborings ) {
508 s = getBootstrapValueFromHash( s_hashmap, name );
511 d = getDistance( query_name, name );
514 d = getDistance( name );
519 nv.add( new Tuplet( name, o, 5 ) );
522 nv.add( new Tuplet( name, o, s, 5 ) );
525 nv.add( new Tuplet( name, s, o, 5 ) );
528 nv.add( new Tuplet( name, o, d, 1 ) );
531 nv.add( new Tuplet( name, d, o, 0 ) );
534 nv.add( new Tuplet( name, o, s, d, 2 ) );
537 nv.add( new Tuplet( name, o, d, s, 1 ) );
540 nv.add( new Tuplet( name, s, o, d, 2 ) );
543 nv.add( new Tuplet( name, s, d, o, 1 ) );
546 nv.add( new Tuplet( name, d, o, s, 0 ) );
549 nv.add( new Tuplet( name, d, s, o, 0 ) );
552 nv.add( new Tuplet( name, o, sn, d, 2 ) );
555 nv.add( new Tuplet( name, o, sn, s, d, 3 ) );
558 nv.add( new Tuplet( name, o, s, sn, d, 3 ) );
561 nv.add( new Tuplet( name, sn, o, s, d, 3 ) );
564 nv.add( new Tuplet( name, sn, d, o, s, 1 ) );
567 nv.add( new Tuplet( name, o, d, sn, s, 1 ) );
570 nv.add( new Tuplet( name, o, sn, d, s, 2 ) );
573 nv.add( new Tuplet( name, o, 5 ) );
575 } // End of I for loop.
// Sort the tuplets and render one tab-separated row per sequence.
576 if ( ( nv != null ) && ( nv.size() > 0 ) ) {
577 orthologs.append( "[seq name]\t\t[ortho]\t[st-n]\t[sup-o]\t[dist]" + ForesterUtil.LINE_SEPARATOR );
578 final Tuplet[] nv_array = new Tuplet[ nv.size() ];
579 for( int j = 0; j < nv.size(); ++j ) {
580 nv_array[ j ] = nv.get( j );
582 Arrays.sort( nv_array );
583 for( int i = 0; i < nv_array.length; ++i ) {
584 name = nv_array[ i ].getKey();
585 value1 = nv_array[ i ].getValue1();
586 value2 = nv_array[ i ].getValue2();
587 value3 = nv_array[ i ].getValue3();
588 value4 = nv_array[ i ].getValue4();
589 orthologs.append( addNameAndValues( name, value1, value2, value3, value4, sort ) );
593 // No orthologs found.
594 if ( ( orthologs == null ) || ( orthologs.length() < 1 ) ) {
595 orthologs.append( "-" );
598 } // inferredOrthologsToString( String, int, double )
600 // Helper method for inferredOrthologTableToFile.
601 // Returns individual rows for the table as String.
// Builds one tab-separated table row of observation counts for name2 against
// every name in names. A parameter between names and super_orthologs
// (presumably the row index) is elided from this view, as is the return.
602 private String inferredOrthologsToTableHelper( final String name2,
603 final List<String> names,
605 final boolean super_orthologs ) {
606 HashMap<String, Integer> hashmap = null;
// NOTE(review): `new String( "" )` is redundant -- plain "" suffices (kept as-is).
607 String name = null, orthologs = new String( "" );
609 if ( !super_orthologs ) {
610 hashmap = getInferredOrthologs( name2 );
613 hashmap = getInferredSuperOrthologs( name2 );
615 if ( hashmap == null ) {
616 throw new RuntimeException( "Unexpected failure in method inferredOrthologsToTableHelper" );
618 for( int i = 0; i < names.size(); ++i ) {
619 name = names.get( i );
620 if ( !hashmap.containsKey( name ) ) {
624 value = hashmap.get( name );
// Sanity-check branch; its guarding condition is elided from this view.
629 throw new RuntimeException( "Failed sanity check in method inferredOrthologsToTableHelper: value not 0." );
631 orthologs += ( " " + "\t" );
634 orthologs += ( value + "\t" );
641 * Writes the orthologs for each external node of the gene trees to outfile
642 * in the form of a table. Orthologs are to be inferred by method
643 * "inferOrthologs". Overwrites without asking! (Last modified: 12/07/00)
646 * the File to write to
648 public void inferredOrthologTableToFile( final File outfile ) throws IOException {
// The action taken when orthologs were not yet calculated is elided.
649 if ( _o_hash_maps == null ) {
// Delegates to the private overload with super_orthologs == false.
652 inferredOrthologTableToFile( outfile, false );
655 // Helper for inferredOrthologTableToFile(File).
656 // (Last modified: 11/28/00)
// Writes the (super) ortholog count table: a numeric header row, then one row
// per sequence, padding short names with extra tabs for rough column alignment.
// NOTE(review): sorts _seq_names in place -- a visible side effect on shared
// state. The PrintWriter is not wrapped in try/finally in the visible code;
// the close() call, if any, is elided from this view.
657 private void inferredOrthologTableToFile( final File outfile, final boolean super_orthologs ) throws IOException {
658 String name = "", line = "";
659 PrintWriter out = null;
660 if ( _seq_names == null ) {
661 throw new RuntimeException( "inferredOrthologTableToFile: seq_names_ is null." );
663 Collections.sort( _seq_names );
664 out = new PrintWriter( new FileWriter( outfile ), true );
666 for( int i = 0; i < _seq_names.size(); ++i ) {
667 line += ( i + ")\t" );
671 for( int i = 0; i < _seq_names.size(); ++i ) {
672 name = _seq_names.get( i );
673 if ( name.length() < 8 ) {
674 line = i + ")\t" + name + "\t\t\t";
676 else if ( name.length() < 16 ) {
677 line = i + ")\t" + name + "\t\t";
680 line = i + ")\t" + name + "\t";
682 line += inferredOrthologsToTableHelper( name, _seq_names, i, super_orthologs );
689 * Writes the "super orthologs" for each external nodes of the gene trees to
690 * outfile in the form of a table. Super orthologs are to be inferred by
691 * method "inferOrthologs". Overwrites without asking!
694 * the File to write to
696 public void inferredSuperOrthologTableToFile( final File outfile ) throws IOException {
// The action taken when super orthologs were not yet calculated is elided.
697 if ( _so_hash_maps == null ) {
// Delegates to the private overload with super_orthologs == true.
700 inferredOrthologTableToFile( outfile, true );
704 * Returns a String containg the names of orthologs of the PhylogenyNode
705 * with seq name query_name. The String also contains how many times a
706 * particular ortholog has been observed. Returns "-" if no putative
707 * orthologs have been found (given threshold_orthologs).
709 * Orthologs are to be inferred by method "inferOrthologs".
712 * sequence name of a external node of the gene trees
713 * @param return_dists
714 * @param threshold_ultra_paralogs
716 * @return String containing the inferred orthologs, String containing "-"
717 * if no orthologs have been found null in case of error
719 public String inferredUltraParalogsToString( final String query_name,
720 final boolean return_dists,
721 double threshold_ultra_paralogs ) {
722 HashMap<String, Integer> sp_hashmap = null;
723 String name = "", ultra_paralogs = "";
725 double sp = 0.0, value1 = 0.0, value2 = 0.0, d = 0.0;
726 final List<Tuplet> nv = new ArrayList<Tuplet>();
// Clamp threshold into [1, 100] (note: lower bound is 1, unlike the ortholog
// methods which clamp to 0).
727 if ( threshold_ultra_paralogs < 1.0 ) {
728 threshold_ultra_paralogs = 1.0;
730 else if ( threshold_ultra_paralogs > 100.0 ) {
731 threshold_ultra_paralogs = 100.0;
733 if ( _up_hash_maps == null ) {
734 throw new RuntimeException( "Ultra paralogs have not been calculated (successfully)." );
736 if ( return_dists && ( _m == null ) && ( _l == null ) ) {
737 throw new RuntimeException( "Distance list or matrix have not been read in (successfully)." );
739 sp_hashmap = getInferredUltraParalogs( query_name );
740 if ( sp_hashmap == null ) {
741 throw new RuntimeException( "Ultra paralogs for " + query_name + " were not established" );
// Collect qualifying ultra paralogs, optionally with distances; the continue
// statements and a `sort` assignment appear to be elided from this view.
743 if ( _seq_names.size() > 0 ) {
744 I: for( int i = 0; i < _seq_names.size(); ++i ) {
745 name = _seq_names.get( i );
746 if ( name.equals( query_name ) ) {
749 sp = getBootstrapValueFromHash( sp_hashmap, name );
750 if ( sp < threshold_ultra_paralogs ) {
753 if ( return_dists ) {
755 d = getDistance( query_name, name );
758 d = getDistance( name );
760 nv.add( new Tuplet( name, sp, d, 1 ) );
763 nv.add( new Tuplet( name, sp, 5 ) );
765 } // End of I for loop.
766 if ( ( nv != null ) && ( nv.size() > 0 ) ) {
767 final Tuplet[] nv_array = new Tuplet[ nv.size() ];
768 for( int j = 0; j < nv.size(); ++j ) {
769 nv_array[ j ] = nv.get( j );
771 Arrays.sort( nv_array );
772 if ( return_dists ) {
// NOTE(review): string concatenation in a loop -- StringBuilder would be more
// efficient, but the code is left byte-identical due to elided context.
778 for( int i = 0; i < nv_array.length; ++i ) {
779 name = nv_array[ i ].getKey();
780 value1 = nv_array[ i ].getValue1();
781 value2 = nv_array[ i ].getValue2();
782 ultra_paralogs += addNameAndValues( name, value1, value2, 0.0, 0.0, sort );
786 // No ultra paralogs found.
787 if ( ( ultra_paralogs == null ) || ( ultra_paralogs.length() < 1 ) ) {
788 ultra_paralogs = "-";
790 return ultra_paralogs;
// Parses a symmetrical distance matrix file into _m. Exactly one matrix is
// required; the assignment of matrices[0] to _m is elided from this view.
793 public final void readDistanceMatrix( final File matrix_file ) throws IOException {
794 DistanceMatrix[] matrices = null;
795 final SymmetricalDistanceMatrixParser parser = SymmetricalDistanceMatrixParser.createInstance();
796 matrices = parser.parse( matrix_file );
797 if ( ( matrices == null ) || ( matrices.length == 0 ) ) {
798 throw new IOException( "failed to parse distance matrix from [" + matrix_file + "]" );
800 if ( matrices.length > 1 ) {
// NOTE(review): message typo "more than once distance matrix" (should be "one");
// left byte-identical since this is a doc-only pass.
801 throw new IOException( "[" + matrix_file + "] contains more than once distance matrix" );
807 * Brings this into the same state as immediately after construction.
// Nulls the counter maps (and presumably the other fields; several reset
// statements are elided from this view).
809 private final void reset() {
811 _so_hash_maps = null;
812 _up_hash_maps = null;
822 * Sets the numbers of trees analyzed.
824 * numbers of trees analyzed
// Body elided -- presumably assigns to _bootstraps, possibly clamping i (TODO confirm).
826 private void setBootstraps( int i ) {
834 * Sets number of ext nodes in gene trees analyzed (after stripping).
836 * number of ext nodes in gene trees analyzed (after stripping)
// Body elided -- presumably assigns to _ext_nodes_ (TODO confirm).
838 private void setExtNodesOfAnalyzedGeneTrees( int i ) {
845 // Helper for doInferOrthologs( Phylogeny, Phylogeny, String )
846 // and doInferOrthologs( Phylogeny, Phylogeny ).
// Increments, in the query's counter map, the observation count of every node
// in nodes. Node identity is the sequence name when present and non-empty,
// otherwise the plain node name.
847 private void updateHash( final HashMap<String, HashMap<String, Integer>> counter_map,
848 final String query_seq_name,
849 final List<PhylogenyNode> nodes ) {
850 final HashMap<String, Integer> hash_map = counter_map.get( query_seq_name );
851 if ( hash_map == null ) {
852 throw new RuntimeException( "Unexpected failure in method updateHash." );
854 for( int j = 0; j < nodes.size(); ++j ) {
856 if ( ( nodes.get( j ) ).getNodeData().isHasSequence()
857 && !ForesterUtil.isEmpty( ( nodes.get( j ) ).getNodeData().getSequence().getName() ) ) {
858 seq_name = ( nodes.get( j ) ).getNodeData().getSequence().getName();
861 seq_name = ( nodes.get( j ) ).getName();
// NOTE(review): containsKey + get does two lookups; merge(seq_name, 1, Integer::sum)
// would be the modern idiom -- left as-is in this doc-only pass.
863 if ( hash_map.containsKey( seq_name ) ) {
864 hash_map.put( seq_name, hash_map.get( seq_name ) + 1 );
867 hash_map.put( seq_name, 1 );
872 // Helper method for inferredOrthologsToString
873 // and inferredUltraParalogsToString.
// Formats one output row: name padded with tabs, then 1-4 values in an order
// determined by the sort mode. The parameter list after `name` and all the
// switch/case labels selecting among the addToLine sequences below are elided
// from this view; each run of addToLine calls corresponds to one sort mode.
874 private final static String addNameAndValues( final String name,
880 final java.text.DecimalFormat df = new java.text.DecimalFormat( "0.#####" );
881 df.setDecimalSeparatorAlwaysShown( false );
883 if ( name.length() < 8 ) {
884 line += ( name + "\t\t\t" );
886 else if ( name.length() < 16 ) {
887 line += ( name + "\t\t" );
890 line += ( name + "\t" );
894 line += addToLine( value1, df );
900 line += addToLine( value1, df );
902 line += addToLine( value2, df );
906 line += addToLine( value2, df );
908 line += addToLine( value1, df );
912 line += addToLine( value1, df );
915 line += addToLine( value2, df );
918 line += addToLine( value2, df );
921 line += addToLine( value1, df );
924 line += addToLine( value1, df );
926 line += addToLine( value2, df );
927 line += addToLine( value3, df );
930 line += addToLine( value1, df );
932 line += addToLine( value3, df );
933 line += addToLine( value2, df );
936 line += addToLine( value2, df );
938 line += addToLine( value1, df );
939 line += addToLine( value3, df );
942 line += addToLine( value3, df );
944 line += addToLine( value1, df );
945 line += addToLine( value2, df );
948 line += addToLine( value2, df );
950 line += addToLine( value3, df );
951 line += addToLine( value1, df );
954 line += addToLine( value3, df );
956 line += addToLine( value2, df );
957 line += addToLine( value1, df );
960 line += addToLine( value1, df );
961 line += addToLine( value2, df );
963 line += addToLine( value3, df );
966 line += addToLine( value1, df );
967 line += addToLine( value2, df );
968 line += addToLine( value3, df );
969 line += addToLine( value4, df );
972 line += addToLine( value1, df );
973 line += addToLine( value3, df );
974 line += addToLine( value2, df );
975 line += addToLine( value4, df );
978 line += addToLine( value2, df );
979 line += addToLine( value1, df );
980 line += addToLine( value3, df );
981 line += addToLine( value4, df );
984 line += addToLine( value3, df );
985 line += addToLine( value1, df );
986 line += addToLine( value4, df );
987 line += addToLine( value2, df );
990 line += addToLine( value1, df );
991 line += addToLine( value3, df );
992 line += addToLine( value4, df );
993 line += addToLine( value2, df );
996 line += addToLine( value1, df );
997 line += addToLine( value2, df );
998 line += addToLine( value4, df );
999 line += addToLine( value3, df );
1002 line += addToLine( value1, df );
1006 line += addToLine( value1, df );
1007 line += addToLine( value2, df );
// Each row ends with the platform line separator.
1010 line += ForesterUtil.LINE_SEPARATOR;
1014 // Helper for addNameAndValues.
// Formats a single value as "<formatted>\t", skipping Tuplet.DEFAULT sentinels.
// The declaration of s and the else/return lines are elided from this view.
1015 private final static String addToLine( final double value, final java.text.DecimalFormat df ) {
1017 if ( value != Tuplet.DEFAULT ) {
1018 s = df.format( value ) + "\t";
// Collects a name for every external node of phy: the sequence name when
// present and non-empty, else the node name; a node with neither triggers the
// IllegalArgumentException below (the else branch around it is elided).
1026 private static List<String> getAllExternalSequenceNames( final Phylogeny phy ) {
1027 final List<String> names = new ArrayList<String>();
1028 for( final PhylogenyNodeIterator iter = phy.iteratorExternalForward(); iter.hasNext(); ) {
1029 final PhylogenyNode n = iter.next();
1030 if ( n.getNodeData().isHasSequence() && !ForesterUtil.isEmpty( n.getNodeData().getSequence().getName() ) ) {
1031 names.add( n.getNodeData().getSequence().getName() );
1033 else if ( !ForesterUtil.isEmpty( n.getName() ) ) {
1034 names.add( n.getName() );
1037 throw new IllegalArgumentException( "node has no (sequence) name: " + n );
1044 * Returns the order in which ortholog (o), "super ortholog" (s) and
1045 * distance (d) are returned and sorted (priority of sort always goes from
1046 * left to right), given sort. For the meaning of sort
1048 * @see #inferredOrthologsToString(String,int,double,double)
1051 * determines order and sort priority
1052 * @return String indicating the order
// Maps a sort mode (0-17) to its human-readable description. The switch/case
// labels, the `order` declaration, and the return are elided from this view;
// each assignment below corresponds to one case, the last being the default.
1054 public final static String getOrder( final int sort ) {
1058 order = "orthologies";
1061 order = "orthologies > super orthologies";
1064 order = "super orthologies > orthologies";
1067 order = "orthologies > distance to query";
1070 order = "distance to query > orthologies";
1073 order = "orthologies > super orthologies > distance to query";
1076 order = "orthologies > distance to query > super orthologies";
1079 order = "super orthologies > orthologies > distance to query";
1082 order = "super orthologies > distance to query > orthologies";
1085 order = "distance to query > orthologies > super orthologies";
1088 order = "distance to query > super orthologies > orthologies";
1091 order = "orthologies > subtree neighbors > distance to query";
1094 order = "orthologies > subtree neighbors > super orthologies > distance to query";
1097 order = "orthologies > super orthologies > subtree neighbors > distance to query";
1100 order = "subtree neighbors > orthologies > super orthologies > distance to query";
1103 order = "subtree neighbors > distance to query > orthologies > super orthologies";
1106 order = "orthologies > distance to query > subtree neighbors > super orthologies";
1109 order = "orthologies > subtree neighbors > distance to query > super orthologies";
1112 order = "orthologies";
// Returns a multi-line usage text listing every sort mode accepted by
// inferredOrthologsToString. The return statement is elided from this view.
1118 public final static StringBuffer getOrderHelp() {
1119 final StringBuffer sb = new StringBuffer();
1120 sb.append( " 0: orthologies" + ForesterUtil.LINE_SEPARATOR );
1121 sb.append( " 1: orthologies > super orthologies" + ForesterUtil.LINE_SEPARATOR );
1122 sb.append( " 2: super orthologies > orthologies" + ForesterUtil.LINE_SEPARATOR );
1123 sb.append( " 3: orthologies > distance to query" + ForesterUtil.LINE_SEPARATOR );
1124 sb.append( " 4: distance to query > orthologies" + ForesterUtil.LINE_SEPARATOR );
1125 sb.append( " 5: orthologies > super orthologies > distance to query" + ForesterUtil.LINE_SEPARATOR );
1126 sb.append( " 6: orthologies > distance to query > super orthologies" + ForesterUtil.LINE_SEPARATOR );
1127 sb.append( " 7: super orthologies > orthologies > distance to query" + ForesterUtil.LINE_SEPARATOR );
1128 sb.append( " 8: super orthologies > distance to query > orthologies" + ForesterUtil.LINE_SEPARATOR );
1129 sb.append( " 9: distance to query > orthologies > super orthologies" + ForesterUtil.LINE_SEPARATOR );
1130 sb.append( " 10: distance to query > super orthologies > orthologies" + ForesterUtil.LINE_SEPARATOR );
1131 sb.append( " 11: orthologies > subtree neighbors > distance to query" + ForesterUtil.LINE_SEPARATOR );
1132 sb.append( " 12: orthologies > subtree neighbors > super orthologies > distance to query"
1133 + ForesterUtil.LINE_SEPARATOR );
1134 sb.append( " 13: orthologies > super orthologies > subtree neighbors > distance to query"
1135 + ForesterUtil.LINE_SEPARATOR );
1136 sb.append( " 14: subtree neighbors > orthologies > super orthologies > distance to query"
1137 + ForesterUtil.LINE_SEPARATOR );
1138 sb.append( " 15: subtree neighbors > distance to query > orthologies > super orthologies"
1139 + ForesterUtil.LINE_SEPARATOR );
1140 sb.append( " 16: orthologies > distance to query > subtree neighbors > super orthologies"
1141 + ForesterUtil.LINE_SEPARATOR );
1142 sb.append( " 17: orthologies > subtree neighbors > distance to query > super orthologies"
1143 + ForesterUtil.LINE_SEPARATOR );
1147 private final static List<PhylogenyNode> getSubtreeNeighbors( final PhylogenyNode query, final int level ) {
1148 PhylogenyNode node = query;
1149 if ( !node.isExternal() ) {
1152 if ( !node.isRoot() ) {
1153 node = node.getParent();
1156 if ( !node.isRoot() ) {
1157 node = node.getParent();
1161 throw new IllegalArgumentException( "currently only supporting level 2 subtree neighbors " );
1163 final List<PhylogenyNode> sn = node.getAllExternalDescendants();