3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: www.phylosoft.org/forester
27 package org.forester.surfacing;
29 import java.io.BufferedWriter;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.text.DecimalFormat;
35 import java.text.NumberFormat;
36 import java.util.ArrayList;
37 import java.util.Arrays;
38 import java.util.Collections;
39 import java.util.Comparator;
40 import java.util.HashMap;
41 import java.util.HashSet;
42 import java.util.List;
44 import java.util.Map.Entry;
45 import java.util.PriorityQueue;
47 import java.util.SortedMap;
48 import java.util.SortedSet;
49 import java.util.TreeMap;
50 import java.util.TreeSet;
51 import java.util.regex.Matcher;
52 import java.util.regex.Pattern;
54 import org.forester.application.surfacing;
55 import org.forester.evoinference.distance.NeighborJoining;
56 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
57 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
58 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
61 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
62 import org.forester.evoinference.matrix.distance.DistanceMatrix;
63 import org.forester.go.GoId;
64 import org.forester.go.GoNameSpace;
65 import org.forester.go.GoTerm;
66 import org.forester.go.PfamToGoMapping;
67 import org.forester.io.parsers.nexus.NexusConstants;
68 import org.forester.io.writers.PhylogenyWriter;
69 import org.forester.msa.MsaCompactor.SORT_BY;
70 import org.forester.phylogeny.Phylogeny;
71 import org.forester.phylogeny.PhylogenyMethods;
72 import org.forester.phylogeny.PhylogenyNode;
73 import org.forester.phylogeny.PhylogenyNodeI.NH_CONVERSION_SUPPORT_VALUE_STYLE;
74 import org.forester.phylogeny.data.BinaryCharacters;
75 import org.forester.phylogeny.data.Confidence;
76 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
77 import org.forester.protein.BasicDomain;
78 import org.forester.protein.BasicProtein;
79 import org.forester.protein.BinaryDomainCombination;
80 import org.forester.protein.Domain;
81 import org.forester.protein.DomainId;
82 import org.forester.protein.Protein;
83 import org.forester.species.Species;
84 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
85 import org.forester.surfacing.DomainSimilarityCalculator.GoAnnotationOutput;
86 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
87 import org.forester.util.AsciiHistogram;
88 import org.forester.util.BasicDescriptiveStatistics;
89 import org.forester.util.BasicTable;
90 import org.forester.util.BasicTableParser;
91 import org.forester.util.DescriptiveStatistics;
92 import org.forester.util.ForesterUtil;
94 public final class SurfacingUtil {
    // Scientific-notation formatter (e.g. "1.2E-3"); used for E-value output.
    private final static NumberFormat FORMATTER = new DecimalFormat( "0.0E0" );
    // Fixed three-decimal formatter for scores/statistics.
    private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
    // Orders domains by ascending per-sequence E-value (most confident hits first).
    // NOTE(review): interior lines of this comparator are missing from this view;
    // from what is visible, ties appear to fall back to Domain's natural ordering.
    private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {

        public int compare( final Domain d1,
            if ( d1.getPerSequenceEvalue() < d2
                    .getPerSequenceEvalue() ) {
                    .getPerSequenceEvalue() > d2
                    .getPerSequenceEvalue() ) {
            return d1.compareTo( d2 );
    // Matches SwissProt-style species codes: 3 to 5 uppercase letters/digits.
    public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
    private static final boolean USE_LAST = true;

    // Utility class: prevent instantiation.
    private SurfacingUtil() {
        // Hidden constructor.
124 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
125 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
126 final SortedMap<DomainId, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
127 for( final DomainId domain_id : all_cd.keySet() ) {
128 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
132 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
133 final SortedSet<DomainId> domain_ids ) {
134 final SortedSet<DomainId> domains = genome.getAllDomainIds();
135 for( final DomainId domain : domains ) {
136 domain_ids.add( domain );
    /**
     * Writes an HTML head section (title plus embedded CSS for links, table
     * cells and headings) to the given writer.
     * NOTE(review): some lines of this method are missing from this view —
     * e.g. the opening "&lt;head&gt;" tag and the write of {@code title}
     * between the title tags — TODO confirm against the full file.
     *
     * @param w writer receiving the markup
     * @param title page title text
     * @throws IOException if writing to {@code w} fails
     */
    public static void addHtmlHead( final Writer w, final String title ) throws IOException {
        w.write( SurfacingConstants.NL );
        w.write( "<title>" );
        w.write( "</title>" );
        w.write( SurfacingConstants.NL );
        w.write( "<style>" );
        w.write( SurfacingConstants.NL );
        w.write( "a:visited { color : #6633FF; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "a:link { color : #6633FF; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "a:active { color : #99FF00; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "a:hover { color : #FFFFFF; background-color : #99FF00; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
        w.write( SurfacingConstants.NL );
        w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
        w.write( SurfacingConstants.NL );
        w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
        w.write( SurfacingConstants.NL );
        w.write( "</style>" );
        w.write( SurfacingConstants.NL );
        w.write( "</head>" );
        w.write( SurfacingConstants.NL );
169 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
170 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
171 for( final DomainSimilarity similarity : similarities ) {
172 stats.addValue( similarity.getMeanSimilarityScore() );
    /**
     * Counts, for each binary domain combination (DC), how many times it was
     * gained independently on the given phylogeny (gains are read from each
     * node's binary characters, i.e. Fitch parsimony results computed
     * elsewhere), then writes several report files: per-DC gain counts, DC
     * lists grouped by gain count, DC lists split for GO mapping (with and
     * without duplicates), LCA taxonomic-rank and ancestor-species counts for
     * DCs gained more than once, and optional protein/domain length
     * statistics.
     *
     * NOTE(review): this view of the file is missing interior lines — most
     * importantly the "try {" matching the "catch" near the end, several
     * "else {" lines and closing braces, and the out_counts/out_dc close()
     * calls. Comments below describe only what is visible.
     */
    private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
                                                                    final String outfilename_for_counts,
                                                                    final String outfilename_for_dc,
                                                                    final String outfilename_for_dc_for_go_mapping,
                                                                    final String outfilename_for_dc_for_go_mapping_unique,
                                                                    final String outfilename_for_rank_counts,
                                                                    final String outfilename_for_ancestor_species_counts,
                                                                    final String outfilename_for_protein_stats,
                                                                    final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
                                                                    final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
                                                                    final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
        // Commented-out debug dumps of the incoming statistics maps:
        //        if ( protein_length_stats_by_dc != null ) {
        //            for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
        //                System.out.print( entry.getKey().toString() );
        //                System.out.print( ": " );
        //                double[] a = entry.getValue().getDataAsDoubleArray();
        //                for( int i = 0; i < a.length; i++ ) {
        //                    System.out.print( a[ i ] + " " );
        //                System.out.println();
        //        if ( domain_number_stats_by_dc != null ) {
        //            for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
        //                System.out.print( entry.getKey().toString() );
        //                System.out.print( ": " );
        //                double[] a = entry.getValue().getDataAsDoubleArray();
        //                for( int i = 0; i < a.length; i++ ) {
        //                    System.out.print( a[ i ] + " " );
        //                System.out.println();
        // Output writers (wrapped in a try whose opening line is not visible
        // here; see the IOException catch at the bottom).
            final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
            final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
            final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
            final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
            // DC string -> number of independent gains over the whole tree.
            final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
            for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
                final PhylogenyNode n = it.next();
                final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
                for( final String dc : gained_dc ) {
                    if ( dc_gain_counts.containsKey( dc ) ) {
                        dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
                        // (else branch: first sighting of this DC — the
                        // "else {" line is missing from this view)
                        dc_gain_counts.put( dc, 1 );
            // Histogram: gain count -> number of DCs with that count, plus
            // per-count DC lists and statistics accumulators.
            final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
            final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
            final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
            final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
            final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
            final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
            final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
            final Set<String> dcs = dc_gain_counts.keySet();
            final SortedSet<String> more_than_once = new TreeSet<String>();
            final DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
            final DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
            final DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
            final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
            long gained_multiple_times_domain_length_sum = 0;
            long gained_once_domain_length_sum = 0;
            long gained_multiple_times_domain_length_count = 0;
            long gained_once_domain_length_count = 0;
            for( final String dc : dcs ) {
                final int count = dc_gain_counts.get( dc );
                if ( histogram.containsKey( count ) ) {
                    histogram.put( count, histogram.get( count ) + 1 );
                    domain_lists.get( count ).append( ", " + dc );
                    domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
                    domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
                    // (else branch: first DC seen for this count — "else {"
                    // line missing from this view)
                    histogram.put( count, 1 );
                    domain_lists.put( count, new StringBuilder( dc ) );
                    final PriorityQueue<String> q = new PriorityQueue<String>();
                    q.addAll( splitDomainCombination( dc ) );
                    domain_lists_go.put( count, q );
                    final SortedSet<String> set = new TreeSet<String>();
                    set.addAll( splitDomainCombination( dc ) );
                    domain_lists_go_unique.put( count, set );
                // Accumulate per-reappearance-count statistics (lazily
                // creating the accumulator for each new count).
                if ( protein_length_stats_by_dc != null ) {
                    if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
                        dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
                    dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
                if ( domain_number_stats_by_dc != null ) {
                    if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
                        dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
                    dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
                if ( domain_length_stats_by_domain != null ) {
                    if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
                        dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
                    // A DC string is "domainA=domainB"; record both halves.
                    final String[] ds = dc.split( "=" );
                    dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                            .get( ds[ 0 ] ).arithmeticMean() );
                    dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                            .get( ds[ 1 ] ).arithmeticMean() );
                // Branch for DCs gained more than once (the guarding "if"
                // line is missing from this view — presumably count > 1).
                    more_than_once.add( dc );
                    if ( protein_length_stats_by_dc != null ) {
                        final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_multiple_times_lengths_stats.addValue( element );
                    if ( domain_number_stats_by_dc != null ) {
                        final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_multiple_times_domain_count_stats.addValue( element );
                    if ( domain_length_stats_by_domain != null ) {
                        final String[] ds = dc.split( "=" );
                        final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                        final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                        for( final double element : s0.getData() ) {
                            gained_multiple_times_domain_length_sum += element;
                            ++gained_multiple_times_domain_length_count;
                        for( final double element : s1.getData() ) {
                            gained_multiple_times_domain_length_sum += element;
                            ++gained_multiple_times_domain_length_count;
                    // (else branch: DC gained exactly once)
                    if ( protein_length_stats_by_dc != null ) {
                        final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_once_lengths_stats.addValue( element );
                    if ( domain_number_stats_by_dc != null ) {
                        final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_once_domain_count_stats.addValue( element );
                    if ( domain_length_stats_by_domain != null ) {
                        final String[] ds = dc.split( "=" );
                        final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                        final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                        for( final double element : s0.getData() ) {
                            gained_once_domain_length_sum += element;
                            ++gained_once_domain_length_count;
                        for( final double element : s1.getData() ) {
                            gained_once_domain_length_sum += element;
                            ++gained_once_domain_length_count;
            // Write the histogram and the per-count DC lists.
            final Set<Integer> histogram_keys = histogram.keySet();
            for( final Integer histogram_key : histogram_keys ) {
                final int count = histogram.get( histogram_key );
                final StringBuilder dc = domain_lists.get( histogram_key );
                out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
                out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
                out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
                // PriorityQueue iteration order is unspecified, hence the
                // explicit sort before writing.
                final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
                Arrays.sort( sorted );
                for( final Object domain : sorted ) {
                    out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
                out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
                for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
                    out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
            // (out_counts.close() / out_dc.close() are on lines missing from
            // this view)
            out_dc_for_go_mapping.close();
            out_dc_for_go_mapping_unique.close();
            // For every DC gained more than once, find the LCA of each pair
            // of external nodes that gained it, and tally the LCA's
            // taxonomic rank and species name.
            final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
            final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
            for( final String dc : more_than_once ) {
                final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
                for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
                    final PhylogenyNode n = it.next();
                    if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
                // All unordered pairs of gaining nodes.
                for( int i = 0; i < nodes.size() - 1; ++i ) {
                    for( int j = i + 1; j < nodes.size(); ++j ) {
                        final PhylogenyNode lca = PhylogenyMethods.obtainLCA( nodes.get( i ),
                        String rank = "unknown";
                        if ( lca.getNodeData().isHasTaxonomy()
                                && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
                            rank = lca.getNodeData().getTaxonomy().getRank();
                        addToCountMap( lca_rank_counts, rank );
                        // Prefer scientific name, then common name, then the
                        // node name as the ancestor-species label.
                        if ( lca.getNodeData().isHasTaxonomy()
                                && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
                            lca_species = lca.getNodeData().getTaxonomy().getScientificName();
                        else if ( lca.getNodeData().isHasTaxonomy()
                                && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
                            lca_species = lca.getNodeData().getTaxonomy().getCommonName();
                            lca_species = lca.getName();
                        addToCountMap( lca_ancestor_species_counts, lca_species );
            final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
            final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
            ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
            ForesterUtil.map2writer( out_for_ancestor_species_counts,
                                     lca_ancestor_species_counts,
                                     ForesterUtil.LINE_SEPARATOR );
            out_for_rank_counts.close();
            out_for_ancestor_species_counts.close();
            // Optional protein/domain statistics report.
            if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
                    && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
                final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
                w.write( "Domain Lengths: " );
                if ( domain_length_stats_by_domain != null ) {
                    for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
                        w.write( entry.getKey().toString() );
                        w.write( "\t" + entry.getValue().arithmeticMean() );
                        w.write( "\t" + entry.getValue().median() );
                w.write( "Protein Lengths: " );
                if ( protein_length_stats_by_dc != null ) {
                    for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
                        w.write( entry.getKey().toString() );
                        w.write( "\t" + entry.getValue().arithmeticMean() );
                        w.write( "\t" + entry.getValue().median() );
                w.write( "Number of domains: " );
                if ( domain_number_stats_by_dc != null ) {
                    for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
                        w.write( entry.getKey().toString() );
                        w.write( "\t" + entry.getValue().arithmeticMean() );
                        w.write( "\t" + entry.getValue().median() );
                w.write( "Gained once, domain lengths:" );
                w.write( "N: " + gained_once_domain_length_count );
                w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
                w.write( "Gained multiple times, domain lengths:" );
                w.write( "N: " + gained_multiple_times_domain_length_count );
                        + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
                w.write( "Gained once, protein lengths:" );
                w.write( gained_once_lengths_stats.toString() );
                w.write( "Gained once, domain counts:" );
                w.write( gained_once_domain_count_stats.toString() );
                w.write( "Gained multiple times, protein lengths:" );
                w.write( gained_multiple_times_lengths_stats.toString() );
                w.write( "Gained multiple times, domain counts:" );
                w.write( gained_multiple_times_domain_count_stats.toString() );
        // Any I/O failure is reported as a warning, not rethrown.
        catch ( final IOException e ) {
            ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
                + outfilename_for_counts + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
                + outfilename_for_dc + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME,
                                     "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
                                             + outfilename_for_dc_for_go_mapping + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME,
                                     "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
                                             + outfilename_for_dc_for_go_mapping_unique + "]" );
510 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
511 if ( map.containsKey( s ) ) {
512 map.put( s, map.get( s ) + 1 );
519 public static int calculateOverlap( final Domain domain, final List<Boolean> covered_positions ) {
520 int overlap_count = 0;
521 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
522 if ( ( i < covered_positions.size() ) && ( covered_positions.get( i ) == true ) ) {
526 return overlap_count;
529 public static void checkForOutputFileWriteability( final File outfile ) {
530 final String error = ForesterUtil.isWritableFile( outfile );
531 if ( !ForesterUtil.isEmpty( error ) ) {
532 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
536 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
537 final boolean get_gains ) {
538 final SortedSet<String> domains = new TreeSet<String>();
539 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
540 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
542 domains.addAll( chars.getGainedCharacters() );
545 domains.addAll( chars.getLostCharacters() );
    /**
     * Scans a gain/loss character-state matrix and appends, for every
     * (identifier, character) cell that is a GAIN (or LOSS when
     * {@code get_gains} is false), a binary domain combination of the
     * requested type to the output list.
     * NOTE(review): several "else"/closing-brace lines are missing from this
     * view; the final add appears to be the fallback for the basic
     * (undirected) combination type.
     *
     * @param matrix gain/loss parsimony states per identifier and character
     * @param dc_type which concrete BinaryDomainCombination subtype to build
     * @param all_binary_domains_combination_gained output list; mutated in
     *        place (despite the name it also receives losses)
     * @param get_gains true to collect GAIN states, false to collect LOSS
     */
    public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
                                                                                           final BinaryDomainCombination.DomainCombinationType dc_type,
                                                                                           final List<BinaryDomainCombination> all_binary_domains_combination_gained,
                                                                                           final boolean get_gains ) {
        // Identifiers are visited in sorted order for deterministic output.
        final SortedSet<String> sorted_ids = new TreeSet<String>();
        for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
            sorted_ids.add( matrix.getIdentifier( i ) );
        for( final String id : sorted_ids ) {
            for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
                if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
                        || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
                    if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
                        all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
                                .createInstance( matrix.getCharacter( c ) ) );
                    else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
                        all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
                                .createInstance( matrix.getCharacter( c ) ) );
                        // (else branch: basic/undirected combination)
                        all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
                                .getCharacter( c ) ) );
    /**
     * Builds (and creates on disk, level by level) the output directory for
     * per-node domain gain/loss files:
     * {@code <outfile's parent>/<base_dir>/{DC|DOMAINS}/{GAINS|LOSSES|PRESENT}}.
     * The mkdir() return values are ignored; existence is re-checked at each
     * level instead.
     * NOTE(review): the "else {" lines of the two selection branches are
     * missing from this view; the pairing DC/DOMAINS and
     * GAINS/LOSSES/PRESENT is inferred from the visible assignments.
     *
     * @param base_dir name of the top-level directory to create
     * @param domain_combinations true for the "DC" subdirectory, false for
     *        "DOMAINS"
     * @param state parsimony state selecting GAINS, LOSSES or (otherwise)
     *        PRESENT
     * @param outfile output file whose parent anchors the hierarchy
     * @return the deepest directory created/selected
     */
    private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
                                                            final boolean domain_combinations,
                                                            final CharacterStateMatrix.GainLossStates state,
                                                            final String outfile ) {
        File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
                + ForesterUtil.FILE_SEPARATOR + base_dir );
        if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
        if ( domain_combinations ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "DC" );
            // (else branch: plain domains)
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
        if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
        if ( state == GainLossStates.GAIN ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "GAINS" );
        else if ( state == GainLossStates.LOSS ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
            // (else branch: neither gained nor lost -> present)
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
        if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
        return per_node_go_mapped_domain_gain_loss_files_base_dir;
618 public static Map<DomainId, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
619 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map = new HashMap<DomainId, List<GoId>>( pfam_to_go_mappings
621 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
622 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
623 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
625 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
627 return domain_id_to_go_ids_map;
630 public static Map<DomainId, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
632 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, "\t" );
633 final Map<DomainId, Set<String>> map = new TreeMap<DomainId, Set<String>>();
634 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
635 final DomainId domain_id = new DomainId( primary_table.getValue( 0, r ) );
636 if ( !map.containsKey( domain_id ) ) {
637 map.put( domain_id, new HashSet<String>() );
639 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
644 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
645 checkForOutputFileWriteability( nj_tree_outfile );
646 final NeighborJoining nj = NeighborJoining.createInstance();
647 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
648 phylogeny.setName( nj_tree_outfile.getName() );
649 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
653 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
654 final SortedMap<DomainId, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
655 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
656 for( final DomainId domain_id : cds.keySet() ) {
657 final CombinableDomains cd = cds.get( domain_id );
658 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
660 return binary_combinations;
    /**
     * Pushes presentation settings (detailedness, GO annotation output, GO
     * id-to-term map and GO namespace limit) into every
     * PrintableDomainSimilarity contained in the given set; other
     * DomainSimilarity implementations are left untouched.
     * NOTE(review): trailing lines of this method are missing from this view
     * (closing braces, possibly an else branch).
     *
     * @param domain_similarities similarities to decorate; mutated in place
     * @param detailedness level of detail for printing
     * @param go_annotation_output how GO annotations are to be emitted
     * @param go_id_to_term_map GO id -> term lookup; required when a
     *        namespace limit is set
     * @param go_namespace_limit optional namespace restriction
     * @throws IllegalArgumentException if a namespace limit is given without
     *         a usable id-to-term map
     */
    public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
                                                            final Detailedness detailedness,
                                                            final GoAnnotationOutput go_annotation_output,
                                                            final Map<GoId, GoTerm> go_id_to_term_map,
                                                            final GoNameSpace go_namespace_limit ) {
        if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) ) {
            throw new IllegalArgumentException( "attempt to use a GO namespace limit without a GO id to term map" );
        for( final DomainSimilarity domain_similarity : domain_similarities ) {
            if ( domain_similarity instanceof PrintableDomainSimilarity ) {
                final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
                printable_domain_similarity.setDetailedness( detailedness );
                printable_domain_similarity.setGoAnnotationOutput( go_annotation_output );
                printable_domain_similarity.setGoIdToTermMap( go_id_to_term_map );
                printable_domain_similarity.setGoNamespaceLimit( go_namespace_limit );
    /**
     * Writes a domain-length analysis report to {@code outfile}: mean-based
     * statistics per species, per-domain outlier species (|Z| >= 1.5), an
     * ASCII histogram over all species, per-species Z-scores against the
     * all-species mean, and a section listing species with |Z| >= 1.0.
     * NOTE(review): some lines are missing from this view — notably the
     * {@code out.close()} expected at the end; {@code input_file_properties}
     * and {@code number_of_genomes} are referenced only by the commented-out
     * histogram-GUI code below.
     *
     * @param input_file_properties per-genome input properties (unused in
     *        the active code path)
     * @param number_of_genomes genome count (unused in the active code path)
     * @param domain_lengths_table source of all length statistics
     * @param outfile report destination (checked for writability first)
     * @throws IOException if writing fails
     */
    public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
                                                    final int number_of_genomes,
                                                    final DomainLengthsTable domain_lengths_table,
                                                    final File outfile ) throws IOException {
        final DecimalFormat df = new DecimalFormat( "#.00" );
        checkForOutputFileWriteability( outfile );
        final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
        out.write( "MEAN BASED STATISTICS PER SPECIES" );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
        out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
        out.write( ForesterUtil.LINE_SEPARATOR );
        // One tab-separated line per domain that has outlier species.
        for( final DomainLengths domain_lengths : domain_lengths_list ) {
            final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
            if ( species_list.size() > 0 ) {
                out.write( domain_lengths.getDomainId() + "\t" );
                for( final Species species : species_list ) {
                    out.write( species + "\t" );
                out.write( ForesterUtil.LINE_SEPARATOR );
                // DescriptiveStatistics stats_for_domain = domain_lengths
                //                .calculateMeanBasedStatistics();
                //AsciiHistogram histo = new AsciiHistogram( stats_for_domain );
                //System.out.println( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( "OUTLIER SPECIES (Z 1.0)" );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final DescriptiveStatistics stats_for_all_species = domain_lengths_table
                .calculateMeanBasedStatisticsForAllSpecies();
        out.write( stats_for_all_species.asSummary() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
        out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        // Z-score of each species' mean against the all-species distribution.
        final double population_sd = stats_for_all_species.sampleStandardDeviation();
        final double population_mean = stats_for_all_species.arithmeticMean();
        for( final Species species : domain_lengths_table.getSpecies() ) {
            final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
            final double z = ( x - population_mean ) / population_sd;
            out.write( species + "\t" + z );
            out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( ForesterUtil.LINE_SEPARATOR );
        // Second pass: only species with |Z| >= 1.0, with summary statistics.
        for( final Species species : domain_lengths_table.getSpecies() ) {
            final DescriptiveStatistics stats_for_species = domain_lengths_table
                    .calculateMeanBasedStatisticsForSpecies( species );
            final double x = stats_for_species.arithmeticMean();
            final double z = ( x - population_mean ) / population_sd;
            if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
                out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
                out.write( ForesterUtil.LINE_SEPARATOR );
        // Disabled interactive histogram display:
        //        final List<HistogramData> histogram_datas = new ArrayList<HistogramData>();
        //        for( int i = 0; i < number_of_genomes; ++i ) {
        //            final Species species = new BasicSpecies( input_file_properties[ i ][ 0 ] );
        //                    .add( new HistogramData( species.toString(), domain_lengths_table
        //                            .calculateMeanBasedStatisticsForSpecies( species )
        //                            .getDataAsDoubleArray(), 5, 600, null, 60 ) );
        //        final HistogramsFrame hf = new HistogramsFrame( histogram_datas );
        //        hf.setVisible( true );
757 * @param all_binary_domains_combination_lost_fitch
758 * @param consider_directedness_and_adjacency_for_bin_combinations
759 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
760 * which were gained under unweighted (Fitch) parsimony.
762 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
763 final boolean radomize_fitch_parsimony,
764 final String outfile_name,
765 final DomainParsimonyCalculator domain_parsimony,
766 final Phylogeny phylogeny,
767 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
768 final Map<GoId, GoTerm> go_id_to_term_map,
769 final GoNameSpace go_namespace_limit,
770 final String parameters_str,
771 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
772 final SortedSet<DomainId> positive_filter,
773 final boolean output_binary_domain_combinations_for_graphs,
774 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
775 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
776 final BinaryDomainCombination.DomainCombinationType dc_type,
777 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
778 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
779 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
780 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
781 final String date_time = ForesterUtil.getCurrentDateTime();
782 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
783 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
784 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
785 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
786 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
787 writeToNexus( outfile_name, domain_parsimony, phylogeny );
790 Phylogeny local_phylogeny_l = phylogeny.copy();
791 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
792 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
795 domain_parsimony.executeDolloParsimonyOnDomainPresence();
797 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
798 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
799 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
800 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
801 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
802 CharacterStateMatrix.GainLossStates.GAIN,
803 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
805 ForesterUtil.LINE_SEPARATOR,
807 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
808 CharacterStateMatrix.GainLossStates.LOSS,
809 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
811 ForesterUtil.LINE_SEPARATOR,
813 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
814 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
816 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
820 domain_parsimony.getGainLossMatrix(),
821 CharacterStateMatrix.GainLossStates.GAIN,
822 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
824 ForesterUtil.LINE_SEPARATOR,
825 "Dollo Parsimony | Gains | Domains",
827 domain_id_to_secondary_features_maps,
828 all_pfams_encountered,
829 all_pfams_gained_as_domains,
831 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
835 domain_parsimony.getGainLossMatrix(),
836 CharacterStateMatrix.GainLossStates.LOSS,
837 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
839 ForesterUtil.LINE_SEPARATOR,
840 "Dollo Parsimony | Losses | Domains",
842 domain_id_to_secondary_features_maps,
843 all_pfams_encountered,
844 all_pfams_lost_as_domains,
846 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
850 domain_parsimony.getGainLossMatrix(),
852 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
854 ForesterUtil.LINE_SEPARATOR,
855 "Dollo Parsimony | Present | Domains",
857 domain_id_to_secondary_features_maps,
858 all_pfams_encountered,
860 "_dollo_present_d" );
861 preparePhylogeny( local_phylogeny_l,
864 "Dollo parsimony on domain presence/absence",
865 "dollo_on_domains_" + outfile_name,
867 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
868 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
870 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
871 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
873 catch ( final IOException e ) {
875 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
877 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
878 // FITCH DOMAIN COMBINATIONS
879 // -------------------------
880 local_phylogeny_l = phylogeny.copy();
881 String randomization = "no";
882 if ( radomize_fitch_parsimony ) {
883 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
884 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
887 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( USE_LAST );
889 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
890 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
891 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
892 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
894 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
895 CharacterStateMatrix.GainLossStates.GAIN,
896 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
898 ForesterUtil.LINE_SEPARATOR,
900 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
901 CharacterStateMatrix.GainLossStates.LOSS,
903 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
905 ForesterUtil.LINE_SEPARATOR,
907 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
908 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
909 if ( all_binary_domains_combination_gained_fitch != null ) {
910 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
912 all_binary_domains_combination_gained_fitch,
915 if ( all_binary_domains_combination_lost_fitch != null ) {
916 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
918 all_binary_domains_combination_lost_fitch,
921 if ( output_binary_domain_combinations_for_graphs ) {
923 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
924 .getGainLossMatrix(),
927 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
929 ForesterUtil.LINE_SEPARATOR,
930 BinaryDomainCombination.OutputFormat.DOT );
933 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
937 domain_parsimony.getGainLossMatrix(),
938 CharacterStateMatrix.GainLossStates.GAIN,
939 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
941 ForesterUtil.LINE_SEPARATOR,
942 "Fitch Parsimony | Gains | Domain Combinations",
945 all_pfams_encountered,
946 all_pfams_gained_as_dom_combinations,
948 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
952 domain_parsimony.getGainLossMatrix(),
953 CharacterStateMatrix.GainLossStates.LOSS,
954 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
956 ForesterUtil.LINE_SEPARATOR,
957 "Fitch Parsimony | Losses | Domain Combinations",
960 all_pfams_encountered,
961 all_pfams_lost_as_dom_combinations,
962 "_fitch_losses_dc" );
963 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
967 domain_parsimony.getGainLossMatrix(),
969 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
971 ForesterUtil.LINE_SEPARATOR,
972 "Fitch Parsimony | Present | Domain Combinations",
975 all_pfams_encountered,
977 "_fitch_present_dc" );
978 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
981 all_pfams_encountered );
982 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
983 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
984 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
985 all_pfams_gained_as_dom_combinations );
986 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
987 preparePhylogeny( local_phylogeny_l,
990 "Fitch parsimony on binary domain combination presence/absence randomization: "
992 "fitch_on_binary_domain_combinations_" + outfile_name,
994 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
995 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
996 calculateIndependentDomainCombinationGains( local_phylogeny_l,
998 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
1000 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
1002 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
1004 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
1005 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
1006 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
1007 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
1008 protein_length_stats_by_dc,
1009 domain_number_stats_by_dc,
1010 domain_length_stats_by_domain );
// NOTE(review): elided listing — embedded line numbers jump (1023 -> 1025, 1030 -> 1032, ...),
// so receiver expressions and some arguments are missing; not compilable as-is.
//
// Purpose (from visible code): mirrors executeParsimonyAnalysis but for secondary features:
// Dollo parsimony on secondary-feature presence/absence, then Fitch parsimony on secondary
// binary domain combinations, writing matrices, trees and "MAPPED" independent-gain files.
1014 public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
1015 final DomainParsimonyCalculator secondary_features_parsimony,
1016 final Phylogeny phylogeny,
1017 final String parameters_str,
1018 final Map<Species, MappingResults> mapping_results_map ) {
1019 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
1020 final String date_time = ForesterUtil.getCurrentDateTime();
1021 System.out.println();
1022 writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
1023 secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
// Work on a copy so the caller's phylogeny is not decorated/mutated.
1025 Phylogeny local_phylogeny_copy = phylogeny.copy();
1026 secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
1027 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
1028 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
1029 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
1030 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
1032 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1033 CharacterStateMatrix.GainLossStates.GAIN,
1035 + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
1037 ForesterUtil.LINE_SEPARATOR,
1040 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1041 CharacterStateMatrix.GainLossStates.LOSS,
1043 + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
1045 ForesterUtil.LINE_SEPARATOR,
1048 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1051 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
1053 ForesterUtil.LINE_SEPARATOR,
1055 preparePhylogeny( local_phylogeny_copy,
1056 secondary_features_parsimony,
1058 "Dollo parsimony on secondary feature presence/absence",
1059 "dollo_on_secondary_features_" + outfile_name,
1061 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
1062 + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
1063 // FITCH DOMAIN COMBINATIONS
1064 // -------------------------
1065 local_phylogeny_copy = phylogeny.copy();
// Unlike executeParsimonyAnalysis, randomization is never enabled here.
1066 final String randomization = "no";
1067 secondary_features_parsimony.executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( USE_LAST );
1068 preparePhylogeny( local_phylogeny_copy,
1069 secondary_features_parsimony,
1071 "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
1073 "fitch_on_binary_domain_combinations_" + outfile_name,
1075 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
1076 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
// Trailing nulls: no per-DC statistics are collected in the secondary-features run.
1077 calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
1078 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
1079 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
1080 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
1081 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
1082 + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
1083 + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
// NOTE(review): elided listing — lines 1088, 1100, 1106-1111, 1113, 1115-1118, 1123, 1128 etc.
// are missing (including the 'out' Writer parameter declaration); not compilable as-is.
//
// Purpose (from visible code): writes one tab-ish-separated record per protein that contains
// the query domains in N-to-C order, restricted to limit_to_species when that is non-empty:
// species id, protein id, per-domain id + total count, description, accession.
// NOTE(review): visible body is nearly identical to extractProteinNames(List,...) below —
// candidates for consolidation once the full file is available. The
// average_protein_lengths_by_dc parameter is not used on any visible line.
1086 public static void doit( final List<Protein> proteins,
1087 final List<DomainId> query_domain_ids_nc_order,
1089 final String separator,
1090 final String limit_to_species,
1091 final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
1092 for( final Protein protein : proteins ) {
1093 if ( ForesterUtil.isEmpty( limit_to_species )
1094 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1095 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
1096 out.write( protein.getSpecies().getSpeciesId() );
1097 out.write( separator );
1098 out.write( protein.getProteinId().getId() );
1099 out.write( separator );
// visited_domain_ids makes each domain id be reported only once per protein.
1101 final Set<DomainId> visited_domain_ids = new HashSet<DomainId>();
1102 boolean first = true;
1103 for( final Domain domain : protein.getProteinDomains() ) {
1104 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
1105 visited_domain_ids.add( domain.getDomainId() );
1112 out.write( domain.getDomainId().getId() );
1114 out.write( "" + domain.getTotalCount() );
1119 out.write( separator );
1120 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1121 .equals( SurfacingConstants.NONE ) ) ) {
1122 out.write( protein.getDescription() );
1124 out.write( separator );
1125 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1126 .equals( SurfacingConstants.NONE ) ) ) {
1127 out.write( protein.getAccession() );
1129 out.write( SurfacingConstants.NL );
// NOTE(review): elided listing — lines 1138, 1149, 1155-1160, 1162, 1164-1167, 1172, 1177 etc.
// are missing (including the 'out' Writer parameter declaration); not compilable as-is.
//
// Purpose (from visible code): for every protein containing the query domains in N-to-C order
// (optionally limited to one species, case-insensitive), writes species id, protein id,
// each distinct domain id with its total count, then description and accession when present.
1136 public static void extractProteinNames( final List<Protein> proteins,
1137 final List<DomainId> query_domain_ids_nc_order,
1139 final String separator,
1140 final String limit_to_species ) throws IOException {
1141 for( final Protein protein : proteins ) {
1142 if ( ForesterUtil.isEmpty( limit_to_species )
1143 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1144 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
1145 out.write( protein.getSpecies().getSpeciesId() );
1146 out.write( separator );
1147 out.write( protein.getProteinId().getId() );
1148 out.write( separator );
// visited_domain_ids suppresses duplicate reporting of the same domain id.
1150 final Set<DomainId> visited_domain_ids = new HashSet<DomainId>();
1151 boolean first = true;
1152 for( final Domain domain : protein.getProteinDomains() ) {
1153 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
1154 visited_domain_ids.add( domain.getDomainId() );
1161 out.write( domain.getDomainId().getId() );
1163 out.write( "" + domain.getTotalCount() );
1168 out.write( separator );
// Description/accession are omitted when empty or the "NONE" sentinel.
1169 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1170 .equals( SurfacingConstants.NONE ) ) ) {
1171 out.write( protein.getDescription() );
1173 out.write( separator );
1174 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1175 .equals( SurfacingConstants.NONE ) ) ) {
1176 out.write( protein.getAccession() );
1178 out.write( SurfacingConstants.NL );
// NOTE(review): elided listing — lines 1187, 1203, 1205-1206, 1208, 1210, 1213-1214, 1216-1217,
// 1219, 1221-1226, 1228, 1230, 1232-1233, 1237, 1242 etc. are missing (including the 'out'
// Writer parameter and several loop/guard closers); not compilable as-is.
//
// Purpose (from visible code): overload keyed by a single DomainId — for each species' proteins
// that carry that domain (optionally limited to one species), writes species id, protein id,
// the domain id, each hit's from-to range, then all of the protein's domains sorted by a
// DomainComparator, and finally description/accession when present.
1185 public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
1186 final DomainId domain_id,
1188 final String separator,
1189 final String limit_to_species ) throws IOException {
1190 for( final Species species : protein_lists_per_species.keySet() ) {
1191 for( final Protein protein : protein_lists_per_species.get( species ) ) {
1192 if ( ForesterUtil.isEmpty( limit_to_species )
1193 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1194 final List<Domain> domains = protein.getProteinDomains( domain_id );
1195 if ( domains.size() > 0 ) {
1196 out.write( protein.getSpecies().getSpeciesId() );
1197 out.write( separator );
1198 out.write( protein.getProteinId().getId() );
1199 out.write( separator );
1200 out.write( domain_id.toString() );
1201 out.write( separator );
1202 for( final Domain domain : domains ) {
1204 out.write( domain.getFrom() + "-" + domain.getTo() );
1207 out.write( separator );
// Copy into an array so the protein's own domain list is not reordered by the sort.
1209 Domain domain_ary[] = new Domain[ protein.getProteinDomains().size() ];
1211 for( int i = 0; i < protein.getProteinDomains().size(); ++i ) {
1212 domain_ary[ i ] = protein.getProteinDomains().get( i );
1215 Arrays.sort( domain_ary, new DomainComparator( false ) );
1218 boolean first = true;
1220 for( final Domain domain : domain_ary ) {
1227 out.write( domain.getDomainId().toString() );
// NOTE(review): Writer.write(int) writes a single char, not the number — the elided
// lines 1228/1230 presumably convert getFrom()/getTo() to text; confirm in full source.
1229 out.write( domain.getFrom() );
1231 out.write( domain.getTo() );
1234 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1235 .equals( SurfacingConstants.NONE ) ) ) {
1236 out.write( protein.getDescription() );
1238 out.write( separator );
1239 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1240 .equals( SurfacingConstants.NONE ) ) ) {
1241 out.write( protein.getAccession() );
1243 out.write( SurfacingConstants.NL );
// NOTE(review): elided listing — lines 1257-1258 and 1260 (closing braces) are missing.
//
// Collects the union of all domain ids across the given genome-wide combinable-domain sets,
// returned sorted (TreeSet) with duplicates removed.
1251 public static SortedSet<DomainId> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
1252 final SortedSet<DomainId> all_domains_ids = new TreeSet<DomainId>();
1253 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1254 final Set<DomainId> all_domains = gwcd.getAllDomainIds();
1255 // for( final Domain domain : all_domains ) {
1256 all_domains_ids.addAll( all_domains );
1259 return all_domains_ids;
// NOTE(review): elided listing — lines 1270-1278 are missing (presumably the else-branch
// initializing a count to 1, loop/method closers, and the return); not compilable as-is.
//
// Tallies, per domain id, how many times the domain occurs across all given proteins;
// visible code increments an existing entry, the first-occurrence branch is elided.
1262 public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
1263 final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
1264 for( final Protein protein_domain_collection : protein_domain_collections ) {
// Raw-Object iteration plus cast — getProteinDomains() is presumably untyped here; the
// cast to BasicDomain would throw ClassCastException for other Domain implementations.
1265 for( final Object name : protein_domain_collection.getProteinDomains() ) {
1266 final BasicDomain protein_domain = ( BasicDomain ) name;
1267 final String id = protein_domain.getDomainId().getId();
1268 if ( map.containsKey( id ) ) {
1269 map.put( id, map.get( id ) + 1 );
// NOTE(review): elided listing — lines 1281, 1292, and everything after 1295 (counter
// increment, loop/method closers, return) are missing; not compilable as-is.
//
// Counts phylogeny nodes (postorder) that have no usable label: empty node name AND no
// taxonomy scientific name AND no taxonomy common name. For each such node the parent's
// name (when a parent exists) is appended to 'names' for diagnostics.
1279 public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
1280 final PhylogenyNodeIterator it = p.iteratorPostorder();
1282 while ( it.hasNext() ) {
1283 final PhylogenyNode n = it.next();
1284 if ( ForesterUtil.isEmpty( n.getName() )
1285 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1286 .getScientificName() ) )
1287 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1288 .getCommonName() ) ) ) {
1289 if ( n.getParent() != null ) {
1290 names.append( " " );
1291 names.append( n.getParent().getName() );
// NOTE(review): prints the whole descendant list once per element (l.toString() inside the
// loop over l) — looks like leftover debug output; the counting logic itself is elided.
1293 final List l = n.getAllExternalDescendants();
1294 for( final Object object : l ) {
1295 System.out.println( l.toString() );
// NOTE(review): elided listing — lines 1306-1307, 1309-1310 (rest of the javadoc) and
// 1314+ (the return statements and closers) are missing. The visible guard returns
// presumably-false when any position in [from, to] is uncovered or out of range.
1304 * Returns true if Domain domain falls in an uninterrupted stretch of
1305 * covered positions.
1308 * @param covered_positions
1311 public static boolean isEngulfed( final Domain domain, final List<Boolean> covered_positions ) {
1312 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1313 if ( ( i >= covered_positions.size() ) || ( covered_positions.get( i ) != true ) ) {
// NOTE(review): elided listing — lines 1324 (a parameter), 1334, and 1339 (closing brace)
// are missing; not compilable as-is.
//
// Decorates the phylogeny with the parsimony results and stamps it with a bracketed
// description string (method, date, cost, gains, losses, unchanged, parameters), sets the
// parsimony cost as the tree's confidence, and marks it rooted and not re-rootable.
1320 public static void preparePhylogeny( final Phylogeny p,
1321 final DomainParsimonyCalculator domain_parsimony,
1322 final String date_time,
1323 final String method,
1325 final String parameters_str ) {
1326 domain_parsimony.decoratePhylogenyWithDomains( p );
1327 final StringBuilder desc = new StringBuilder();
1328 desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
1329 desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
1330 desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
1331 desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
1332 desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
1333 desc.append( "[Parameters: " + parameters_str + "]" );
1335 p.setDescription( desc.toString() );
1336 p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
1337 p.setRerootable( false );
1338 p.setRooted( true );
// NOTE(review): elided listing — lines 1343-1345 (rest of the javadoc), 1352, 1355, 1363-1364,
// 1366-1367, 1376, 1379, 1396-1399, 1411, 1414-1415 (else-branches, dcs.add, closers) and the
// final return are missing; not compilable as-is.
//
// Serializes a protein into one record per unique ordered (N-terminal, C-terminal) domain
// pair: species | protein id | N-dom | C-dom | N-dom E-value | C-dom E-value | N count | C count.
// Single-domain and zero-domain proteins get a padded single record instead.
1342 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
1346 static public StringBuffer proteinToDomainCombinations( final Protein protein,
1347 final String protein_id,
1348 final String separator ) {
1349 final StringBuffer sb = new StringBuffer();
1350 if ( protein.getSpecies() == null ) {
1351 throw new IllegalArgumentException( "species must not be null" );
1353 if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
1354 throw new IllegalArgumentException( "species id must not be empty" );
1356 final List<Domain> domains = protein.getProteinDomains();
1357 if ( domains.size() > 1 ) {
// First pass: occurrence count per domain id, reported in the last two record columns.
1358 final Map<String, Integer> counts = new HashMap<String, Integer>();
1359 for( final Domain domain : domains ) {
1360 final String id = domain.getDomainId().getId();
1361 if ( counts.containsKey( id ) ) {
1362 counts.put( id, counts.get( id ) + 1 );
1365 counts.put( id, 1 );
// Second pass: every unordered pair (i, j), oriented so domain_n starts before domain_c;
// 'dcs' de-duplicates pairs by concatenated id string.
1368 final Set<String> dcs = new HashSet<String>();
1369 for( int i = 1; i < domains.size(); ++i ) {
1370 for( int j = 0; j < i; ++j ) {
1371 Domain domain_n = domains.get( i );
1372 Domain domain_c = domains.get( j );
1373 if ( domain_n.getFrom() > domain_c.getFrom() ) {
1374 domain_n = domains.get( j );
1375 domain_c = domains.get( i );
// NOTE(review): id concatenation without a delimiter can collide ("AB"+"C" == "A"+"BC");
// flagged only — the elided lines may already address this.
1377 final String dc = domain_n.getDomainId().getId() + domain_c.getDomainId().getId();
1378 if ( !dcs.contains( dc ) ) {
1380 sb.append( protein.getSpecies() );
1381 sb.append( separator );
1382 sb.append( protein_id );
1383 sb.append( separator );
1384 sb.append( domain_n.getDomainId().getId() );
1385 sb.append( separator );
1386 sb.append( domain_c.getDomainId().getId() );
1387 sb.append( separator );
1388 sb.append( domain_n.getPerDomainEvalue() );
1389 sb.append( separator );
1390 sb.append( domain_c.getPerDomainEvalue() );
1391 sb.append( separator );
1392 sb.append( counts.get( domain_n.getDomainId().getId() ) );
1393 sb.append( separator );
1394 sb.append( counts.get( domain_c.getDomainId().getId() ) );
1395 sb.append( ForesterUtil.LINE_SEPARATOR );
// Single-domain protein: C-terminal columns left empty.
1400 else if ( domains.size() == 1 ) {
1401 sb.append( protein.getSpecies() );
1402 sb.append( separator );
1403 sb.append( protein_id );
1404 sb.append( separator );
1405 sb.append( domains.get( 0 ).getDomainId().getId() );
1406 sb.append( separator );
1407 sb.append( separator );
1408 sb.append( domains.get( 0 ).getPerDomainEvalue() );
1409 sb.append( separator );
1410 sb.append( separator );
1412 sb.append( separator );
1413 sb.append( ForesterUtil.LINE_SEPARATOR );
// No domains at all: fully padded record (the else keyword on line 1414/1415 is elided).
1416 sb.append( protein.getSpecies() );
1417 sb.append( separator );
1418 sb.append( protein_id );
1419 sb.append( separator );
1420 sb.append( separator );
1421 sb.append( separator );
1422 sb.append( separator );
1423 sb.append( separator );
1424 sb.append( separator );
1425 sb.append( ForesterUtil.LINE_SEPARATOR );
// NOTE(review): elided listing — javadoc lines 1434, 1436-1437, 1439-1440, 1447-1449 and body
// lines 1463, 1468-1469, 1471-1472, 1474-1475 (else keyword and block closers) are missing.
//
// Greedy overlap pruning: domains are taken in ascending-confidence order (best support
// first per sortDomainsWithAscendingConfidenceValues); a domain is kept only when its overlap
// with already-covered positions is within max_allowed_overlap and (optionally) it is not
// fully engulfed by previously kept domains. Kept domains mark positions [from, to] covered.
1432 * Example regarding engulfment: ------------0.1 ----------0.2 --0.3 =>
1433 * domain with 0.3 is ignored
1435 * -----------0.1 ----------0.2 --0.3 => domain with 0.3 is ignored
1438 * ------------0.1 ----------0.3 --0.2 => domains with 0.3 and 0.2 are _not_
1441 * @param max_allowed_overlap
1442 * maximal allowed overlap (inclusive) to be still considered not
1443 * overlapping (zero or negative value to allow any overlap)
1444 * @param remove_engulfed_domains
1445 * to remove domains which are completely engulfed by coverage of
1446 * domains with better support
1450 public static Protein removeOverlappingDomains( final int max_allowed_overlap,
1451 final boolean remove_engulfed_domains,
1452 final Protein protein ) {
1453 final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(), protein.getSpecies()
1454 .getSpeciesId(), protein.getLength() );
1455 final List<Domain> sorted = SurfacingUtil.sortDomainsWithAscendingConfidenceValues( protein );
// covered_positions[i] == true once some kept domain spans residue position i.
1456 final List<Boolean> covered_positions = new ArrayList<Boolean>();
1457 for( final Domain domain : sorted ) {
1458 if ( ( ( max_allowed_overlap < 0 ) || ( SurfacingUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
1459 && ( !remove_engulfed_domains || !isEngulfed( domain, covered_positions ) ) ) {
// Pad the coverage list with 'false' up to this domain's start position.
1460 final int covered_positions_size = covered_positions.size();
1461 for( int i = covered_positions_size; i < domain.getFrom(); ++i ) {
1462 covered_positions.add( false );
// Mark the domain's span covered, extending the list where needed.
1464 final int new_covered_positions_size = covered_positions.size();
1465 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1466 if ( i < new_covered_positions_size ) {
1467 covered_positions.set( i, true );
1470 covered_positions.add( true );
1473 pruned_protein.addProteinDomain( domain );
1476 return pruned_protein;
// NOTE(review): elided listing — lines 1482-1483 (loop body adding each domain to the list
// and closer) and 1485-1486 (return and method closer) are missing.
//
// Returns the protein's domains as a new list sorted by ascending confidence value
// (ASCENDING_CONFIDENCE_VALUE_ORDER); the protein's own list is left untouched.
1479 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1480 final List<Domain> domains = new ArrayList<Domain>();
1481 for( final Domain d : protein.getProteinDomains() ) {
1484 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// NOTE(review): elided listing — lines 1492-1494 (rest of the error path) and 1496+ (adding
// the two halves to 'l', return, closer) are missing.
//
// Splits a stringified domain combination of the form "A=B" into its two domain-id parts;
// anything that does not split into exactly two pieces is reported as an error.
1488 private static List<String> splitDomainCombination( final String dc ) {
1489 final String[] s = dc.split( "=" );
1490 if ( s.length != 2 ) {
1491 ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
1495 final List<String> l = new ArrayList<String>( 2 );
// NOTE(review): elided listing — lines 1506/1508 (the guard around switching state to LOSS),
// 1510-1512 (rest of the createBaseDirForPerNodeDomainFiles call), and 1523+ (writer.close(),
// block closers) are missing; the per-file Writer is not visibly closed — confirm in full source.
//
// For every internal node of the phylogeny (postorder), collects the domains gained
// (get_gains == true) or lost on that subtree and writes them, one per line, to a file named
// after the node (base dir + node name + suffix_for_filename).
1501 public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
1502 final boolean get_gains,
1503 final String outdir,
1504 final String suffix_for_filename ) throws IOException {
1505 CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
1507 state = CharacterStateMatrix.GainLossStates.LOSS;
1509 final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
1513 for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
1514 final PhylogenyNode node = it.next();
1515 if ( !node.isExternal() ) {
1516 final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
// Only emit a file when the subtree actually changed at least one domain.
1517 if ( domains.size() > 0 ) {
1518 final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
1519 + node.getName() + suffix_for_filename );
1520 for( final String domain : domains ) {
1521 writer.write( domain );
1522 writer.write( ForesterUtil.LINE_SEPARATOR );
1530 private static void writeAllEncounteredPfamsToFile( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1531 final Map<GoId, GoTerm> go_id_to_term_map,
1532 final String outfile_name,
1533 final SortedSet<String> all_pfams_encountered ) {
1534 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
1535 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
1536 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
1537 final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
1538 int biological_process_counter = 0;
1539 int cellular_component_counter = 0;
1540 int molecular_function_counter = 0;
1541 int pfams_with_mappings_counter = 0;
1542 int pfams_without_mappings_counter = 0;
1543 int pfams_without_mappings_to_bp_or_mf_counter = 0;
1544 int pfams_with_mappings_to_bp_or_mf_counter = 0;
1546 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
1547 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
1548 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
1549 summary_writer.write( "# Pfam to GO mapping summary" );
1550 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1551 summary_writer.write( "# Actual summary is at the end of this file." );
1552 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1553 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
1554 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1555 for( final String pfam : all_pfams_encountered ) {
1556 all_pfams_encountered_writer.write( pfam );
1557 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
1558 final DomainId domain_id = new DomainId( pfam );
1559 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
1560 ++pfams_with_mappings_counter;
1561 all_pfams_encountered_with_go_annotation_writer.write( pfam );
1562 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
1563 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
1564 boolean maps_to_bp = false;
1565 boolean maps_to_cc = false;
1566 boolean maps_to_mf = false;
1567 for( final GoId go_id : go_ids ) {
1568 final GoTerm go_term = go_id_to_term_map.get( go_id );
1569 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
1572 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
1575 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
1580 ++biological_process_counter;
1583 ++cellular_component_counter;
1586 ++molecular_function_counter;
1588 if ( maps_to_bp || maps_to_mf ) {
1589 ++pfams_with_mappings_to_bp_or_mf_counter;
1592 ++pfams_without_mappings_to_bp_or_mf_counter;
1596 ++pfams_without_mappings_to_bp_or_mf_counter;
1597 ++pfams_without_mappings_counter;
1598 summary_writer.write( pfam );
1599 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1602 all_pfams_encountered_writer.close();
1603 all_pfams_encountered_with_go_annotation_writer.close();
1604 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
1605 + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
1606 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
1607 + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
1609 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
1610 + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
1611 + encountered_pfams_summary_file + "\"" );
1612 ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered : "
1613 + all_pfams_encountered.size() );
1614 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping : "
1615 + pfams_without_mappings_counter + " ["
1616 + ( 100 * pfams_without_mappings_counter / all_pfams_encountered.size() ) + "%]" );
1617 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
1618 + pfams_without_mappings_to_bp_or_mf_counter + " ["
1619 + ( 100 * pfams_without_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1620 ForesterUtil.programMessage( surfacing.PRG_NAME,
1621 "Pfams with a mapping : " + pfams_with_mappings_counter
1623 + ( 100 * pfams_with_mappings_counter / all_pfams_encountered.size() )
1625 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func. : "
1626 + pfams_with_mappings_to_bp_or_mf_counter + " ["
1627 + ( 100 * pfams_with_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1628 ForesterUtil.programMessage( surfacing.PRG_NAME,
1629 "Pfams with mapping to biological process: " + biological_process_counter
1631 + ( 100 * biological_process_counter / all_pfams_encountered.size() )
1633 ForesterUtil.programMessage( surfacing.PRG_NAME,
1634 "Pfams with mapping to molecular function: " + molecular_function_counter
1636 + ( 100 * molecular_function_counter / all_pfams_encountered.size() )
1638 ForesterUtil.programMessage( surfacing.PRG_NAME,
1639 "Pfams with mapping to cellular component: " + cellular_component_counter
1641 + ( 100 * cellular_component_counter / all_pfams_encountered.size() )
1643 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1644 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
1645 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1646 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter
1647 + " [" + ( 100 * pfams_without_mappings_counter / all_pfams_encountered.size() ) + "%]" );
1648 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1649 summary_writer.write( "# Pfams without mapping to proc. or func. : "
1650 + pfams_without_mappings_to_bp_or_mf_counter + " ["
1651 + ( 100 * pfams_without_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1652 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1653 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
1654 + ( 100 * pfams_with_mappings_counter / all_pfams_encountered.size() ) + "%]" );
1655 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1656 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
1657 + pfams_with_mappings_to_bp_or_mf_counter + " ["
1658 + ( 100 * pfams_with_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1659 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1660 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
1661 + ( 100 * biological_process_counter / all_pfams_encountered.size() ) + "%]" );
1662 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1663 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
1664 + ( 100 * molecular_function_counter / all_pfams_encountered.size() ) + "%]" );
1665 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1666 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
1667 + ( 100 * cellular_component_counter / all_pfams_encountered.size() ) + "%]" );
1668 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1669 summary_writer.close();
1671 catch ( final IOException e ) {
1672 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes all binary domain combinations of one genome to a DOT-format file
// (suffix DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS) for graph
// analysis, then reports success via ForesterUtil.programMessage.
// NOTE(review): this listing has gaps in the embedded numbering (e.g. 1679,
// 1685, 1688, 1692, 1694-1696, 1699 missing) — the parameter declaring `i`,
// the try statement, stream close, and closing braces are not visible here.
1676 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1677 final File output_dir,
1678 final GenomeWideCombinableDomains gwcd,
1680 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
// Output path = <input name> + suffix, optionally placed under output_dir.
1681 File dc_outfile_dot = new File( input_file_properties[ i ][ 0 ]
1682 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1683 if ( output_dir != null ) {
1684 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
// Fails fast (fatal) if the target file cannot be written.
1686 checkForOutputFileWriteability( dc_outfile_dot );
1687 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1689 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
1690 for( final BinaryDomainCombination bdc : binary_combinations ) {
// One DOT-language statement per binary domain combination.
1691 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1693 out_dot.write( SurfacingConstants.NL );
1697 catch ( final IOException e ) {
// I/O problems are treated as fatal for this output file.
1698 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1700 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
1701 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1702 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
// Writes, per (sorted) identifier of a gain/loss character-state matrix, the
// list of characters whose state matches `state`; a null `state` selects
// characters that are GAIN or UNCHANGED_PRESENT (i.e. "present").
// Optional `descriptions` text is appended after each matching character.
// NOTE(review): numbering gaps (1716-1717, 1724, 1732, 1734, 1736-1741, 1744)
// hide the try statement, some braces, and the writer close.
1705 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1706 final CharacterStateMatrix.GainLossStates state,
1707 final String filename,
1708 final String indentifier_characters_separator,
1709 final String character_separator,
1710 final Map<String, String> descriptions ) {
1711 final File outfile = new File( filename );
1712 checkForOutputFileWriteability( outfile );
// Identifiers are emitted in sorted order, not matrix order.
1713 final SortedSet<String> sorted_ids = new TreeSet<String>();
1714 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1715 sorted_ids.add( matrix.getIdentifier( i ) );
1718 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1719 for( final String id : sorted_ids ) {
// Each identifier is written as a "#<id>" header between separators.
1720 out.write( indentifier_characters_separator );
1721 out.write( "#" + id );
1722 out.write( indentifier_characters_separator );
1723 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1725 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1726 if ( ( matrix.getState( id, c ) == state )
1727 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1728 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1729 out.write( matrix.getCharacter( c ) );
1730 if ( ( descriptions != null ) && !descriptions.isEmpty()
1731 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
1733 out.write( descriptions.get( matrix.getCharacter( c ) ) );
1735 out.write( character_separator );
1742 catch ( final IOException e ) {
1743 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1745 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain
// combinations: each matching matrix character is parsed into a
// BinaryDomainCombination and emitted in the requested graph-describing
// output format (`bc_output_format`) instead of as raw text.
// NOTE(review): numbering gaps (1759-1760, 1767, 1773, 1775, 1778, 1781-1786,
// 1789) hide the try statements, some braces, and the writer close.
1748 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1749 final CharacterStateMatrix.GainLossStates state,
1750 final String filename,
1751 final String indentifier_characters_separator,
1752 final String character_separator,
1753 final BinaryDomainCombination.OutputFormat bc_output_format ) {
1754 final File outfile = new File( filename );
1755 checkForOutputFileWriteability( outfile );
1756 final SortedSet<String> sorted_ids = new TreeSet<String>();
1757 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1758 sorted_ids.add( matrix.getIdentifier( i ) );
1761 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1762 for( final String id : sorted_ids ) {
1763 out.write( indentifier_characters_separator );
1764 out.write( "#" + id );
1765 out.write( indentifier_characters_separator );
1766 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1768 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1769 if ( ( matrix.getState( id, c ) == state )
1770 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1771 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1772 BinaryDomainCombination bdc = null;
// Parsing failures of the character string are fatal.
1774 bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
1776 catch ( final Exception e ) {
1777 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1779 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
1780 out.write( character_separator );
1787 catch ( final IOException e ) {
1788 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1790 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Writes a detailed HTML report of a gain/loss character-state matrix:
// an index table of identifiers (tree-node/species ids), then, per
// identifier, a table of matching characters (Pfam domains or binary domain
// combinations) with their GO annotations (via writeDomainData). Also
// writes one plain-text gain/loss file per tree node, accumulates every
// Pfam seen into `all_pfams_encountered`, and optionally into
// `pfams_gained_or_lost`. A null `state` again selects GAIN or
// UNCHANGED_PRESENT characters.
// NOTE(review): the embedded numbering has many gaps (1810, 1813, 1816,
// 1822-1823, 1827-1828, 1849-1850, 1857, 1863-1864, 1894-1895, 1898, 1900,
// 1913-1917, 1919, 1921-1926, 1929, 1933, 1938-1939, 1942-1947, 1952, 1954,
// 1964-1966, 1969) — try statements, else branches, several arguments of the
// writeDomainData/writeDomainsToIndividualFilePerTreeNode calls, and closing
// braces are not visible in this excerpt.
1793 public static void writeBinaryStatesMatrixToList( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1794 final Map<GoId, GoTerm> go_id_to_term_map,
1795 final GoNameSpace go_namespace_limit,
1796 final boolean domain_combinations,
1797 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1798 final CharacterStateMatrix.GainLossStates state,
1799 final String filename,
1800 final String indentifier_characters_separator,
1801 final String character_separator,
1802 final String title_for_html,
1803 final String prefix_for_html,
1804 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
1805 final SortedSet<String> all_pfams_encountered,
1806 final SortedSet<String> pfams_gained_or_lost,
1807 final String suffix_for_per_node_events_file ) {
// Precondition checks: detailed HTML output needs both mapping tables.
1808 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1809 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
1811 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
1812 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
1814 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1815 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
1817 final File outfile = new File( filename );
1818 checkForOutputFileWriteability( outfile );
1819 final SortedSet<String> sorted_ids = new TreeSet<String>();
1820 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1821 sorted_ids.add( matrix.getIdentifier( i ) );
1824 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
1825 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
1826 domain_combinations,
1829 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1830 File per_node_go_mapped_domain_gain_loss_outfile = null;
1831 int per_node_counter = 0;
// HTML preamble and page title.
1832 out.write( "<html>" );
1833 out.write( SurfacingConstants.NL );
1834 addHtmlHead( out, title_for_html );
1835 out.write( SurfacingConstants.NL );
1836 out.write( "<body>" );
1837 out.write( SurfacingConstants.NL );
1838 out.write( "<h1>" );
1839 out.write( SurfacingConstants.NL );
1840 out.write( title_for_html );
1841 out.write( SurfacingConstants.NL );
1842 out.write( "</h1>" );
1843 out.write( SurfacingConstants.NL );
// First pass: an index table of in-page anchors, one row per identifier.
1844 out.write( "<table>" );
1845 out.write( SurfacingConstants.NL );
1846 for( final String id : sorted_ids ) {
1847 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1848 if ( matcher.matches() ) {
1851 out.write( "<tr>" );
1852 out.write( "<td>" );
1853 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
1854 out.write( "</td>" );
1855 out.write( "</tr>" );
1856 out.write( SurfacingConstants.NL );
1858 out.write( "</table>" );
1859 out.write( SurfacingConstants.NL );
// Second pass: one detailed section per identifier.
1860 for( final String id : sorted_ids ) {
1861 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1862 if ( matcher.matches() ) {
1865 out.write( SurfacingConstants.NL );
1866 out.write( "<h2>" );
1867 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
1868 writeTaxonomyLinks( out, id );
1869 out.write( "</h2>" );
1870 out.write( SurfacingConstants.NL );
1871 out.write( "<table>" );
1872 out.write( SurfacingConstants.NL );
// Column header row: Pfam domain(s) | GO term acc | GO term | GO namespace.
1873 out.write( "<tr>" );
1874 out.write( "<td><b>" );
1875 out.write( "Pfam domain(s)" );
1876 out.write( "</b></td><td><b>" );
1877 out.write( "GO term acc" );
1878 out.write( "</b></td><td><b>" );
1879 out.write( "GO term" );
1880 out.write( "</b></td><td><b>" );
1881 out.write( "GO namespace" );
1882 out.write( "</b></td>" );
1883 out.write( "</tr>" );
1884 out.write( SurfacingConstants.NL );
1885 out.write( "</tr>" );
1886 out.write( SurfacingConstants.NL );
1887 per_node_counter = 0;
// Open a per-tree-node plain-text file only if there is anything to write.
1888 if ( matrix.getNumberOfCharacters() > 0 ) {
1889 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
1890 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
1891 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
1892 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
1893 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
1896 per_node_go_mapped_domain_gain_loss_outfile = null;
1897 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1899 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1901 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1902 if ( ( matrix.getState( id, c ) == state )
1903 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
1904 .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
1905 final String character = matrix.getCharacter( c );
1906 String domain_0 = "";
1907 String domain_1 = "";
// A character containing the separator is a binary domain combination
// ("A=B"); otherwise it is a single domain.
1908 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
1909 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
1910 if ( s.length != 2 ) {
1911 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
1912 + character + "]" );
1918 domain_0 = character;
1920 writeDomainData( domain_id_to_go_ids_map,
1927 character_separator,
1928 domain_id_to_secondary_features_maps,
// Record every Pfam seen (and, if requested, every gained/lost Pfam).
1930 all_pfams_encountered.add( domain_0 );
1931 if ( pfams_gained_or_lost != null ) {
1932 pfams_gained_or_lost.add( domain_0 );
1934 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
1935 all_pfams_encountered.add( domain_1 );
1936 if ( pfams_gained_or_lost != null ) {
1937 pfams_gained_or_lost.add( domain_1 );
1940 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1941 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
1948 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1949 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
// Delete the per-node file again when it ended up empty.
1950 if ( per_node_counter < 1 ) {
1951 per_node_go_mapped_domain_gain_loss_outfile.delete();
1953 per_node_counter = 0;
1955 out.write( "</table>" );
1956 out.write( SurfacingConstants.NL );
1957 out.write( "<hr>" );
1958 out.write( SurfacingConstants.NL );
1959 } // for( final String id : sorted_ids ) {
1960 out.write( "</body>" );
1961 out.write( SurfacingConstants.NL );
1962 out.write( "</html>" );
1963 out.write( SurfacingConstants.NL );
1967 catch ( final IOException e ) {
1968 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1970 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes the domain-combination counts of one genome to its own file and
// appends one tab-separated row of per-genome domain "promiscuity"
// statistics (mean, SD, median, min, max, N, most promiscuous domain(s))
// to the shared `per_genome_domain_promiscuity_statistics_writer`.
// NOTE(review): numbering gaps (1977, 1983, 1985, 1988-1989, 1992, 1994,
// 1999-2000, 2003, 2011, 2013, 2016, 2021-2022) hide the parameter declaring
// `i`, try statements, writer close, and some braces.
1973 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
1974 final File output_dir,
1975 final Writer per_genome_domain_promiscuity_statistics_writer,
1976 final GenomeWideCombinableDomains gwcd,
1978 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1979 File dc_outfile = new File( input_file_properties[ i ][ 0 ]
1980 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1981 if ( output_dir != null ) {
1982 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
1984 checkForOutputFileWriteability( dc_outfile );
1986 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
1987 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
1990 catch ( final IOException e ) {
1991 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1993 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
// Row format: name, mean, SD ("n/a" when N < 2), median, min, max, N, MPDs.
1995 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 0 ] + "\t" );
1996 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
1997 if ( stats.getN() < 2 ) {
1998 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
2001 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
2002 .sampleStandardDeviation() ) + "\t" );
2004 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
2005 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
2006 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
2007 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
2008 final SortedSet<DomainId> mpds = gwcd.getMostPromiscuosDomain();
2009 for( final DomainId mpd : mpds ) {
2010 per_genome_domain_promiscuity_statistics_writer.write( mpd.getId() + " " );
2012 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
2014 catch ( final IOException e ) {
2015 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Status message: the third input-property column is optional.
2017 if ( input_file_properties[ i ].length == 3 ) {
2018 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
2019 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
2020 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
2023 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
2024 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
2025 + dc_outfile + "\"" );
// Emits the HTML table rows for one domain (or binary domain combination
// domain_0/domain_1): one row per GO annotation, with AmiGO-linked GO-id,
// GO term name, and namespace; rows are restricted to `go_namespace_limit`
// when given. GO ids seen are collected into `all_go_ids` (if non-null).
// A domain with no GO annotation at all gets a single empty row.
// NOTE(review): numbering gaps (2032, 2043-2044, 2049, 2055-2056, 2058-2060,
// 2066-2067, 2072-2073, 2075-2077, 2079-2080, 2082, 2085, 2097, 2101, 2103,
// 2107-2109, 2115, 2118-2119, 2131-2133) hide the `out` Writer parameter,
// the d==0/d==1 branch structure, several call arguments, and braces.
2029 private static void writeDomainData( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
2030 final Map<GoId, GoTerm> go_id_to_term_map,
2031 final GoNameSpace go_namespace_limit,
2033 final String domain_0,
2034 final String domain_1,
2035 final String prefix_for_html,
2036 final String character_separator_for_non_html_output,
2037 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
2038 final Set<GoId> all_go_ids ) throws IOException {
2039 boolean any_go_annotation_present = false;
2040 boolean first_has_no_go = false;
2041 int domain_count = 2; // To distinguish between domains and binary domain combinations.
// An empty domain_1 means a single domain, not a combination.
2042 if ( ForesterUtil.isEmpty( domain_1 ) ) {
2045 // The following has a difficult to understand logic.
2046 for( int d = 0; d < domain_count; ++d ) {
2047 List<GoId> go_ids = null;
2048 boolean go_annotation_present = false;
// d == 0: look up GO annotations of the first domain.
2050 final DomainId domain_id = new DomainId( domain_0 );
2051 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2052 go_annotation_present = true;
2053 any_go_annotation_present = true;
2054 go_ids = domain_id_to_go_ids_map.get( domain_id );
2057 first_has_no_go = true;
// d == 1: look up GO annotations of the second domain.
2061 final DomainId domain_id = new DomainId( domain_1 );
2062 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2063 go_annotation_present = true;
2064 any_go_annotation_present = true;
2065 go_ids = domain_id_to_go_ids_map.get( domain_id );
2068 if ( go_annotation_present ) {
// Only the first row carries the domain-id cell; later rows get an
// empty <td> so annotations line up under one domain label.
2069 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
2070 for( final GoId go_id : go_ids ) {
2071 out.write( "<tr>" );
2074 writeDomainIdsToHtml( out,
2078 domain_id_to_secondary_features_maps );
2081 out.write( "<td></td>" );
2083 if ( !go_id_to_term_map.containsKey( go_id ) ) {
2084 throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
2086 final GoTerm go_term = go_id_to_term_map.get( go_id );
2087 if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
2088 // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
2089 final String go_id_str = go_id.getId();
2090 out.write( "<td>" );
2091 out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
2092 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
2093 out.write( "</td><td>" );
2094 out.write( go_term.getName() );
// For combinations, mark which member (0 or 1) the GO term belongs to.
2095 if ( domain_count == 2 ) {
2096 out.write( " (" + d + ")" );
2098 out.write( "</td><td>" );
2099 // out.write( top );
2100 // out.write( "</td><td>" );
2102 out.write( go_term.getGoNameSpace().toShortString() );
2104 out.write( "</td>" );
2105 if ( all_go_ids != null ) {
2106 all_go_ids.add( go_id );
// GO term outside the namespace limit: emit an empty row instead.
2110 out.write( "<td>" );
2111 out.write( "</td><td>" );
2112 out.write( "</td><td>" );
2113 out.write( "</td><td>" );
2114 out.write( "</td>" );
2116 out.write( "</tr>" );
2117 out.write( SurfacingConstants.NL );
2120 } // for( int d = 0; d < domain_count; ++d )
// No GO annotation on either domain: one row with the ids and empty cells.
2121 if ( !any_go_annotation_present ) {
2122 out.write( "<tr>" );
2123 writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
2124 out.write( "<td>" );
2125 out.write( "</td><td>" );
2126 out.write( "</td><td>" );
2127 out.write( "</td><td>" );
2128 out.write( "</td>" );
2129 out.write( "</tr>" );
2130 out.write( SurfacingConstants.NL );
// Writes a single <td> cell containing an optional prefix and a
// Pfam-family-linked anchor for domain_0.
// NOTE(review): numbering gaps (2143-2144, 2147-2148) hide at least one
// brace and any handling of domain_1 / the secondary-features maps; in this
// excerpt those two parameters are accepted but not visibly used.
2134 private static void writeDomainIdsToHtml( final Writer out,
2135 final String domain_0,
2136 final String domain_1,
2137 final String prefix_for_detailed_html,
2138 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps )
2139 throws IOException {
2140 out.write( "<td>" );
2141 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
2142 out.write( prefix_for_detailed_html );
2145 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
2146 out.write( "</td>" );
2149 public static DescriptiveStatistics writeDomainSimilaritiesToFile( final StringBuilder html_desc,
2150 final StringBuilder html_title,
2151 final Writer single_writer,
2152 Map<Character, Writer> split_writers,
2153 final SortedSet<DomainSimilarity> similarities,
2154 final boolean treat_as_binary,
2155 final List<Species> species_order,
2156 final PrintableDomainSimilarity.PRINT_OPTION print_option,
2157 final DomainSimilarity.DomainSimilaritySortField sort_field,
2158 final DomainSimilarity.DomainSimilarityScoring scoring,
2159 final boolean verbose ) throws IOException {
2160 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
2161 String histogram_title = null;
2162 switch ( sort_field ) {
2163 case ABS_MAX_COUNTS_DIFFERENCE:
2164 if ( treat_as_binary ) {
2165 histogram_title = "absolute counts difference:";
2168 histogram_title = "absolute (maximal) counts difference:";
2171 case MAX_COUNTS_DIFFERENCE:
2172 if ( treat_as_binary ) {
2173 histogram_title = "counts difference:";
2176 histogram_title = "(maximal) counts difference:";
2180 histogram_title = "score mean:";
2183 histogram_title = "score minimum:";
2186 histogram_title = "score maximum:";
2188 case MAX_DIFFERENCE:
2189 if ( treat_as_binary ) {
2190 histogram_title = "difference:";
2193 histogram_title = "(maximal) difference:";
2197 histogram_title = "score mean:";
2200 histogram_title = "score standard deviation:";
2203 histogram_title = "species number:";
2206 throw new AssertionError( "Unknown sort field: " + sort_field );
2208 for( final DomainSimilarity similarity : similarities ) {
2209 switch ( sort_field ) {
2210 case ABS_MAX_COUNTS_DIFFERENCE:
2211 stats.addValue( Math.abs( similarity.getMaximalDifferenceInCounts() ) );
2213 case MAX_COUNTS_DIFFERENCE:
2214 stats.addValue( similarity.getMaximalDifferenceInCounts() );
2217 stats.addValue( similarity.getMeanSimilarityScore() );
2220 stats.addValue( similarity.getMinimalSimilarityScore() );
2223 stats.addValue( similarity.getMaximalSimilarityScore() );
2225 case MAX_DIFFERENCE:
2226 stats.addValue( similarity.getMaximalDifference() );
2229 stats.addValue( similarity.getMeanSimilarityScore() );
2232 stats.addValue( similarity.getStandardDeviationOfSimilarityScore() );
2235 stats.addValue( similarity.getSpecies().size() );
2238 throw new AssertionError( "Unknown sort field: " + sort_field );
2242 // final HistogramData[] hists = new HistogramData[ 1 ];
2245 // List<HistogramDataItem> data_items = new
2246 // ArrayList<HistogramDataItem>();
2247 // double[] values = stats.getDataAsDoubleArray();
2248 // for( int i = 0; i < values.length; i++ ) {
2249 // HistogramDataItem data_item = new BasicHistogramDataItem( "", values[
2251 // data_items.add( data_item );
2255 // HistogramData hd0 = new HistogramData( "name",
2263 // hists[ 0 ] = hd0;
2265 // final HistogramsFrame hf = new HistogramsFrame( hists );
2266 // hf.setVisible( true );
2268 AsciiHistogram histo = null;
2269 if ( stats.getMin() < stats.getMin() ) {
2270 histo = new AsciiHistogram( stats, histogram_title );
2273 if ( histo != null ) {
2274 System.out.println( histo.toStringBuffer( 20, '|', 40, 5 ) );
2276 System.out.println();
2277 System.out.println( "N : " + stats.getN() );
2278 System.out.println( "Min : " + stats.getMin() );
2279 System.out.println( "Max : " + stats.getMax() );
2280 System.out.println( "Mean : " + stats.arithmeticMean() );
2281 if ( stats.getN() > 1 ) {
2282 System.out.println( "SD : " + stats.sampleStandardDeviation() );
2285 System.out.println( "SD : n/a" );
2287 System.out.println( "Median : " + stats.median() );
2288 if ( stats.getN() > 1 ) {
2289 System.out.println( "Pearsonian skewness : " + stats.pearsonianSkewness() );
2292 System.out.println( "Pearsonian skewness : n/a" );
2295 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
2296 split_writers = new HashMap<Character, Writer>();
2297 split_writers.put( '_', single_writer );
2299 switch ( print_option ) {
2300 case SIMPLE_TAB_DELIMITED:
2303 for( final Character key : split_writers.keySet() ) {
2304 final Writer w = split_writers.get( key );
2305 w.write( "<html>" );
2306 w.write( SurfacingConstants.NL );
2308 addHtmlHead( w, "DCs (" + html_title + ") " + key.toString().toUpperCase() );
2311 addHtmlHead( w, "DCs (" + html_title + ")" );
2313 w.write( SurfacingConstants.NL );
2314 w.write( "<body>" );
2315 w.write( SurfacingConstants.NL );
2316 w.write( html_desc.toString() );
2317 w.write( SurfacingConstants.NL );
2320 w.write( SurfacingConstants.NL );
2321 w.write( "<tt><pre>" );
2322 w.write( SurfacingConstants.NL );
2323 if ( histo != null ) {
2324 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
2325 w.write( SurfacingConstants.NL );
2327 w.write( "</pre></tt>" );
2328 w.write( SurfacingConstants.NL );
2329 w.write( "<table>" );
2330 w.write( SurfacingConstants.NL );
2331 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
2332 w.write( SurfacingConstants.NL );
2333 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
2334 w.write( SurfacingConstants.NL );
2335 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
2336 w.write( SurfacingConstants.NL );
2337 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
2338 w.write( SurfacingConstants.NL );
2339 if ( stats.getN() > 1 ) {
2340 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
2343 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
2345 w.write( SurfacingConstants.NL );
2346 w.write( "<tr><td>Median: </td><td>" + stats.median() + "</td></tr>" );
2347 w.write( SurfacingConstants.NL );
2348 if ( stats.getN() > 1 ) {
2349 w.write( "<tr><td>Pearsonian skewness: </td><td>" + stats.pearsonianSkewness() + "</td></tr>" );
2352 w.write( "<tr><td>Pearsonian skewness: </td><td>n/a</td></tr>" );
2354 w.write( SurfacingConstants.NL );
2355 w.write( "</table>" );
2356 w.write( SurfacingConstants.NL );
2358 w.write( SurfacingConstants.NL );
2360 w.write( SurfacingConstants.NL );
2362 w.write( SurfacingConstants.NL );
2363 w.write( "<table>" );
2364 w.write( SurfacingConstants.NL );
2368 for( final Writer w : split_writers.values() ) {
2369 w.write( SurfacingConstants.NL );
2371 for( final DomainSimilarity similarity : similarities ) {
2372 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2373 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
2375 if ( single_writer != null ) {
2376 single_writer.write( similarity.toStringBuffer( print_option ).toString() );
2379 Writer local_writer = split_writers.get( ( similarity.getDomainId().getId().charAt( 0 ) + "" )
2380 .toLowerCase().charAt( 0 ) );
2381 if ( local_writer == null ) {
2382 local_writer = split_writers.get( '0' );
2384 local_writer.write( similarity.toStringBuffer( print_option ).toString() );
2386 for( final Writer w : split_writers.values() ) {
2387 w.write( SurfacingConstants.NL );
2390 switch ( print_option ) {
2392 for( final Writer w : split_writers.values() ) {
2393 w.write( SurfacingConstants.NL );
2394 w.write( "</table>" );
2395 w.write( SurfacingConstants.NL );
2396 w.write( "</font>" );
2397 w.write( SurfacingConstants.NL );
2398 w.write( "</body>" );
2399 w.write( SurfacingConstants.NL );
2400 w.write( "</html>" );
2401 w.write( SurfacingConstants.NL );
2405 for( final Writer w : split_writers.values() ) {
// Appends one domain (or both members of a binary domain combination) to a
// per-tree-node file, one domain per line; an empty domain_1 means a single
// domain. (Closing brace falls outside this excerpt: lines 2419+ missing.)
2411 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
2412 final String domain_0,
2413 final String domain_1 ) throws IOException {
2414 individual_files_writer.write( domain_0 );
2415 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
2416 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2417 individual_files_writer.write( domain_1 );
2418 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes a character-state matrix to `filename` in the given Format via
// CharacterStateMatrix.toWriter; I/O errors are fatal.
// NOTE(review): numbering gaps (2427, 2430-2432, 2435) hide the try
// statement and the writer flush/close.
2422 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
2423 final String filename,
2424 final Format format ) {
2425 final File outfile = new File( filename );
2426 checkForOutputFileWriteability( outfile );
2428 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
2429 matrix.toWriter( out, format );
2433 catch ( final IOException e ) {
2434 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2436 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Overload: writes a list of distance matrices to one file in PHYLIP
// format, matrices separated by line separators; I/O errors are fatal.
// NOTE(review): numbering gaps (2441, 2446-2449, 2452) hide the try
// statement, the writer flush/close, and braces.
2439 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
2440 checkForOutputFileWriteability( matrix_outfile );
2442 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
2443 for( final DistanceMatrix distance_matrix : matrices ) {
2444 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
2445 out.write( ForesterUtil.LINE_SEPARATOR );
2450 catch ( final IOException e ) {
2451 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2453 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
// Writes each Pfam name on its own line to `outfile_name`; unlike the
// matrix writers above, an IOException is only a warning here, not fatal.
// NOTE(review): numbering gaps (2457, 2462-2463, 2465-2466, 2469+) hide the
// try statement, the writer close, and part of the success message.
2456 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
2458 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
2459 for( final String pfam : pfams ) {
2460 writer.write( pfam );
2461 writer.write( ForesterUtil.LINE_SEPARATOR );
2464 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
2467 catch ( final IOException e ) {
2468 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Serializes a phylogeny to `filename` as phyloXML; on IOException only a
// warning is printed (best-effort, not fatal).
// NOTE(review): numbering gaps (2474, 2476, 2479-2480) hide the try
// statement, part of the warning message, and a brace.
2472 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
2473 final PhylogenyWriter writer = new PhylogenyWriter();
2475 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
2477 catch ( final IOException e ) {
2478 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
2481 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
2484 public static void writeTaxonomyLinks( final Writer writer, final String species ) throws IOException {
2485 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
2486 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( species );
2487 writer.write( " [" );
2488 if ( matcher.matches() ) {
2489 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_LINK + species
2490 + "\" target=\"taxonomy_window\">uniprot</a>" );
2493 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
2494 + "\" target=\"taxonomy_window\">eol</a>" );
2495 writer.write( "|" );
2496 writer.write( "<a href=\"" + SurfacingConstants.TOL_LINK + species
2497 + "\" target=\"taxonomy_window\">tol</a>" );
2499 writer.write( "]" );
2503 private static void writeToNexus( final String outfile_name,
2504 final CharacterStateMatrix<BinaryStates> matrix,
2505 final Phylogeny phylogeny ) {
2506 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
2507 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
2510 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
2511 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
2512 phylogenies.add( phylogeny );
2514 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
2515 w.write( NexusConstants.NEXUS );
2516 w.write( ForesterUtil.LINE_SEPARATOR );
2517 my_matrix.writeNexusTaxaBlock( w );
2518 my_matrix.writeNexusBinaryChractersBlock( w );
2519 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
2522 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
2524 catch ( final IOException e ) {
2525 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2529 private static void writeToNexus( final String outfile_name,
2530 final DomainParsimonyCalculator domain_parsimony,
2531 final Phylogeny phylogeny ) {
2532 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
2533 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
2535 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
2536 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
2540 public static void domainsPerProteinsStatistics( final String genome,
2541 final List<Protein> protein_list,
2542 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
2543 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
2544 final SortedSet<String> domains_which_are_always_single,
2545 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
2546 final SortedSet<String> domains_which_never_single,
2547 final Writer writer ) {
2548 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
2549 for( final Protein protein : protein_list ) {
2550 final int domains = protein.getNumberOfProteinDomains();
2551 //System.out.println( domains );
2552 stats.addValue( domains );
2553 all_genomes_domains_per_potein_stats.addValue( domains );
2554 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
2555 all_genomes_domains_per_potein_histo.put( domains, 1 );
2558 all_genomes_domains_per_potein_histo.put( domains,
2559 1 + all_genomes_domains_per_potein_histo.get( domains ) );
2561 if ( domains == 1 ) {
2562 final String domain = protein.getProteinDomain( 0 ).getDomainId().getId();
2563 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
2564 if ( domains_which_never_single.contains( domain ) ) {
2565 domains_which_never_single.remove( domain );
2566 domains_which_are_sometimes_single_sometimes_not.add( domain );
2569 domains_which_are_always_single.add( domain );
2573 else if ( domains > 1 ) {
2574 for( final Domain d : protein.getProteinDomains() ) {
2575 final String domain = d.getDomainId().getId();
2576 // System.out.println( domain );
2577 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
2578 if ( domains_which_are_always_single.contains( domain ) ) {
2579 domains_which_are_always_single.remove( domain );
2580 domains_which_are_sometimes_single_sometimes_not.add( domain );
2583 domains_which_never_single.add( domain );
2590 writer.write( genome );
2591 writer.write( "\t" );
2592 if ( stats.getN() >= 1 ) {
2593 writer.write( stats.arithmeticMean() + "" );
2594 writer.write( "\t" );
2595 if ( stats.getN() >= 2 ) {
2596 writer.write( stats.sampleStandardDeviation() + "" );
2601 writer.write( "\t" );
2602 writer.write( stats.median() + "" );
2603 writer.write( "\t" );
2604 writer.write( stats.getN() + "" );
2605 writer.write( "\t" );
2606 writer.write( stats.getMin() + "" );
2607 writer.write( "\t" );
2608 writer.write( stats.getMax() + "" );
2611 writer.write( "\t" );
2612 writer.write( "\t" );
2613 writer.write( "\t" );
2614 writer.write( "0" );
2615 writer.write( "\t" );
2616 writer.write( "\t" );
2618 writer.write( "\n" );
2620 catch ( final IOException e ) {
2621 e.printStackTrace();
2625 final static class DomainComparator implements Comparator<Domain> {
2627 final private boolean _ascending;
2629 public DomainComparator( final boolean ascending ) {
2630 _ascending = ascending;
2634 public final int compare( final Domain d0, final Domain d1 ) {
2636 if ( d0.getFrom() < d1.getFrom() ) {
2637 return _ascending ? -1 : 1;
2639 else if ( d0.getFrom() > d1.getFrom() ) {
2640 return _ascending ? 1 : -1;