3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.surfacing;
29 import java.io.BufferedWriter;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.text.DecimalFormat;
35 import java.text.NumberFormat;
36 import java.util.ArrayList;
37 import java.util.Arrays;
38 import java.util.Collections;
39 import java.util.Comparator;
40 import java.util.HashMap;
41 import java.util.HashSet;
42 import java.util.List;
44 import java.util.Map.Entry;
45 import java.util.PriorityQueue;
47 import java.util.SortedMap;
48 import java.util.SortedSet;
49 import java.util.TreeMap;
50 import java.util.TreeSet;
51 import java.util.regex.Matcher;
52 import java.util.regex.Pattern;
54 import org.forester.application.surfacing;
55 import org.forester.evoinference.distance.NeighborJoining;
56 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
57 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
58 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
61 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
62 import org.forester.evoinference.matrix.distance.DistanceMatrix;
63 import org.forester.go.GoId;
64 import org.forester.go.GoNameSpace;
65 import org.forester.go.GoTerm;
66 import org.forester.go.PfamToGoMapping;
67 import org.forester.io.parsers.nexus.NexusConstants;
68 import org.forester.io.writers.PhylogenyWriter;
69 import org.forester.phylogeny.Phylogeny;
70 import org.forester.phylogeny.PhylogenyMethods;
71 import org.forester.phylogeny.PhylogenyNode;
72 import org.forester.phylogeny.PhylogenyNode.NH_CONVERSION_SUPPORT_VALUE_STYLE;
73 import org.forester.phylogeny.data.BinaryCharacters;
74 import org.forester.phylogeny.data.Confidence;
75 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
76 import org.forester.protein.BasicDomain;
77 import org.forester.protein.BasicProtein;
78 import org.forester.protein.BinaryDomainCombination;
79 import org.forester.protein.Domain;
80 import org.forester.protein.DomainId;
81 import org.forester.protein.Protein;
82 import org.forester.species.Species;
83 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
84 import org.forester.surfacing.DomainSimilarityCalculator.GoAnnotationOutput;
85 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
86 import org.forester.util.AsciiHistogram;
87 import org.forester.util.BasicDescriptiveStatistics;
88 import org.forester.util.BasicTable;
89 import org.forester.util.BasicTableParser;
90 import org.forester.util.DescriptiveStatistics;
91 import org.forester.util.ForesterUtil;
// Static utility/helper class for the surfacing application; never instantiated.
public final class SurfacingUtil {

    // Scientific notation with one significant decimal (e.g. "1.2E-5"); used for e-value-like output.
    private final static NumberFormat FORMATTER = new DecimalFormat( "0.0E0" );
    // Fixed-point formatter with three decimals.
    private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
    // Orders domains by ascending per-sequence e-value (smaller e-value = higher confidence first);
    // ties fall back to Domain's natural ordering (compareTo).
    // NOTE(review): the comparator body is truncated in this copy of the file -- the second
    // parameter declaration, the return statements of the two e-value branches and several
    // closing braces are missing. Surviving lines are preserved exactly as found.
    private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {

        public int compare( final Domain d1,
            if ( d1.getPerSequenceEvalue() < d2
                    .getPerSequenceEvalue() ) {
                    .getPerSequenceEvalue() > d2
                    .getPerSequenceEvalue() ) {
            return d1.compareTo( d2 );

    // Matches SwissProt-style species codes: 3 to 5 uppercase letters/digits, whole string.
    public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
    // Purpose not evident from this chunk -- presumably a feature toggle; TODO confirm usage.
    private static final boolean USE_LAST = true;

    // Hidden constructor: utility class with only static members.
    private SurfacingUtil() {
        // Hidden constructor.
123 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
124 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
125 final SortedMap<DomainId, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
126 for( final DomainId domain_id : all_cd.keySet() ) {
127 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
131 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
132 final SortedSet<DomainId> domain_ids ) {
133 final SortedSet<DomainId> domains = genome.getAllDomainIds();
134 for( final DomainId domain : domains ) {
135 domain_ids.add( domain );
    // Writes an HTML document head (title element plus embedded CSS) to the given writer.
    // NOTE(review): this copy appears truncated -- the writes of "<head>" and of the
    // title argument itself (between "<title>" and "</title>") seem to be missing;
    // the title parameter is otherwise unused below. Confirm against the original source.
    public static void addHtmlHead( final Writer w, final String title ) throws IOException {
        w.write( SurfacingConstants.NL );
        w.write( "<title>" );
        w.write( "</title>" );
        w.write( SurfacingConstants.NL );
        w.write( "<style>" );
        w.write( SurfacingConstants.NL );
        // Link styling: visited/unvisited purple, active green, hover white-on-green, no underlines.
        w.write( "a:visited { color : #6633FF; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "a:link { color : #6633FF; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "a:active { color : #99FF00; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        w.write( "a:hover { color : #FFFFFF; background-color : #99FF00; text-decoration : none; }" );
        w.write( SurfacingConstants.NL );
        // Table cells: small Verdana, top-left aligned. Headings: bold blue Verdana.
        w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
        w.write( SurfacingConstants.NL );
        w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
        w.write( SurfacingConstants.NL );
        w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
        w.write( SurfacingConstants.NL );
        w.write( "</style>" );
        w.write( SurfacingConstants.NL );
        w.write( "</head>" );
        w.write( SurfacingConstants.NL );
168 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
169 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
170 for( final DomainSimilarity similarity : similarities ) {
171 stats.addValue( similarity.getMeanSimilarityScore() );
    // Tallies, for each binary domain combination (DC), how many times it was gained
    // independently across the phylogeny (per the gained-characters annotation on each node),
    // then writes: a gain-count histogram, per-count DC lists (plain, for GO mapping, and
    // unique-for-GO-mapping), LCA rank and ancestor-species counts for multiply-gained DCs,
    // and (optionally) protein-length / domain-count / domain-length statistics.
    //
    // NOTE(review): this copy of the method is truncated -- the opening "try {" matching the
    // IOException catch near the end, many closing braces, several "else {" lines, the
    // ".arithmeticMean() );" call continuations, the close() calls for out_counts/out_dc, a
    // "String lca_species" declaration, and a few other statements are missing. All surviving
    // code lines are preserved exactly as found.
    private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
            final String outfilename_for_counts,
            final String outfilename_for_dc,
            final String outfilename_for_dc_for_go_mapping,
            final String outfilename_for_dc_for_go_mapping_unique,
            final String outfilename_for_rank_counts,
            final String outfilename_for_ancestor_species_counts,
            final String outfilename_for_protein_stats,
            final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
            final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
            final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
        // ---- commented-out debug dumps, left as found ----
        // if ( protein_length_stats_by_dc != null ) {
        // for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
        // System.out.print( entry.getKey().toString() );
        // System.out.print( ": " );
        // double[] a = entry.getValue().getDataAsDoubleArray();
        // for( int i = 0; i < a.length; i++ ) {
        // System.out.print( a[ i ] + " " );
        // System.out.println();
        // if ( domain_number_stats_by_dc != null ) {
        // for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
        // System.out.print( entry.getKey().toString() );
        // System.out.print( ": " );
        // double[] a = entry.getValue().getDataAsDoubleArray();
        // for( int i = 0; i < a.length; i++ ) {
        // System.out.print( a[ i ] + " " );
        // System.out.println();
        // Open the four output writers (NOTE(review): the enclosing "try {" for the
        // IOException catch at the bottom appears to be missing here).
        final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
        final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
        final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
        final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
        // DC -> number of tree nodes at which it was gained (postorder walk over all nodes).
        final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
        for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
            final PhylogenyNode n = it.next();
            final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
            for( final String dc : gained_dc ) {
                if ( dc_gain_counts.containsKey( dc ) ) {
                    dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
                    // (else branch of the containsKey test -- first sighting of this DC:)
                    dc_gain_counts.put( dc, 1 );
        // Invert into a histogram: gain-count -> number of DCs gained that many times,
        // plus per-count DC lists and (per-count) protein/domain statistics accumulators.
        final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
        final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
        final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
        final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
        final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
        final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
        final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
        final Set<String> dcs = dc_gain_counts.keySet();
        final SortedSet<String> more_than_once = new TreeSet<String>();
        // Separate aggregate statistics for DCs gained exactly once vs. multiple times.
        final DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
        final DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
        final DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
        final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
        long gained_multiple_times_domain_length_sum = 0;
        long gained_once_domain_length_sum = 0;
        long gained_multiple_times_domain_length_count = 0;
        long gained_once_domain_length_count = 0;
        for( final String dc : dcs ) {
            final int count = dc_gain_counts.get( dc );
            if ( histogram.containsKey( count ) ) {
                histogram.put( count, histogram.get( count ) + 1 );
                domain_lists.get( count ).append( ", " + dc );
                // splitDomainCombination: sibling helper (not visible in this chunk);
                // presumably splits "A=B" into its two domain names -- TODO confirm.
                domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
                domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
                // (else branch -- first DC seen with this gain count:)
                histogram.put( count, 1 );
                domain_lists.put( count, new StringBuilder( dc ) );
                final PriorityQueue<String> q = new PriorityQueue<String>();
                q.addAll( splitDomainCombination( dc ) );
                domain_lists_go.put( count, q );
                final SortedSet<String> set = new TreeSet<String>();
                set.addAll( splitDomainCombination( dc ) );
                domain_lists_go_unique.put( count, set );
            // Per-gain-count protein-length statistics (lazily created accumulator per count).
            if ( protein_length_stats_by_dc != null ) {
                if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
                    dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
                // NOTE(review): the call continuation (likely ".arithmeticMean() );") is missing.
                dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
            // Per-gain-count domain-number statistics.
            if ( domain_number_stats_by_dc != null ) {
                if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
                    dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
                // NOTE(review): call continuation missing here as well.
                dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
            // Per-gain-count domain-length statistics: "A=B" splits into its two member domains.
            if ( domain_length_stats_by_domain != null ) {
                if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
                    dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
                final String[] ds = dc.split( "=" );
                dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                        .get( ds[ 0 ] ).arithmeticMean() );
                dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                        .get( ds[ 1 ] ).arithmeticMean() );
            // (branch for count > 1 -- DC gained multiple times:)
            more_than_once.add( dc );
            if ( protein_length_stats_by_dc != null ) {
                final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                for( final double element : s.getData() ) {
                    gained_multiple_times_lengths_stats.addValue( element );
            if ( domain_number_stats_by_dc != null ) {
                final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                for( final double element : s.getData() ) {
                    gained_multiple_times_domain_count_stats.addValue( element );
            if ( domain_length_stats_by_domain != null ) {
                final String[] ds = dc.split( "=" );
                final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                for( final double element : s0.getData() ) {
                    gained_multiple_times_domain_length_sum += element;
                    ++gained_multiple_times_domain_length_count;
                for( final double element : s1.getData() ) {
                    gained_multiple_times_domain_length_sum += element;
                    ++gained_multiple_times_domain_length_count;
            // (branch for count == 1 -- DC gained exactly once:)
            if ( protein_length_stats_by_dc != null ) {
                final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                for( final double element : s.getData() ) {
                    gained_once_lengths_stats.addValue( element );
            if ( domain_number_stats_by_dc != null ) {
                final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                for( final double element : s.getData() ) {
                    gained_once_domain_count_stats.addValue( element );
            if ( domain_length_stats_by_domain != null ) {
                final String[] ds = dc.split( "=" );
                final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                for( final double element : s0.getData() ) {
                    gained_once_domain_length_sum += element;
                    ++gained_once_domain_length_count;
                for( final double element : s1.getData() ) {
                    gained_once_domain_length_sum += element;
                    ++gained_once_domain_length_count;
        // Emit histogram + per-count DC lists; GO-mapping lists are sorted before output.
        final Set<Integer> histogram_keys = histogram.keySet();
        for( final Integer histogram_key : histogram_keys ) {
            final int count = histogram.get( histogram_key );
            final StringBuilder dc = domain_lists.get( histogram_key );
            out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
            out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
            out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
            final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
            Arrays.sort( sorted );
            for( final Object domain : sorted ) {
                out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
            out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
            for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
                out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
        // NOTE(review): close() calls for out_counts and out_dc appear to be missing here.
        out_dc_for_go_mapping.close();
        out_dc_for_go_mapping_unique.close();
        // For every DC gained more than once: for each pair of external nodes carrying the
        // gain, record the taxonomic rank and species name of their last common ancestor.
        final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
        final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
        for( final String dc : more_than_once ) {
            final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
            for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
                final PhylogenyNode n = it.next();
                if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
            // All unordered pairs of collected nodes:
            for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
                for( int j = i + 1; j < nodes.size(); ++j ) {
                    final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
                    String rank = "unknown";
                    if ( lca.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
                        rank = lca.getNodeData().getTaxonomy().getRank();
                    addToCountMap( lca_rank_counts, rank );
                    // NOTE(review): the declaration of lca_species (with a default value)
                    // appears to be missing in this truncated copy.
                    if ( lca.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
                        lca_species = lca.getNodeData().getTaxonomy().getScientificName();
                    else if ( lca.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
                        lca_species = lca.getNodeData().getTaxonomy().getCommonName();
                        // (fallback branch -- no usable taxonomy; use the node name:)
                        lca_species = lca.getName();
                    addToCountMap( lca_ancestor_species_counts, lca_species );
        // Write the LCA rank and ancestor-species count tables.
        final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
        final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
        ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
        ForesterUtil.map2writer( out_for_ancestor_species_counts,
                lca_ancestor_species_counts,
                ForesterUtil.LINE_SEPARATOR );
        out_for_rank_counts.close();
        out_for_ancestor_species_counts.close();
        // Optional protein/domain statistics report, keyed by gain count.
        if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
                && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
            final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
            w.write( "Domain Lengths: " );
            if ( domain_length_stats_by_domain != null ) {
                // NOTE(review): the ".entrySet() ) {" continuation of this loop header is missing.
                for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
                    w.write( entry.getKey().toString() );
                    w.write( "\t" + entry.getValue().arithmeticMean() );
                    w.write( "\t" + entry.getValue().median() );
            w.write( "Protein Lengths: " );
            if ( protein_length_stats_by_dc != null ) {
                for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
                    w.write( entry.getKey().toString() );
                    w.write( "\t" + entry.getValue().arithmeticMean() );
                    w.write( "\t" + entry.getValue().median() );
            w.write( "Number of domains: " );
            if ( domain_number_stats_by_dc != null ) {
                for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
                    w.write( entry.getKey().toString() );
                    w.write( "\t" + entry.getValue().arithmeticMean() );
                    w.write( "\t" + entry.getValue().median() );
            w.write( "Gained once, domain lengths:" );
            w.write( "N: " + gained_once_domain_length_count );
            w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
            w.write( "Gained multiple times, domain lengths:" );
            w.write( "N: " + gained_multiple_times_domain_length_count );
                    + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
            w.write( "Gained once, protein lengths:" );
            w.write( gained_once_lengths_stats.toString() );
            w.write( "Gained once, domain counts:" );
            w.write( gained_once_domain_count_stats.toString() );
            w.write( "Gained multiple times, protein lengths:" );
            w.write( gained_multiple_times_lengths_stats.toString() );
            w.write( "Gained multiple times, domain counts:" );
            w.write( gained_multiple_times_domain_count_stats.toString() );
        // Best-effort error handling: I/O failures are reported as warnings, not rethrown.
        catch ( final IOException e ) {
            ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
                + outfilename_for_counts + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
                + outfilename_for_dc + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME,
                "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
                        + outfilename_for_dc_for_go_mapping + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME,
                "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
                        + outfilename_for_dc_for_go_mapping_unique + "]" );
508 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
509 if ( map.containsKey( s ) ) {
510 map.put( s, map.get( s ) + 1 );
517 public static int calculateOverlap( final Domain domain, final List<Boolean> covered_positions ) {
518 int overlap_count = 0;
519 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
520 if ( ( i < covered_positions.size() ) && ( covered_positions.get( i ) == true ) ) {
524 return overlap_count;
527 public static void checkForOutputFileWriteability( final File outfile ) {
528 final String error = ForesterUtil.isWritableFile( outfile );
529 if ( !ForesterUtil.isEmpty( error ) ) {
530 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
534 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
535 final boolean get_gains ) {
536 final SortedSet<String> domains = new TreeSet<String>();
537 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
538 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
540 domains.addAll( chars.getGainedCharacters() );
543 domains.addAll( chars.getLostCharacters() );
549 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
550 final BinaryDomainCombination.DomainCombinationType dc_type,
551 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
552 final boolean get_gains ) {
553 final SortedSet<String> sorted_ids = new TreeSet<String>();
554 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
555 sorted_ids.add( matrix.getIdentifier( i ) );
557 for( final String id : sorted_ids ) {
558 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
559 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
560 || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
561 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
562 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
563 .createInstance( matrix.getCharacter( c ) ) );
565 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
566 all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
567 .createInstance( matrix.getCharacter( c ) ) );
570 all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
571 .getCharacter( c ) ) );
578 private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
579 final boolean domain_combinations,
580 final CharacterStateMatrix.GainLossStates state,
581 final String outfile ) {
582 File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
583 + ForesterUtil.FILE_SEPARATOR + base_dir );
584 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
585 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
587 if ( domain_combinations ) {
588 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
589 + ForesterUtil.FILE_SEPARATOR + "DC" );
592 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
593 + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
595 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
596 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
598 if ( state == GainLossStates.GAIN ) {
599 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
600 + ForesterUtil.FILE_SEPARATOR + "GAINS" );
602 else if ( state == GainLossStates.LOSS ) {
603 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
604 + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
607 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
608 + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
610 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
611 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
613 return per_node_go_mapped_domain_gain_loss_files_base_dir;
616 public static Map<DomainId, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
617 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map = new HashMap<DomainId, List<GoId>>( pfam_to_go_mappings
619 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
620 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
621 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
623 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
625 return domain_id_to_go_ids_map;
628 public static Map<DomainId, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
630 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, "\t" );
631 final Map<DomainId, Set<String>> map = new TreeMap<DomainId, Set<String>>();
632 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
633 final DomainId domain_id = new DomainId( primary_table.getValue( 0, r ) );
634 if ( !map.containsKey( domain_id ) ) {
635 map.put( domain_id, new HashSet<String>() );
637 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
642 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
643 checkForOutputFileWriteability( nj_tree_outfile );
644 final NeighborJoining nj = NeighborJoining.createInstance();
645 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
646 phylogeny.setName( nj_tree_outfile.getName() );
647 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
651 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
652 final SortedMap<DomainId, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
653 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
654 for( final DomainId domain_id : cds.keySet() ) {
655 final CombinableDomains cd = cds.get( domain_id );
656 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
658 return binary_combinations;
661 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
662 final Detailedness detailedness,
663 final GoAnnotationOutput go_annotation_output,
664 final Map<GoId, GoTerm> go_id_to_term_map,
665 final GoNameSpace go_namespace_limit ) {
666 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) ) {
667 throw new IllegalArgumentException( "attempt to use a GO namespace limit without a GO id to term map" );
669 for( final DomainSimilarity domain_similarity : domain_similarities ) {
670 if ( domain_similarity instanceof PrintableDomainSimilarity ) {
671 final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
672 printable_domain_similarity.setDetailedness( detailedness );
673 printable_domain_similarity.setGoAnnotationOutput( go_annotation_output );
674 printable_domain_similarity.setGoIdToTermMap( go_id_to_term_map );
675 printable_domain_similarity.setGoNamespaceLimit( go_namespace_limit );
    // Writes a domain-length analysis report to outfile: per-species mean-based statistics,
    // per-domain outlier species (|Z| >= 1.5), an ASCII histogram over all species, and
    // per-species Z-scores (with a filtered |Z| >= 1.0 summary section).
    //
    // NOTE(review): this copy of the method is truncated -- several loop/if closing braces
    // and, apparently, the final out.close() (and trailing program message) are missing.
    // The input_file_properties and number_of_genomes parameters are only referenced by the
    // commented-out histogram-frame code at the bottom. Code lines preserved as found.
    public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
            final int number_of_genomes,
            final DomainLengthsTable domain_lengths_table,
            final File outfile ) throws IOException {
        // Two-decimal format for Z-scores in the filtered outlier section.
        final DecimalFormat df = new DecimalFormat( "#.00" );
        checkForOutputFileWriteability( outfile );
        final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
        out.write( "MEAN BASED STATISTICS PER SPECIES" );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
        // Section: for each domain, species whose mean-based Z-score is >= 1.5.
        out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
        out.write( ForesterUtil.LINE_SEPARATOR );
        for( final DomainLengths domain_lengths : domain_lengths_list ) {
            final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
            if ( species_list.size() > 0 ) {
                out.write( domain_lengths.getDomainId() + "\t" );
                for( final Species species : species_list ) {
                    out.write( species + "\t" );
                out.write( ForesterUtil.LINE_SEPARATOR );
                // DescriptiveStatistics stats_for_domain = domain_lengths
                // .calculateMeanBasedStatistics();
                //AsciiHistogram histo = new AsciiHistogram( stats_for_domain );
                //System.out.println( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( ForesterUtil.LINE_SEPARATOR );
        // Section: Z-score of every species against the all-species distribution.
        out.write( "OUTLIER SPECIES (Z 1.0)" );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final DescriptiveStatistics stats_for_all_species = domain_lengths_table
                .calculateMeanBasedStatisticsForAllSpecies();
        out.write( stats_for_all_species.asSummary() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
        out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
        out.write( ForesterUtil.LINE_SEPARATOR );
        final double population_sd = stats_for_all_species.sampleStandardDeviation();
        final double population_mean = stats_for_all_species.arithmeticMean();
        // All species with their raw Z-scores.
        for( final Species species : domain_lengths_table.getSpecies() ) {
            final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
            final double z = ( x - population_mean ) / population_sd;
            out.write( species + "\t" + z );
            out.write( ForesterUtil.LINE_SEPARATOR );
        out.write( ForesterUtil.LINE_SEPARATOR );
        // Only species with |Z| >= 1.0, with formatted Z and a statistics summary.
        for( final Species species : domain_lengths_table.getSpecies() ) {
            final DescriptiveStatistics stats_for_species = domain_lengths_table
                    .calculateMeanBasedStatisticsForSpecies( species );
            final double x = stats_for_species.arithmeticMean();
            final double z = ( x - population_mean ) / population_sd;
            if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
                out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
                out.write( ForesterUtil.LINE_SEPARATOR );
        // ---- commented-out interactive histogram code, left as found ----
        //        final List<HistogramData> histogram_datas = new ArrayList<HistogramData>();
        //        for( int i = 0; i < number_of_genomes; ++i ) {
        //            final Species species = new BasicSpecies( input_file_properties[ i ][ 0 ] );
        //            .add( new HistogramData( species.toString(), domain_lengths_table
        //                    .calculateMeanBasedStatisticsForSpecies( species )
        //                    .getDataAsDoubleArray(), 5, 600, null, 60 ) );
        //        final HistogramsFrame hf = new HistogramsFrame( histogram_datas );
        //        hf.setVisible( true );
755 * @param all_binary_domains_combination_lost_fitch
756 * @param consider_directedness_and_adjacency_for_bin_combinations
757 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
758 * which were gained under unweighted (Fitch) parsimony.
// Runs Dollo parsimony on domain presence/absence and Fitch parsimony on binary domain
// combinations over the given phylogeny, writing gain/loss matrices, per-character
// gain/loss/present lists (plain text and HTML), per-pfam summary files, and decorated
// trees to files derived from outfile_name.
// NOTE(review): this listing has elided lines (embedded line numbers jump, e.g. 785 -> 788),
// so several arguments, else-branches and closing braces are not visible here. Code kept
// byte-identical; comments only.
760 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
761 final boolean radomize_fitch_parsimony,
762 final String outfile_name,
763 final DomainParsimonyCalculator domain_parsimony,
764 final Phylogeny phylogeny,
765 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
766 final Map<GoId, GoTerm> go_id_to_term_map,
767 final GoNameSpace go_namespace_limit,
768 final String parameters_str,
769 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
770 final SortedSet<DomainId> positive_filter,
771 final boolean output_binary_domain_combinations_for_graphs,
772 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
773 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
774 final BinaryDomainCombination.DomainCombinationType dc_type,
775 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
776 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
777 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
// Accumulators shared by the Dollo (domains) and Fitch (domain combinations) passes below.
778 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
779 final String date_time = ForesterUtil.getCurrentDateTime();
780 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
781 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
782 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
783 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
784 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
785 writeToNexus( outfile_name, domain_parsimony, phylogeny );
// DOLLO DOMAINS: work on a copy so the caller's phylogeny is not mutated.
// A positive filter, when present and non-empty, restricts which domains are analyzed.
788 Phylogeny local_phylogeny_l = phylogeny.copy();
789 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
790 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
// NOTE(review): the else-branch header for the unfiltered case is elided here.
793 domain_parsimony.executeDolloParsimonyOnDomainPresence();
795 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
796 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
797 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
798 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
799 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
800 CharacterStateMatrix.GainLossStates.GAIN,
801 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
803 ForesterUtil.LINE_SEPARATOR,
805 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
806 CharacterStateMatrix.GainLossStates.LOSS,
807 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
809 ForesterUtil.LINE_SEPARATOR,
// Passing a null state writes the "present" characters rather than gains or losses.
811 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
812 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
// HTML variants of the same three lists, with GO annotation and secondary features.
814 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
818 domain_parsimony.getGainLossMatrix(),
819 CharacterStateMatrix.GainLossStates.GAIN,
820 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
822 ForesterUtil.LINE_SEPARATOR,
823 "Dollo Parsimony | Gains | Domains",
825 domain_id_to_secondary_features_maps,
826 all_pfams_encountered,
827 all_pfams_gained_as_domains,
829 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
833 domain_parsimony.getGainLossMatrix(),
834 CharacterStateMatrix.GainLossStates.LOSS,
835 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
837 ForesterUtil.LINE_SEPARATOR,
838 "Dollo Parsimony | Losses | Domains",
840 domain_id_to_secondary_features_maps,
841 all_pfams_encountered,
842 all_pfams_lost_as_domains,
844 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
848 domain_parsimony.getGainLossMatrix(),
850 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
852 ForesterUtil.LINE_SEPARATOR,
853 "Dollo Parsimony | Present | Domains",
855 domain_id_to_secondary_features_maps,
856 all_pfams_encountered,
858 "_dollo_present_d" );
// Decorate and write out the Dollo tree, plus per-subtree gain/loss domain files.
859 preparePhylogeny( local_phylogeny_l,
862 "Dollo parsimony on domain presence/absence",
863 "dollo_on_domains_" + outfile_name,
865 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
866 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
868 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
869 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
// I/O failures during the subtree writes are treated as fatal for the whole program.
871 catch ( final IOException e ) {
873 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
// FITCH pass is only meaningful when at least one binary domain combination exists.
875 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
876 // FITCH DOMAIN COMBINATIONS
877 // -------------------------
878 local_phylogeny_l = phylogeny.copy();
879 String randomization = "no";
880 if ( radomize_fitch_parsimony ) {
881 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
882 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
885 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( USE_LAST );
887 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
888 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
889 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
890 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
892 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
893 CharacterStateMatrix.GainLossStates.GAIN,
894 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
896 ForesterUtil.LINE_SEPARATOR,
898 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
899 CharacterStateMatrix.GainLossStates.LOSS,
901 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
903 ForesterUtil.LINE_SEPARATOR,
905 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
906 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
// Optionally collect gained/lost combinations into the caller-supplied lists.
907 if ( all_binary_domains_combination_gained_fitch != null ) {
908 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
910 all_binary_domains_combination_gained_fitch,
913 if ( all_binary_domains_combination_lost_fitch != null ) {
914 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
916 all_binary_domains_combination_lost_fitch,
// Optional DOT output of present combinations for graph analysis tools.
919 if ( output_binary_domain_combinations_for_graphs ) {
921 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
922 .getGainLossMatrix(),
925 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
927 ForesterUtil.LINE_SEPARATOR,
928 BinaryDomainCombination.OutputFormat.DOT );
931 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
935 domain_parsimony.getGainLossMatrix(),
936 CharacterStateMatrix.GainLossStates.GAIN,
937 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
939 ForesterUtil.LINE_SEPARATOR,
940 "Fitch Parsimony | Gains | Domain Combinations",
943 all_pfams_encountered,
944 all_pfams_gained_as_dom_combinations,
946 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
950 domain_parsimony.getGainLossMatrix(),
951 CharacterStateMatrix.GainLossStates.LOSS,
952 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
954 ForesterUtil.LINE_SEPARATOR,
955 "Fitch Parsimony | Losses | Domain Combinations",
958 all_pfams_encountered,
959 all_pfams_lost_as_dom_combinations,
960 "_fitch_losses_dc" );
961 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
965 domain_parsimony.getGainLossMatrix(),
967 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
969 ForesterUtil.LINE_SEPARATOR,
970 "Fitch Parsimony | Present | Domain Combinations",
973 all_pfams_encountered,
975 "_fitch_present_dc" );
// Per-pfam summary files built from the accumulators filled in above.
976 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
979 all_pfams_encountered );
980 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
981 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
982 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
983 all_pfams_gained_as_dom_combinations );
984 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
985 preparePhylogeny( local_phylogeny_l,
988 "Fitch parsimony on binary domain combination presence/absence randomization: "
990 "fitch_on_binary_domain_combinations_" + outfile_name,
992 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
993 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
// Final step: count combinations gained independently on multiple branches.
994 calculateIndependentDomainCombinationGains( local_phylogeny_l,
996 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
998 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
1000 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
1002 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
1003 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
1004 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
1005 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
1006 protein_length_stats_by_dc,
1007 domain_number_stats_by_dc,
1008 domain_length_stats_by_domain );
// Variant of executeParsimonyAnalysis operating on secondary features mapped onto the
// phylogeny: runs Dollo parsimony on secondary-feature presence/absence, then a Fitch
// pass on the mapped binary domain combinations, writing matrices, state lists and
// decorated trees to outfile_name-based files.
// NOTE(review): lines are elided in this listing (embedded numbering jumps); code kept
// byte-identical, comments only.
1012 public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
1013 final DomainParsimonyCalculator secondary_features_parsimony,
1014 final Phylogeny phylogeny,
1015 final String parameters_str,
1016 final Map<Species, MappingResults> mapping_results_map ) {
1017 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
1018 final String date_time = ForesterUtil.getCurrentDateTime();
1019 System.out.println();
1020 writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
1021 secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
// Work on a copy; the caller's phylogeny is left untouched.
1023 Phylogeny local_phylogeny_copy = phylogeny.copy();
1024 secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
1025 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
1026 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
1027 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
1028 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
1030 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1031 CharacterStateMatrix.GainLossStates.GAIN,
1033 + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
1035 ForesterUtil.LINE_SEPARATOR,
1038 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1039 CharacterStateMatrix.GainLossStates.LOSS,
1041 + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
1043 ForesterUtil.LINE_SEPARATOR,
1046 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1049 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
1051 ForesterUtil.LINE_SEPARATOR,
1053 preparePhylogeny( local_phylogeny_copy,
1054 secondary_features_parsimony,
1056 "Dollo parsimony on secondary feature presence/absence",
1057 "dollo_on_secondary_features_" + outfile_name,
1059 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
1060 + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
1061 // FITCH DOMAIN COMBINATIONS
1062 // -------------------------
// Second pass on a fresh copy; randomization is fixed to "no" in this variant.
1063 local_phylogeny_copy = phylogeny.copy();
1064 final String randomization = "no";
1065 secondary_features_parsimony.executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( USE_LAST );
1066 preparePhylogeny( local_phylogeny_copy,
1067 secondary_features_parsimony,
1069 "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
1071 "fitch_on_binary_domain_combinations_" + outfile_name,
1073 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
1074 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
// The trailing nulls skip the protein/domain statistics outputs in this variant.
1075 calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
1076 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
1077 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
1078 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
1079 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
1080 + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
1081 + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
// Writes, for each protein (optionally limited to one species, case-insensitive) that
// contains the query domains in N-to-C order, a separator-delimited record:
// species id, protein id, each distinct domain id with its total count, then the
// protein description and accession when present and not equal to SurfacingConstants.NONE.
// NOTE(review): lines are elided here (embedded numbering jumps) — in particular the
// Writer parameter 'out' (original line ~1086) and several closing braces are not
// visible. The 'average_protein_lengths_by_dc' parameter is not referenced in any
// visible line. Code kept byte-identical; comments only.
1084 public static void doit( final List<Protein> proteins,
1085 final List<DomainId> query_domain_ids_nc_order,
1087 final String separator,
1088 final String limit_to_species,
1089 final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
1090 for( final Protein protein : proteins ) {
1091 if ( ForesterUtil.isEmpty( limit_to_species )
1092 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1093 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
1094 out.write( protein.getSpecies().getSpeciesId() );
1095 out.write( separator );
1096 out.write( protein.getProteinId().getId() );
1097 out.write( separator );
// Each domain id is written once; duplicates are suppressed via visited_domain_ids.
1099 final Set<DomainId> visited_domain_ids = new HashSet<DomainId>();
1100 boolean first = true;
1101 for( final Domain domain : protein.getProteinDomains() ) {
1102 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
1103 visited_domain_ids.add( domain.getDomainId() );
1110 out.write( domain.getDomainId().getId() );
1112 out.write( "" + domain.getTotalCount() );
1117 out.write( separator );
1118 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1119 .equals( SurfacingConstants.NONE ) ) ) {
1120 out.write( protein.getDescription() );
1122 out.write( separator );
1123 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1124 .equals( SurfacingConstants.NONE ) ) ) {
1125 out.write( protein.getAccession() );
1127 out.write( SurfacingConstants.NL );
// For each protein (optionally limited to one species, case-insensitive) containing the
// query domains in N-to-C order, writes a separator-delimited record: species id,
// protein id, distinct domain ids with total counts, then description and accession
// when present and not SurfacingConstants.NONE.
// NOTE(review): the visible body is identical to doit(...) above except for the missing
// averages map — the Writer parameter 'out' (original line ~1136) and several closing
// braces are elided from this listing. Code kept byte-identical; comments only.
1134 public static void extractProteinNames( final List<Protein> proteins,
1135 final List<DomainId> query_domain_ids_nc_order,
1137 final String separator,
1138 final String limit_to_species ) throws IOException {
1139 for( final Protein protein : proteins ) {
1140 if ( ForesterUtil.isEmpty( limit_to_species )
1141 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1142 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
1143 out.write( protein.getSpecies().getSpeciesId() );
1144 out.write( separator );
1145 out.write( protein.getProteinId().getId() );
1146 out.write( separator );
// Suppress duplicate domain ids; only the first occurrence is written.
1148 final Set<DomainId> visited_domain_ids = new HashSet<DomainId>();
1149 boolean first = true;
1150 for( final Domain domain : protein.getProteinDomains() ) {
1151 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
1152 visited_domain_ids.add( domain.getDomainId() );
1159 out.write( domain.getDomainId().getId() );
1161 out.write( "" + domain.getTotalCount() );
1166 out.write( separator );
1167 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1168 .equals( SurfacingConstants.NONE ) ) ) {
1169 out.write( protein.getDescription() );
1171 out.write( separator );
1172 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1173 .equals( SurfacingConstants.NONE ) ) ) {
1174 out.write( protein.getAccession() );
1176 out.write( SurfacingConstants.NL );
// Per-species overload: for each protein carrying the given domain_id, writes species,
// protein id, domain id, the from-to coordinates of each matching domain passing the
// per-domain E-value cutoff (negative cutoff = no filtering), then the protein's full
// E-value-filtered domain architecture sorted by position, and finally description and
// accession when present and not SurfacingConstants.NONE.
// NOTE(review): lines are elided here (embedded numbering jumps) — the Writer parameter
// 'out' (~original line 1185), the declaration of 'prev_to', and several braces are not
// visible. Code kept byte-identical; comments only.
1183 public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
1184 final DomainId domain_id,
1186 final String separator,
1187 final String limit_to_species,
1188 final double domain_e_cutoff ) throws IOException {
1189 System.out.println( "Per domain E-value: " + domain_e_cutoff );
1190 for( final Species species : protein_lists_per_species.keySet() ) {
1191 System.out.println( species + ":" );
1192 for( final Protein protein : protein_lists_per_species.get( species ) ) {
1193 if ( ForesterUtil.isEmpty( limit_to_species )
1194 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1195 final List<Domain> domains = protein.getProteinDomains( domain_id );
1196 if ( domains.size() > 0 ) {
1197 out.write( protein.getSpecies().getSpeciesId() );
1198 out.write( separator );
1199 out.write( protein.getProteinId().getId() );
1200 out.write( separator );
1201 out.write( domain_id.toString() );
1202 out.write( separator );
// Write coordinates of each matching domain; also prints inter-domain gap
// lengths to stdout (diagnostic output, see 'prev_to' bookkeeping).
1204 for( final Domain domain : domains ) {
1205 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
1207 out.write( domain.getFrom() + "-" + domain.getTo() );
1208 if ( prev_to >= 0 ) {
1209 final int l = domain.getFrom() - prev_to;
1210 System.out.println( l );
1212 prev_to = domain.getTo();
1216 out.write( separator );
// Collect the protein's full (E-value-filtered) domain list and sort by position.
1217 final List<Domain> domain_list = new ArrayList<Domain>();
1218 for( final Domain domain : protein.getProteinDomains() ) {
1219 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
1220 domain_list.add( domain );
1223 final Domain domain_ary[] = new Domain[ domain_list.size() ];
1224 for( int i = 0; i < domain_list.size(); ++i ) {
1225 domain_ary[ i ] = domain_list.get( i );
1227 Arrays.sort( domain_ary, new DomainComparator( true ) );
1229 boolean first = true;
1230 for( final Domain domain : domain_ary ) {
1237 out.write( domain.getDomainId().toString() );
1238 out.write( ":" + domain.getFrom() + "-" + domain.getTo() );
1239 out.write( ":" + domain.getPerDomainEvalue() );
1242 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1243 .equals( SurfacingConstants.NONE ) ) ) {
1244 out.write( protein.getDescription() );
1246 out.write( separator );
1247 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1248 .equals( SurfacingConstants.NONE ) ) ) {
1249 out.write( protein.getAccession() );
1251 out.write( SurfacingConstants.NL );
// Returns the union of all domain ids across the given genome-wide combinable-domain
// sets, as a sorted set.
// NOTE(review): two lines are elided from this listing (the loop's closing brace region);
// code kept byte-identical, comments only.
1259 public static SortedSet<DomainId> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
1260 final SortedSet<DomainId> all_domains_ids = new TreeSet<DomainId>();
1261 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1262 final Set<DomainId> all_domains = gwcd.getAllDomainIds();
1263 // for( final Domain domain : all_domains ) {
1264 all_domains_ids.addAll( all_domains );
1267 return all_domains_ids;
// Counts occurrences of each domain id across all domains of all given proteins and
// returns the counts keyed by domain id, sorted.
// NOTE(review): the else-branch (first occurrence -> put(id, 1), presumably) and
// closing braces are elided from this listing; code kept byte-identical, comments only.
1270 public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
1271 final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
1272 for( final Protein protein_domain_collection : protein_domain_collections ) {
1273 for( final Object name : protein_domain_collection.getProteinDomains() ) {
1274 final BasicDomain protein_domain = ( BasicDomain ) name;
1275 final String id = protein_domain.getDomainId().getId();
1276 if ( map.containsKey( id ) ) {
1277 map.put( id, map.get( id ) + 1 );
// Counts nodes in the phylogeny (post-order) that have neither a name nor a taxonomy
// scientific/common name, appending the parent names of such nodes to 'names'.
// NOTE(review): the counter declaration (~original line 1289), its increment, and the
// return statement are elided from this listing; code kept byte-identical, comments only.
1287 public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
1288 final PhylogenyNodeIterator it = p.iteratorPostorder();
1290 while ( it.hasNext() ) {
1291 final PhylogenyNode n = it.next();
// "Lacking a name" = empty node name AND no usable taxonomy scientific or common name.
1292 if ( ForesterUtil.isEmpty( n.getName() )
1293 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1294 .getScientificName() ) )
1295 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1296 .getCommonName() ) ) ) {
1297 if ( n.getParent() != null ) {
1298 names.append( " " );
1299 names.append( n.getParent().getName() );
// Diagnostic output: prints the external descendants of each unnamed node.
1301 final List l = n.getAllExternalDescendants();
1302 for( final Object object : l ) {
1303 System.out.println( l.toString() );
1312 * Returns true is Domain domain falls in an uninterrupted stretch of
1313 * covered positions.
1316 * @param covered_positions
// NOTE(review): the @param domain tag, closing Javadoc, the early 'return false' inside
// the loop, and the trailing 'return true' are elided from this listing; the visible
// condition flags any position in [from, to] that is out of range or not covered.
// Code kept byte-identical, comments only.
1319 public static boolean isEngulfed( final Domain domain, final List<Boolean> covered_positions ) {
1320 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1321 if ( ( i >= covered_positions.size() ) || ( covered_positions.get( i ) != true ) ) {
// Decorates the phylogeny with the parsimony results and sets a description string
// (method, date, cost, gains, losses, unchanged, parameters), a parsimony-cost
// confidence value, and marks the tree rooted and not rerootable.
// NOTE(review): one parameter line (~original 1332) and the method's closing brace are
// elided from this listing; code kept byte-identical, comments only.
1328 public static void preparePhylogeny( final Phylogeny p,
1329 final DomainParsimonyCalculator domain_parsimony,
1330 final String date_time,
1331 final String method,
1333 final String parameters_str ) {
1334 domain_parsimony.decoratePhylogenyWithDomains( p );
1335 final StringBuilder desc = new StringBuilder();
1336 desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
1337 desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
1338 desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
1339 desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
1340 desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
1341 desc.append( "[Parameters: " + parameters_str + "]" );
1343 p.setDescription( desc.toString() );
1344 p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
1345 p.setRerootable( false );
1346 p.setRooted( true );
1350 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
// Builds one record per unique ordered (N-terminal, C-terminal) domain pair of the
// protein in the column layout documented above, plus per-domain counts; single-domain
// and zero-domain proteins produce one padded record. Returns the records as a
// StringBuffer, one line per record.
// NOTE(review): lines are elided from this listing (Javadoc open/close, else-branches,
// dcs.add(...) and several braces); code kept byte-identical, comments only.
1354 static public StringBuffer proteinToDomainCombinations( final Protein protein,
1355 final String protein_id,
1356 final String separator ) {
1357 final StringBuffer sb = new StringBuffer();
// Precondition checks: a protein without species information cannot be reported.
1358 if ( protein.getSpecies() == null ) {
1359 throw new IllegalArgumentException( "species must not be null" );
1361 if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
1362 throw new IllegalArgumentException( "species id must not be empty" );
1364 final List<Domain> domains = protein.getProteinDomains();
1365 if ( domains.size() > 1 ) {
// Count occurrences of each domain id within this protein.
1366 final Map<String, Integer> counts = new HashMap<String, Integer>();
1367 for( final Domain domain : domains ) {
1368 final String id = domain.getDomainId().getId();
1369 if ( counts.containsKey( id ) ) {
1370 counts.put( id, counts.get( id ) + 1 );
1373 counts.put( id, 1 );
// Enumerate all unordered pairs, orienting each so domain_n starts before domain_c;
// 'dcs' de-duplicates pairs by concatenated domain-id string.
1376 final Set<String> dcs = new HashSet<String>();
1377 for( int i = 1; i < domains.size(); ++i ) {
1378 for( int j = 0; j < i; ++j ) {
1379 Domain domain_n = domains.get( i );
1380 Domain domain_c = domains.get( j );
1381 if ( domain_n.getFrom() > domain_c.getFrom() ) {
1382 domain_n = domains.get( j );
1383 domain_c = domains.get( i );
1385 final String dc = domain_n.getDomainId().getId() + domain_c.getDomainId().getId();
1386 if ( !dcs.contains( dc ) ) {
1388 sb.append( protein.getSpecies() );
1389 sb.append( separator );
1390 sb.append( protein_id );
1391 sb.append( separator );
1392 sb.append( domain_n.getDomainId().getId() );
1393 sb.append( separator );
1394 sb.append( domain_c.getDomainId().getId() );
1395 sb.append( separator );
1396 sb.append( domain_n.getPerDomainEvalue() );
1397 sb.append( separator );
1398 sb.append( domain_c.getPerDomainEvalue() );
1399 sb.append( separator );
1400 sb.append( counts.get( domain_n.getDomainId().getId() ) );
1401 sb.append( separator );
1402 sb.append( counts.get( domain_c.getDomainId().getId() ) );
1403 sb.append( ForesterUtil.LINE_SEPARATOR );
// Single-domain protein: one record with the C-terminal columns left empty.
1408 else if ( domains.size() == 1 ) {
1409 sb.append( protein.getSpecies() );
1410 sb.append( separator );
1411 sb.append( protein_id );
1412 sb.append( separator );
1413 sb.append( domains.get( 0 ).getDomainId().getId() );
1414 sb.append( separator );
1415 sb.append( separator );
1416 sb.append( domains.get( 0 ).getPerDomainEvalue() );
1417 sb.append( separator );
1418 sb.append( separator );
1420 sb.append( separator );
1421 sb.append( ForesterUtil.LINE_SEPARATOR );
// No domains at all: one record with all domain columns empty.
1424 sb.append( protein.getSpecies() );
1425 sb.append( separator );
1426 sb.append( protein_id );
1427 sb.append( separator );
1428 sb.append( separator );
1429 sb.append( separator );
1430 sb.append( separator );
1431 sb.append( separator );
1432 sb.append( separator );
1433 sb.append( ForesterUtil.LINE_SEPARATOR );
1440 * Example regarding engulfment: ------------0.1 ----------0.2 --0.3 =>
1441 * domain with 0.3 is ignored
1443 * -----------0.1 ----------0.2 --0.3 => domain with 0.3 is ignored
1446 * ------------0.1 ----------0.3 --0.2 => domains with 0.3 and 0.2 are _not_
1449 * @param max_allowed_overlap
1450 *            maximal allowed overlap (inclusive) to be still considered not
1451 *            overlapping (zero or negative value to allow any overlap)
1452 * @param remove_engulfed_domains
1453 *            to remove domains which are completely engulfed by coverage of
1454 *            domains with better support
// NOTE(review): parts of the Javadoc (open/close, @param protein, @return) and several
// closing braces inside the method are elided from this listing; code kept
// byte-identical, comments only.
// Returns a new protein containing only those domains (taken in ascending-confidence
// order) whose overlap with previously accepted domains does not exceed
// max_allowed_overlap and which, when remove_engulfed_domains is set, are not fully
// engulfed by already-covered positions.
1458 public static Protein removeOverlappingDomains( final int max_allowed_overlap,
1459 final boolean remove_engulfed_domains,
1460 final Protein protein ) {
1461 final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(), protein.getSpecies()
1462 .getSpeciesId(), protein.getLength() );
1463 final List<Domain> sorted = SurfacingUtil.sortDomainsWithAscendingConfidenceValues( protein );
// covered_positions[i] == true marks residue i as claimed by an accepted domain.
1464 final List<Boolean> covered_positions = new ArrayList<Boolean>();
1465 for( final Domain domain : sorted ) {
1466 if ( ( ( max_allowed_overlap < 0 ) || ( SurfacingUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
1467 && ( !remove_engulfed_domains || !isEngulfed( domain, covered_positions ) ) ) {
// Extend the coverage list with 'false' up to the domain start, then mark
// the domain's own span as covered (set in-range slots, append the rest).
1468 final int covered_positions_size = covered_positions.size();
1469 for( int i = covered_positions_size; i < domain.getFrom(); ++i ) {
1470 covered_positions.add( false );
1472 final int new_covered_positions_size = covered_positions.size();
1473 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1474 if ( i < new_covered_positions_size ) {
1475 covered_positions.set( i, true );
1478 covered_positions.add( true );
1481 pruned_protein.addProteinDomain( domain );
1484 return pruned_protein;
// Returns the protein's domains as a new list sorted by ascending confidence value
// (ASCENDING_CONFIDENCE_VALUE_ORDER comparator).
// NOTE(review): the loop body that adds each domain to 'domains', the loop's closing
// brace, and the return statement are elided from this listing; code kept
// byte-identical, comments only.
1487 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1488 final List<Domain> domains = new ArrayList<Domain>();
1489 for( final Domain d : protein.getProteinDomains() ) {
1492 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// Splits a stringified domain combination of the form "A=B" into its two component
// ids, reporting an error via ForesterUtil when the string does not contain exactly
// one '=' separator.
// NOTE(review): the remainder of the error path, the additions to 'l', and the return
// statement are elided from this listing; code kept byte-identical, comments only.
1496 private static List<String> splitDomainCombination( final String dc ) {
1497 final String[] s = dc.split( "=" );
1498 if ( s.length != 2 ) {
1499 ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
1503 final List<String> l = new ArrayList<String>( 2 );
// For every internal node of the phylogeny, collects the domains gained (get_gains ==
// true) or lost (false) on that subtree and writes them, one per line, to a file named
// after the node (plus suffix_for_filename) inside a per-run base directory.
// NOTE(review): lines are elided from this listing (e.g. the 'if ( !get_gains )' guard
// around the LOSS assignment, the remaining createBaseDirForPerNodeDomainFiles
// arguments, the writer.close() call, and closing braces); code kept byte-identical,
// comments only.
1509 public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
1510 final boolean get_gains,
1511 final String outdir,
1512 final String suffix_for_filename ) throws IOException {
1513 CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
1515 state = CharacterStateMatrix.GainLossStates.LOSS;
1517 final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
// One output file per internal node that has at least one changed domain.
1521 for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
1522 final PhylogenyNode node = it.next();
1523 if ( !node.isExternal() ) {
1524 final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
1525 if ( domains.size() > 0 ) {
1526 final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
1527 + node.getName() + suffix_for_filename );
1528 for( final String domain : domains ) {
1529 writer.write( domain );
1530 writer.write( ForesterUtil.LINE_SEPARATOR );
1538 private static void writeAllEncounteredPfamsToFile( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1539 final Map<GoId, GoTerm> go_id_to_term_map,
1540 final String outfile_name,
1541 final SortedSet<String> all_pfams_encountered ) {
1542 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
1543 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
1544 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
1545 final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
1546 int biological_process_counter = 0;
1547 int cellular_component_counter = 0;
1548 int molecular_function_counter = 0;
1549 int pfams_with_mappings_counter = 0;
1550 int pfams_without_mappings_counter = 0;
1551 int pfams_without_mappings_to_bp_or_mf_counter = 0;
1552 int pfams_with_mappings_to_bp_or_mf_counter = 0;
1554 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
1555 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
1556 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
1557 summary_writer.write( "# Pfam to GO mapping summary" );
1558 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1559 summary_writer.write( "# Actual summary is at the end of this file." );
1560 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1561 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
1562 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1563 for( final String pfam : all_pfams_encountered ) {
1564 all_pfams_encountered_writer.write( pfam );
1565 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
1566 final DomainId domain_id = new DomainId( pfam );
1567 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
1568 ++pfams_with_mappings_counter;
1569 all_pfams_encountered_with_go_annotation_writer.write( pfam );
1570 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
1571 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
1572 boolean maps_to_bp = false;
1573 boolean maps_to_cc = false;
1574 boolean maps_to_mf = false;
1575 for( final GoId go_id : go_ids ) {
1576 final GoTerm go_term = go_id_to_term_map.get( go_id );
1577 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
1580 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
1583 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
1588 ++biological_process_counter;
1591 ++cellular_component_counter;
1594 ++molecular_function_counter;
1596 if ( maps_to_bp || maps_to_mf ) {
1597 ++pfams_with_mappings_to_bp_or_mf_counter;
1600 ++pfams_without_mappings_to_bp_or_mf_counter;
1604 ++pfams_without_mappings_to_bp_or_mf_counter;
1605 ++pfams_without_mappings_counter;
1606 summary_writer.write( pfam );
1607 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1610 all_pfams_encountered_writer.close();
1611 all_pfams_encountered_with_go_annotation_writer.close();
1612 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
1613 + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
1614 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
1615 + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
1617 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
1618 + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
1619 + encountered_pfams_summary_file + "\"" );
1620 ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered : "
1621 + all_pfams_encountered.size() );
1622 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping : "
1623 + pfams_without_mappings_counter + " ["
1624 + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
1625 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
1626 + pfams_without_mappings_to_bp_or_mf_counter + " ["
1627 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
1628 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping : "
1629 + pfams_with_mappings_counter + " ["
1630 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
1631 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func. : "
1632 + pfams_with_mappings_to_bp_or_mf_counter + " ["
1633 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
1634 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to biological process: "
1635 + biological_process_counter + " ["
1636 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
1637 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to molecular function: "
1638 + molecular_function_counter + " ["
1639 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
1640 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to cellular component: "
1641 + cellular_component_counter + " ["
1642 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
1643 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1644 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
1645 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1646 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter
1647 + " [" + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
1648 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1649 summary_writer.write( "# Pfams without mapping to proc. or func. : "
1650 + pfams_without_mappings_to_bp_or_mf_counter + " ["
1651 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
1652 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1653 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
1654 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
1655 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1656 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
1657 + pfams_with_mappings_to_bp_or_mf_counter + " ["
1658 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
1659 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1660 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
1661 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
1662 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1663 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
1664 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
1665 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1666 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
1667 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
1668 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1669 summary_writer.close();
1671 catch ( final IOException e ) {
1672 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes, for genome i of input_file_properties, all binary domain combinations
// contained in gwcd to a DOT-language file for external graph analysis.
// Aborts fatally on I/O failure; reports the written file on success.
// NOTE(review): the index "i" used below appears to be a parameter declared on a
// line not visible in this extract (internal line 1679), and several closing
// braces are likewise missing here -- verify against the full source.
// NOTE(review): dc_sort_order is not used in the visible portion of this method.
1676 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1677 final File output_dir,
1678 final GenomeWideCombinableDomains gwcd,
1680 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
// Output file name: <genome name> + DOT suffix, optionally placed under output_dir.
1681 File dc_outfile_dot = new File( input_file_properties[ i ][ 0 ]
1682 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1683 if ( output_dir != null ) {
1684 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
// Fail fast if the output file cannot be written.
1686 checkForOutputFileWriteability( dc_outfile_dot );
1687 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1689 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
// One DOT statement per binary domain combination, one per line.
1690 for( final BinaryDomainCombination bdc : binary_combinations ) {
1691 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1693 out_dot.write( SurfacingConstants.NL );
1697 catch ( final IOException e ) {
1698 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1700 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
1701 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1702 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
// Writes, for each identifier of "matrix" (in sorted order), the list of
// characters whose gain/loss state equals "state"; a null state selects
// GAIN plus UNCHANGED_PRESENT (i.e. everything present at that node).
// Each character may be followed by its description from "descriptions"
// (when available). Aborts fatally on I/O failure.
// NOTE(review): some closing braces and the writer close/flush lines are not
// visible in this extract -- verify against the full source.
1705 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1706 final CharacterStateMatrix.GainLossStates state,
1707 final String filename,
1708 final String indentifier_characters_separator,
1709 final String character_separator,
1710 final Map<String, String> descriptions ) {
1711 final File outfile = new File( filename );
1712 checkForOutputFileWriteability( outfile );
// Sort identifiers for deterministic output order.
1713 final SortedSet<String> sorted_ids = new TreeSet<String>();
1714 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1715 sorted_ids.add( matrix.getIdentifier( i ) );
1718 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1719 for( final String id : sorted_ids ) {
1720 out.write( indentifier_characters_separator );
1721 out.write( "#" + id );
1722 out.write( indentifier_characters_separator );
1723 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1725 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1726 if ( ( matrix.getState( id, c ) == state )
1727 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1728 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1729 out.write( matrix.getCharacter( c ) );
// Append the (optional) human-readable description for this character.
1730 if ( ( descriptions != null ) && !descriptions.isEmpty()
1731 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
1733 out.write( descriptions.get( matrix.getCharacter( c ) ) );
1735 out.write( character_separator );
1742 catch ( final IOException e ) {
1743 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1745 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain
// combinations: each selected character is parsed into a
// BinaryDomainCombination and written in the requested graph-describing
// output format (e.g. DOT) for external graph analysis. Aborts fatally on
// I/O failure or if a character cannot be parsed.
// NOTE(review): some braces and the writer close/flush lines are not visible
// in this extract -- verify against the full source.
1748 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1749 final CharacterStateMatrix.GainLossStates state,
1750 final String filename,
1751 final String indentifier_characters_separator,
1752 final String character_separator,
1753 final BinaryDomainCombination.OutputFormat bc_output_format ) {
1754 final File outfile = new File( filename );
1755 checkForOutputFileWriteability( outfile );
// Sort identifiers for deterministic output order.
1756 final SortedSet<String> sorted_ids = new TreeSet<String>();
1757 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1758 sorted_ids.add( matrix.getIdentifier( i ) );
1761 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1762 for( final String id : sorted_ids ) {
1763 out.write( indentifier_characters_separator );
1764 out.write( "#" + id );
1765 out.write( indentifier_characters_separator );
1766 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1768 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1769 if ( ( matrix.getState( id, c ) == state )
1770 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1771 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1772 BinaryDomainCombination bdc = null;
1774 bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
1776 catch ( final Exception e ) {
1777 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1779 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
1780 out.write( character_separator );
1787 catch ( final IOException e ) {
1788 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1790 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Writes a detailed, GO-annotated HTML report of the characters (single
// domains or binary domain combinations) of "matrix" that are in the given
// gain/loss "state" (null selects GAIN plus UNCHANGED_PRESENT): a linked
// index table, then one section per identifier with a Pfam/GO table, plus
// one plain-text file per tree node (deleted again if it stays empty).
// Also accumulates every encountered Pfam into all_pfams_encountered and
// (optionally) pfams_gained_or_lost. Validates up front that the required
// Pfam-to-GO and GO-id-to-term maps are present.
// NOTE(review): several lines (else branches, some call arguments, braces)
// are not visible in this extract -- verify against the full source.
1793 public static void writeBinaryStatesMatrixToList( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1794 final Map<GoId, GoTerm> go_id_to_term_map,
1795 final GoNameSpace go_namespace_limit,
1796 final boolean domain_combinations,
1797 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1798 final CharacterStateMatrix.GainLossStates state,
1799 final String filename,
1800 final String indentifier_characters_separator,
1801 final String character_separator,
1802 final String title_for_html,
1803 final String prefix_for_html,
1804 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
1805 final SortedSet<String> all_pfams_encountered,
1806 final SortedSet<String> pfams_gained_or_lost,
1807 final String suffix_for_per_node_events_file ) {
// Precondition checks: a GO namespace limit is meaningless without the maps.
1808 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1809 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
1811 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
1812 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
1814 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1815 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
1817 final File outfile = new File( filename );
1818 checkForOutputFileWriteability( outfile );
// Sort identifiers for deterministic section order.
1819 final SortedSet<String> sorted_ids = new TreeSet<String>();
1820 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1821 sorted_ids.add( matrix.getIdentifier( i ) );
1824 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
// Base directory for the one-file-per-tree-node gain/loss listings.
1825 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
1826 domain_combinations,
1829 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1830 File per_node_go_mapped_domain_gain_loss_outfile = null;
1831 int per_node_counter = 0;
// HTML preamble and page title.
1832 out.write( "<html>" );
1833 out.write( SurfacingConstants.NL );
1834 addHtmlHead( out, title_for_html );
1835 out.write( SurfacingConstants.NL );
1836 out.write( "<body>" );
1837 out.write( SurfacingConstants.NL );
1838 out.write( "<h1>" );
1839 out.write( SurfacingConstants.NL );
1840 out.write( title_for_html );
1841 out.write( SurfacingConstants.NL );
1842 out.write( "</h1>" );
1843 out.write( SurfacingConstants.NL );
// Index table: one anchor link per identifier.
1844 out.write( "<table>" );
1845 out.write( SurfacingConstants.NL );
1846 for( final String id : sorted_ids ) {
1847 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1848 if ( matcher.matches() ) {
1851 out.write( "<tr>" );
1852 out.write( "<td>" );
1853 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
1854 out.write( "</td>" );
1855 out.write( "</tr>" );
1856 out.write( SurfacingConstants.NL );
1858 out.write( "</table>" );
1859 out.write( SurfacingConstants.NL );
// Detailed per-identifier sections.
1860 for( final String id : sorted_ids ) {
1861 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1862 if ( matcher.matches() ) {
1865 out.write( SurfacingConstants.NL );
1866 out.write( "<h2>" );
1867 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
1868 writeTaxonomyLinks( out, id );
1869 out.write( "</h2>" );
1870 out.write( SurfacingConstants.NL );
1871 out.write( "<table>" );
1872 out.write( SurfacingConstants.NL );
// Table header row for this section.
1873 out.write( "<tr>" );
1874 out.write( "<td><b>" );
1875 out.write( "Pfam domain(s)" );
1876 out.write( "</b></td><td><b>" );
1877 out.write( "GO term acc" );
1878 out.write( "</b></td><td><b>" );
1879 out.write( "GO term" );
1880 out.write( "</b></td><td><b>" );
1881 out.write( "GO namespace" );
1882 out.write( "</b></td>" );
1883 out.write( "</tr>" );
1884 out.write( SurfacingConstants.NL );
1885 out.write( "</tr>" );
1886 out.write( SurfacingConstants.NL );
1887 per_node_counter = 0;
// Open a per-node listing file only when there is at least one character.
1888 if ( matrix.getNumberOfCharacters() > 0 ) {
1889 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
1890 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
1891 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
1892 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
1893 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
1896 per_node_go_mapped_domain_gain_loss_outfile = null;
1897 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1899 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1901 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1902 if ( ( matrix.getState( id, c ) == state )
1903 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
1904 .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
1905 final String character = matrix.getCharacter( c );
1906 String domain_0 = "";
1907 String domain_1 = "";
// A character containing the separator is a binary domain combination.
1908 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
1909 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
1910 if ( s.length != 2 ) {
1911 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
1912 + character + "]" );
1918 domain_0 = character;
1920 writeDomainData( domain_id_to_go_ids_map,
1927 character_separator,
1928 domain_id_to_secondary_features_maps,
// Record every Pfam seen, and (optionally) the gained-or-lost ones.
1930 all_pfams_encountered.add( domain_0 );
1931 if ( pfams_gained_or_lost != null ) {
1932 pfams_gained_or_lost.add( domain_0 );
1934 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
1935 all_pfams_encountered.add( domain_1 );
1936 if ( pfams_gained_or_lost != null ) {
1937 pfams_gained_or_lost.add( domain_1 );
1940 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1941 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
// Close the per-node file; delete it again if nothing was written.
1948 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1949 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
1950 if ( per_node_counter < 1 ) {
1951 per_node_go_mapped_domain_gain_loss_outfile.delete();
1953 per_node_counter = 0;
1955 out.write( "</table>" );
1956 out.write( SurfacingConstants.NL );
1957 out.write( "<hr>" );
1958 out.write( SurfacingConstants.NL );
1959 } // for( final String id : sorted_ids ) {
// HTML postamble.
1960 out.write( "</body>" );
1961 out.write( SurfacingConstants.NL );
1962 out.write( "</html>" );
1963 out.write( SurfacingConstants.NL );
1967 catch ( final IOException e ) {
1968 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1970 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes the domain combination counts of one genome (gwcd) to its own file
// and appends one row of per-genome domain promiscuity statistics (mean, SD,
// median, min, max, N, most promiscuous domains) to the shared
// per_genome_domain_promiscuity_statistics_writer. Aborts fatally on I/O
// failure; reports the written file on success.
// NOTE(review): the index "i" used below appears to be a parameter declared
// on a line not visible in this extract (internal line 1977), and several
// braces/else lines are likewise missing -- verify against the full source.
1973 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
1974 final File output_dir,
1975 final Writer per_genome_domain_promiscuity_statistics_writer,
1976 final GenomeWideCombinableDomains gwcd,
1978 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
// Output file: <genome name> + counts suffix, optionally placed under output_dir.
1979 File dc_outfile = new File( input_file_properties[ i ][ 0 ]
1980 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1981 if ( output_dir != null ) {
1982 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
1984 checkForOutputFileWriteability( dc_outfile );
1986 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
1987 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
1990 catch ( final IOException e ) {
1991 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// One tab-separated statistics row for this genome.
1993 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
1995 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 0 ] + "\t" );
1996 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
// Sample standard deviation is undefined for fewer than two data points.
1997 if ( stats.getN() < 2 ) {
1998 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
2001 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
2002 .sampleStandardDeviation() ) + "\t" );
2004 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
2005 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
2006 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
2007 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
2008 final SortedSet<DomainId> mpds = gwcd.getMostPromiscuosDomain();
2009 for( final DomainId mpd : mpds ) {
2010 per_genome_domain_promiscuity_statistics_writer.write( mpd.getId() + " " );
2012 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
2014 catch ( final IOException e ) {
2015 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Progress message; its form depends on how many properties the genome has.
2017 if ( input_file_properties[ i ].length == 3 ) {
2018 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
2019 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
2020 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
2023 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
2024 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
2025 + dc_outfile + "\"" );
// Writes the HTML rows for one domain (domain_1 empty) or one binary domain
// combination (both domains set): one row per associated GO id with AmiGO
// accession link, term name and namespace (optionally filtered by
// go_namespace_limit); a single empty row if neither domain carries any GO
// annotation. Written GO ids are collected into all_go_ids (if non-null).
// Throws IllegalArgumentException for a GO id absent from go_id_to_term_map.
// NOTE(review): the Writer "out" used below is presumably a parameter
// declared on a line not visible in this extract (internal line 2032), and
// several else branches/braces are likewise missing -- verify against the
// full source.
2029 private static void writeDomainData( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
2030 final Map<GoId, GoTerm> go_id_to_term_map,
2031 final GoNameSpace go_namespace_limit,
2033 final String domain_0,
2034 final String domain_1,
2035 final String prefix_for_html,
2036 final String character_separator_for_non_html_output,
2037 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
2038 final Set<GoId> all_go_ids ) throws IOException {
2039 boolean any_go_annotation_present = false;
2040 boolean first_has_no_go = false;
2041 int domain_count = 2; // To distinguish between domains and binary domain combinations.
2042 if ( ForesterUtil.isEmpty( domain_1 ) ) {
2045 // The following has a difficult to understand logic.
// d == 0 looks up domain_0, d == 1 looks up domain_1.
2046 for( int d = 0; d < domain_count; ++d ) {
2047 List<GoId> go_ids = null;
2048 boolean go_annotation_present = false;
2050 final DomainId domain_id = new DomainId( domain_0 );
2051 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2052 go_annotation_present = true;
2053 any_go_annotation_present = true;
2054 go_ids = domain_id_to_go_ids_map.get( domain_id );
2057 first_has_no_go = true;
2061 final DomainId domain_id = new DomainId( domain_1 );
2062 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2063 go_annotation_present = true;
2064 any_go_annotation_present = true;
2065 go_ids = domain_id_to_go_ids_map.get( domain_id );
2068 if ( go_annotation_present ) {
// The domain-id cell is only written for the first row of the block.
2069 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
2070 for( final GoId go_id : go_ids ) {
2071 out.write( "<tr>" );
2074 writeDomainIdsToHtml( out,
2078 domain_id_to_secondary_features_maps );
2081 out.write( "<td></td>" );
// Every GO id must resolve to a term; fail loudly otherwise.
2083 if ( !go_id_to_term_map.containsKey( go_id ) ) {
2084 throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
2086 final GoTerm go_term = go_id_to_term_map.get( go_id );
// Skip terms outside the requested GO namespace (when a limit is set).
2087 if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
2088 // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
2089 final String go_id_str = go_id.getId();
2090 out.write( "<td>" );
2091 out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
2092 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
2093 out.write( "</td><td>" );
2094 out.write( go_term.getName() );
// For combinations, mark which member domain (0 or 1) this row is for.
2095 if ( domain_count == 2 ) {
2096 out.write( " (" + d + ")" );
2098 out.write( "</td><td>" );
2099 // out.write( top );
2100 // out.write( "</td><td>" );
2102 out.write( go_term.getGoNameSpace().toShortString() );
2104 out.write( "</td>" );
2105 if ( all_go_ids != null ) {
2106 all_go_ids.add( go_id );
// Placeholder cells for terms filtered out by the namespace limit.
2110 out.write( "<td>" );
2111 out.write( "</td><td>" );
2112 out.write( "</td><td>" );
2113 out.write( "</td><td>" );
2114 out.write( "</td>" );
2116 out.write( "</tr>" );
2117 out.write( SurfacingConstants.NL );
2120 } // for( int d = 0; d < domain_count; ++d )
// No GO annotation at all: still emit one (empty) row with the domain ids.
2121 if ( !any_go_annotation_present ) {
2122 out.write( "<tr>" );
2123 writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
2124 out.write( "<td>" );
2125 out.write( "</td><td>" );
2126 out.write( "</td><td>" );
2127 out.write( "</td><td>" );
2128 out.write( "</td>" );
2129 out.write( "</tr>" );
2130 out.write( SurfacingConstants.NL );
// Writes the table cell linking a domain id to its Pfam family page,
// preceded by an optional prefix (used by the detailed HTML output).
// NOTE(review): handling of domain_1 (second cell for binary domain
// combinations) and of domain_id_to_secondary_features_maps is on lines not
// visible in this extract -- verify against the full source.
2134 private static void writeDomainIdsToHtml( final Writer out,
2135 final String domain_0,
2136 final String domain_1,
2137 final String prefix_for_detailed_html,
2138 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps )
2139 throws IOException {
2140 out.write( "<td>" );
2141 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
2142 out.write( prefix_for_detailed_html );
2145 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
2146 out.write( "</td>" );
// Writes the given domain similarities either to a single writer or to
// per-initial-letter "split" writers, as tab-delimited text or HTML
// (selected by print_option), printing summary statistics (N, min, max,
// mean, SD, median, Pearsonian skewness) and an optional ASCII histogram of
// the selected sort field to stdout along the way. Returns the descriptive
// statistics gathered over all similarities.
// NOTE(review): many lines (case labels, breaks, else branches, writer
// close/flush calls) are not visible in this extract -- verify against the
// full source.
2149 public static DescriptiveStatistics writeDomainSimilaritiesToFile( final StringBuilder html_desc,
2150 final StringBuilder html_title,
2151 final Writer single_writer,
2152 Map<Character, Writer> split_writers,
2153 final SortedSet<DomainSimilarity> similarities,
2154 final boolean treat_as_binary,
2155 final List<Species> species_order,
2156 final PrintableDomainSimilarity.PRINT_OPTION print_option,
2157 final DomainSimilarity.DomainSimilaritySortField sort_field,
2158 final DomainSimilarity.DomainSimilarityScoring scoring,
2159 final boolean verbose ) throws IOException {
2160 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
// Pick the histogram title matching the field the similarities are sorted by.
2161 String histogram_title = null;
2162 switch ( sort_field ) {
2163 case ABS_MAX_COUNTS_DIFFERENCE:
2164 if ( treat_as_binary ) {
2165 histogram_title = "absolute counts difference:";
2168 histogram_title = "absolute (maximal) counts difference:";
2171 case MAX_COUNTS_DIFFERENCE:
2172 if ( treat_as_binary ) {
2173 histogram_title = "counts difference:";
2176 histogram_title = "(maximal) counts difference:";
2180 histogram_title = "score mean:";
2183 histogram_title = "score minimum:";
2186 histogram_title = "score maximum:";
2188 case MAX_DIFFERENCE:
2189 if ( treat_as_binary ) {
2190 histogram_title = "difference:";
2193 histogram_title = "(maximal) difference:";
2197 histogram_title = "score mean:";
2200 histogram_title = "score standard deviation:";
2203 histogram_title = "species number:";
2206 throw new AssertionError( "Unknown sort field: " + sort_field );
// Accumulate the per-similarity value of the selected sort field.
2208 for( final DomainSimilarity similarity : similarities ) {
2209 switch ( sort_field ) {
2210 case ABS_MAX_COUNTS_DIFFERENCE:
2211 stats.addValue( Math.abs( similarity.getMaximalDifferenceInCounts() ) );
2213 case MAX_COUNTS_DIFFERENCE:
2214 stats.addValue( similarity.getMaximalDifferenceInCounts() );
2217 stats.addValue( similarity.getMeanSimilarityScore() );
2220 stats.addValue( similarity.getMinimalSimilarityScore() );
2223 stats.addValue( similarity.getMaximalSimilarityScore() );
2225 case MAX_DIFFERENCE:
2226 stats.addValue( similarity.getMaximalDifference() );
2229 stats.addValue( similarity.getMeanSimilarityScore() );
2232 stats.addValue( similarity.getStandardDeviationOfSimilarityScore() );
2235 stats.addValue( similarity.getSpecies().size() );
2238 throw new AssertionError( "Unknown sort field: " + sort_field );
2242 // final HistogramData[] hists = new HistogramData[ 1 ];
2245 // List<HistogramDataItem> data_items = new
2246 // ArrayList<HistogramDataItem>();
2247 // double[] values = stats.getDataAsDoubleArray();
2248 // for( int i = 0; i < values.length; i++ ) {
2249 // HistogramDataItem data_item = new BasicHistogramDataItem( "", values[
2251 // data_items.add( data_item );
2255 // HistogramData hd0 = new HistogramData( "name",
2263 // hists[ 0 ] = hd0;
2265 // final HistogramsFrame hf = new HistogramsFrame( hists );
2266 // hf.setVisible( true );
2268 AsciiHistogram histo = null;
// NOTE(review): "stats.getMin() < stats.getMin()" compares a value with
// itself and is always false, so the histogram is never instantiated;
// this almost certainly was meant to be "stats.getMin() < stats.getMax()".
2269 if ( stats.getMin() < stats.getMin() ) {
2270 histo = new AsciiHistogram( stats, histogram_title );
2273 if ( histo != null ) {
2274 System.out.println( histo.toStringBuffer( 20, '|', 40, 5 ) );
// Summary statistics to stdout; SD and skewness need at least two values.
2276 System.out.println();
2277 System.out.println( "N : " + stats.getN() );
2278 System.out.println( "Min : " + stats.getMin() );
2279 System.out.println( "Max : " + stats.getMax() );
2280 System.out.println( "Mean : " + stats.arithmeticMean() );
2281 if ( stats.getN() > 1 ) {
2282 System.out.println( "SD : " + stats.sampleStandardDeviation() );
2285 System.out.println( "SD : n/a" );
2287 System.out.println( "Median : " + stats.median() );
2288 if ( stats.getN() > 1 ) {
2289 System.out.println( "Pearsonian skewness : " + stats.pearsonianSkewness() );
2292 System.out.println( "Pearsonian skewness : n/a" );
// Fall back to a single pseudo-split writer keyed '_' when no split
// writers were supplied.
2295 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
2296 split_writers = new HashMap<Character, Writer>();
2297 split_writers.put( '_', single_writer );
2299 switch ( print_option ) {
2300 case SIMPLE_TAB_DELIMITED:
// HTML preamble (title, description, stats table, optional histogram)
// written once per split writer.
2303 for( final Character key : split_writers.keySet() ) {
2304 final Writer w = split_writers.get( key );
2305 w.write( "<html>" );
2306 w.write( SurfacingConstants.NL );
2308 addHtmlHead( w, "DCs (" + html_title + ") " + key.toString().toUpperCase() );
2311 addHtmlHead( w, "DCs (" + html_title + ")" );
2313 w.write( SurfacingConstants.NL );
2314 w.write( "<body>" );
2315 w.write( SurfacingConstants.NL );
2316 w.write( html_desc.toString() );
2317 w.write( SurfacingConstants.NL );
2320 w.write( SurfacingConstants.NL );
2321 w.write( "<tt><pre>" );
2322 w.write( SurfacingConstants.NL );
2323 if ( histo != null ) {
2324 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
2325 w.write( SurfacingConstants.NL );
2327 w.write( "</pre></tt>" );
2328 w.write( SurfacingConstants.NL );
2329 w.write( "<table>" );
2330 w.write( SurfacingConstants.NL );
2331 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
2332 w.write( SurfacingConstants.NL );
2333 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
2334 w.write( SurfacingConstants.NL );
2335 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
2336 w.write( SurfacingConstants.NL );
2337 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
2338 w.write( SurfacingConstants.NL );
2339 if ( stats.getN() > 1 ) {
2340 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
2343 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
2345 w.write( SurfacingConstants.NL );
2346 w.write( "<tr><td>Median: </td><td>" + stats.median() + "</td></tr>" );
2347 w.write( SurfacingConstants.NL );
2348 if ( stats.getN() > 1 ) {
2349 w.write( "<tr><td>Pearsonian skewness: </td><td>" + stats.pearsonianSkewness() + "</td></tr>" );
2352 w.write( "<tr><td>Pearsonian skewness: </td><td>n/a</td></tr>" );
2354 w.write( SurfacingConstants.NL );
2355 w.write( "</table>" );
2356 w.write( SurfacingConstants.NL );
2358 w.write( SurfacingConstants.NL );
2360 w.write( SurfacingConstants.NL );
2362 w.write( SurfacingConstants.NL );
2363 w.write( "<table>" );
2364 w.write( SurfacingConstants.NL );
2368 for( final Writer w : split_writers.values() ) {
2369 w.write( SurfacingConstants.NL );
// Body: one entry per similarity, in iteration (i.e. sorted) order.
2371 for( final DomainSimilarity similarity : similarities ) {
2372 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2373 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
2375 if ( single_writer != null ) {
2376 single_writer.write( similarity.toStringBuffer( print_option ).toString() );
// Route to the writer for the first (lower-cased) character of the
// domain id; writer '0' is the catch-all for non-letter initials.
2379 Writer local_writer = split_writers.get( ( similarity.getDomainId().getId().charAt( 0 ) + "" )
2380 .toLowerCase().charAt( 0 ) );
2381 if ( local_writer == null ) {
2382 local_writer = split_writers.get( '0' );
2384 local_writer.write( similarity.toStringBuffer( print_option ).toString() );
2386 for( final Writer w : split_writers.values() ) {
2387 w.write( SurfacingConstants.NL );
// HTML postamble per split writer.
2390 switch ( print_option ) {
2392 for( final Writer w : split_writers.values() ) {
2393 w.write( SurfacingConstants.NL );
2394 w.write( "</table>" );
2395 w.write( SurfacingConstants.NL );
2396 w.write( "</font>" );
2397 w.write( SurfacingConstants.NL );
2398 w.write( "</body>" );
2399 w.write( SurfacingConstants.NL );
2400 w.write( "</html>" );
2401 w.write( SurfacingConstants.NL );
2405 for( final Writer w : split_writers.values() ) {
2411 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
2412 final String domain_0,
2413 final String domain_1 ) throws IOException {
2414 individual_files_writer.write( domain_0 );
2415 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
2416 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2417 individual_files_writer.write( domain_1 );
2418 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes "matrix" to "filename" in the given format; aborts fatally on I/O
// failure and reports the written file on success.
// NOTE(review): the try block and the writer flush/close lines are not
// visible in this extract -- verify against the full source.
2422 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
2423 final String filename,
2424 final Format format ) {
2425 final File outfile = new File( filename );
2426 checkForOutputFileWriteability( outfile );
2428 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
2429 matrix.toWriter( out, format );
2433 catch ( final IOException e ) {
2434 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2436 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Writes a list of distance matrices to one file in PHYLIP format, each
// followed by a line separator; aborts fatally on I/O failure.
// NOTE(review): the try block and the writer close lines are not visible in
// this extract -- verify against the full source.
2439 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
2440 checkForOutputFileWriteability( matrix_outfile );
2442 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
2443 for( final DistanceMatrix distance_matrix : matrices ) {
2444 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
2445 out.write( ForesterUtil.LINE_SEPARATOR );
2450 catch ( final IOException e ) {
2451 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2453 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
2456 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
2458 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
2459 for( final String pfam : pfams ) {
2460 writer.write( pfam );
2461 writer.write( ForesterUtil.LINE_SEPARATOR );
2464 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
2467 catch ( final IOException e ) {
2468 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2472 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
2473 final PhylogenyWriter writer = new PhylogenyWriter();
2475 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
2477 catch ( final IOException e ) {
2478 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
2481 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
2484 public static void writeTaxonomyLinks( final Writer writer, final String species ) throws IOException {
2485 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
2486 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( species );
2487 writer.write( " [" );
2488 if ( matcher.matches() ) {
2489 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_LINK + species
2490 + "\" target=\"taxonomy_window\">uniprot</a>" );
2493 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
2494 + "\" target=\"taxonomy_window\">eol</a>" );
2495 writer.write( "|" );
2496 writer.write( "<a href=\"" + SurfacingConstants.TOL_LINK + species
2497 + "\" target=\"taxonomy_window\">tol</a>" );
2499 writer.write( "]" );
2503 private static void writeToNexus( final String outfile_name,
2504 final CharacterStateMatrix<BinaryStates> matrix,
2505 final Phylogeny phylogeny ) {
2506 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
2507 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
2510 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
2511 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
2512 phylogenies.add( phylogeny );
2514 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
2515 w.write( NexusConstants.NEXUS );
2516 w.write( ForesterUtil.LINE_SEPARATOR );
2517 my_matrix.writeNexusTaxaBlock( w );
2518 my_matrix.writeNexusBinaryChractersBlock( w );
2519 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
2522 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
2524 catch ( final IOException e ) {
2525 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2529 private static void writeToNexus( final String outfile_name,
2530 final DomainParsimonyCalculator domain_parsimony,
2531 final Phylogeny phylogeny ) {
2532 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
2533 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
2535 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
2536 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
2540 public static void domainsPerProteinsStatistics( final String genome,
2541 final List<Protein> protein_list,
2542 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
2543 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
2544 final SortedSet<String> domains_which_are_always_single,
2545 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
2546 final SortedSet<String> domains_which_never_single,
2547 final Writer writer ) {
2548 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
2549 for( final Protein protein : protein_list ) {
2550 final int domains = protein.getNumberOfProteinDomains();
2551 //System.out.println( domains );
2552 stats.addValue( domains );
2553 all_genomes_domains_per_potein_stats.addValue( domains );
2554 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
2555 all_genomes_domains_per_potein_histo.put( domains, 1 );
2558 all_genomes_domains_per_potein_histo.put( domains,
2559 1 + all_genomes_domains_per_potein_histo.get( domains ) );
2561 if ( domains == 1 ) {
2562 final String domain = protein.getProteinDomain( 0 ).getDomainId().getId();
2563 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
2564 if ( domains_which_never_single.contains( domain ) ) {
2565 domains_which_never_single.remove( domain );
2566 domains_which_are_sometimes_single_sometimes_not.add( domain );
2569 domains_which_are_always_single.add( domain );
2573 else if ( domains > 1 ) {
2574 for( final Domain d : protein.getProteinDomains() ) {
2575 final String domain = d.getDomainId().getId();
2576 // System.out.println( domain );
2577 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
2578 if ( domains_which_are_always_single.contains( domain ) ) {
2579 domains_which_are_always_single.remove( domain );
2580 domains_which_are_sometimes_single_sometimes_not.add( domain );
2583 domains_which_never_single.add( domain );
2590 writer.write( genome );
2591 writer.write( "\t" );
2592 if ( stats.getN() >= 1 ) {
2593 writer.write( stats.arithmeticMean() + "" );
2594 writer.write( "\t" );
2595 if ( stats.getN() >= 2 ) {
2596 writer.write( stats.sampleStandardDeviation() + "" );
2601 writer.write( "\t" );
2602 writer.write( stats.median() + "" );
2603 writer.write( "\t" );
2604 writer.write( stats.getN() + "" );
2605 writer.write( "\t" );
2606 writer.write( stats.getMin() + "" );
2607 writer.write( "\t" );
2608 writer.write( stats.getMax() + "" );
2611 writer.write( "\t" );
2612 writer.write( "\t" );
2613 writer.write( "\t" );
2614 writer.write( "0" );
2615 writer.write( "\t" );
2616 writer.write( "\t" );
2618 writer.write( "\n" );
2620 catch ( final IOException e ) {
2621 e.printStackTrace();
2625 final static class DomainComparator implements Comparator<Domain> {
2627 final private boolean _ascending;
2629 public DomainComparator( final boolean ascending ) {
2630 _ascending = ascending;
2634 public final int compare( final Domain d0, final Domain d1 ) {
2635 if ( d0.getFrom() < d1.getFrom() ) {
2636 return _ascending ? -1 : 1;
2638 else if ( d0.getFrom() > d1.getFrom() ) {
2639 return _ascending ? 1 : -1;