3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: www.phylosoft.org/forester
27 package org.forester.surfacing;
29 import java.io.BufferedWriter;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.text.DecimalFormat;
35 import java.text.NumberFormat;
36 import java.util.ArrayList;
37 import java.util.Arrays;
38 import java.util.Collections;
39 import java.util.Comparator;
40 import java.util.HashMap;
41 import java.util.HashSet;
42 import java.util.List;
44 import java.util.Map.Entry;
45 import java.util.PriorityQueue;
47 import java.util.SortedMap;
48 import java.util.SortedSet;
49 import java.util.TreeMap;
50 import java.util.TreeSet;
51 import java.util.regex.Matcher;
52 import java.util.regex.Pattern;
54 import org.forester.application.surfacing;
55 import org.forester.evoinference.distance.NeighborJoining;
56 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
57 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
58 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
61 import org.forester.evoinference.matrix.distance.DistanceMatrix;
62 import org.forester.go.GoId;
63 import org.forester.go.GoNameSpace;
64 import org.forester.go.GoTerm;
65 import org.forester.go.PfamToGoMapping;
66 import org.forester.io.parsers.nexus.NexusConstants;
67 import org.forester.io.writers.PhylogenyWriter;
68 import org.forester.phylogeny.Phylogeny;
69 import org.forester.phylogeny.PhylogenyMethods;
70 import org.forester.phylogeny.PhylogenyNode;
71 import org.forester.phylogeny.PhylogenyNodeI.NH_CONVERSION_SUPPORT_VALUE_STYLE;
72 import org.forester.phylogeny.data.BinaryCharacters;
73 import org.forester.phylogeny.data.Confidence;
74 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
75 import org.forester.protein.BasicDomain;
76 import org.forester.protein.BasicProtein;
77 import org.forester.protein.BinaryDomainCombination;
78 import org.forester.protein.Domain;
79 import org.forester.protein.DomainId;
80 import org.forester.protein.Protein;
81 import org.forester.species.Species;
82 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
83 import org.forester.surfacing.DomainSimilarityCalculator.GoAnnotationOutput;
84 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
85 import org.forester.util.AsciiHistogram;
86 import org.forester.util.BasicDescriptiveStatistics;
87 import org.forester.util.BasicTable;
88 import org.forester.util.BasicTableParser;
89 import org.forester.util.DescriptiveStatistics;
90 import org.forester.util.ForesterUtil;
// Static utility holder for the "surfacing" domain-architecture analyses
// (HTML output, parsimony gain/loss reporting, GO mapping, statistics).
// NOTE(review): interior lines of this chunk appear elided; the comparator
// body below is only partially visible — confirm against the full file.
92 public final class SurfacingUtil {
// Scientific-notation formatter, e.g. "1.2E-5" (used for e-values, presumably).
94 private final static NumberFormat FORMATTER = new DecimalFormat( "0.0E0" );
// Fixed-point formatter with three decimal places.
95 private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
// Orders domains by ascending per-sequence e-value (higher confidence first);
// falls back to Domain.compareTo for equal e-values.
96 private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {
99 public int compare( final Domain d1,
101 if ( d1.getPerSequenceEvalue() < d2
102 .getPerSequenceEvalue() ) {
106 .getPerSequenceEvalue() > d2
107 .getPerSequenceEvalue() ) {
// Tie-break: delegate to the domains' natural ordering.
111 return d1.compareTo( d2 );
// Matches SwissProt-style taxonomy codes: 3-5 uppercase letters/digits.
115 public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
116 private static final boolean USE_LAST = true;
// Utility class: prevent instantiation.
118 private SurfacingUtil() {
119 // Hidden constructor.
// Adds every binary domain combination found in the given genome to the
// supplied sorted set (mutates binary_domain_combinations in place).
122 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
123 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
124 final SortedMap<DomainId, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
// For each domain id, expand its combinable partners into binary combinations.
125 for( final DomainId domain_id : all_cd.keySet() ) {
126 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
// Adds every domain id present in the given genome to the supplied sorted set
// (mutates domain_ids in place).
130 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
131 final SortedSet<DomainId> domain_ids ) {
132 final SortedSet<DomainId> domains = genome.getAllDomainIds();
133 for( final DomainId domain : domains ) {
134 domain_ids.add( domain );
// Writes an HTML <head> section (title plus an inline CSS stylesheet for
// links, table cells and headings) to the given writer.
// NOTE(review): some lines (e.g. the "<head>" open tag and the title text
// write) appear elided in this view — confirm against the full file.
// Throws IOException on write failure.
138 public static void addHtmlHead( final Writer w, final String title ) throws IOException {
139 w.write( SurfacingConstants.NL );
141 w.write( "<title>" );
143 w.write( "</title>" );
144 w.write( SurfacingConstants.NL );
145 w.write( "<style>" );
146 w.write( SurfacingConstants.NL );
// Link styling: purple links, green active/hover highlight, no underline.
147 w.write( "a:visited { color : #6633FF; text-decoration : none; }" );
148 w.write( SurfacingConstants.NL );
149 w.write( "a:link { color : #6633FF; text-decoration : none; }" );
150 w.write( SurfacingConstants.NL );
151 w.write( "a:active { color : #99FF00; text-decoration : none; }" );
152 w.write( SurfacingConstants.NL );
153 w.write( "a:hover { color : #FFFFFF; background-color : #99FF00; text-decoration : none; }" );
154 w.write( SurfacingConstants.NL );
// Table cells and headings use the Verdana/Arial/Helvetica stack.
155 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
156 w.write( SurfacingConstants.NL );
157 w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
158 w.write( SurfacingConstants.NL );
159 w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
160 w.write( SurfacingConstants.NL );
161 w.write( "</style>" );
162 w.write( SurfacingConstants.NL );
163 w.write( "</head>" );
164 w.write( SurfacingConstants.NL );
// Builds descriptive statistics over the mean similarity scores of all given
// domain similarities. (Return statement elided in this view; presumably
// returns the populated stats object — confirm against the full file.)
167 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
168 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
169 for( final DomainSimilarity similarity : similarities ) {
170 stats.addValue( similarity.getMeanSimilarityScore() );
// Analyzes independent (repeated) gains of domain combinations ("dc") on a
// Fitch-parsimony-annotated phylogeny and writes several report files:
// per-count histograms, domain-combination lists (plain / for GO mapping /
// unique), LCA rank and ancestor-species counts, and optional protein-length,
// domain-number and domain-length statistics.
// NOTE(review): many interior lines (closing braces, else-branches, some
// statements) are elided in this view — comments below describe only the
// visible logic.
175 private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
176 final String outfilename_for_counts,
177 final String outfilename_for_dc,
178 final String outfilename_for_dc_for_go_mapping,
179 final String outfilename_for_dc_for_go_mapping_unique,
180 final String outfilename_for_rank_counts,
181 final String outfilename_for_ancestor_species_counts,
182 final String outfilename_for_protein_stats,
183 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
184 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
185 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
188 // if ( protein_length_stats_by_dc != null ) {
189 // for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
190 // System.out.print( entry.getKey().toString() );
191 // System.out.print( ": " );
192 // double[] a = entry.getValue().getDataAsDoubleArray();
193 // for( int i = 0; i < a.length; i++ ) {
194 // System.out.print( a[ i ] + " " );
196 // System.out.println();
199 // if ( domain_number_stats_by_dc != null ) {
200 // for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
201 // System.out.print( entry.getKey().toString() );
202 // System.out.print( ": " );
203 // double[] a = entry.getValue().getDataAsDoubleArray();
204 // for( int i = 0; i < a.length; i++ ) {
205 // System.out.print( a[ i ] + " " );
207 // System.out.println();
211 final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
212 final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
213 final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
214 final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
// Phase 1: count, per domain combination, how many tree nodes gained it.
215 final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
216 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
217 final PhylogenyNode n = it.next();
218 final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
219 for( final String dc : gained_dc ) {
220 if ( dc_gain_counts.containsKey( dc ) ) {
221 dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
224 dc_gain_counts.put( dc, 1 );
// Phase 2: invert into a histogram (gain-count -> number of dcs) and collect
// per-gain-count domain lists and statistics accumulators.
228 final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
229 final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
230 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
231 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
232 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
233 final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
234 final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
235 final Set<String> dcs = dc_gain_counts.keySet();
// Domain combinations gained more than once (independent/convergent gains).
236 final SortedSet<String> more_than_once = new TreeSet<String>();
237 final DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
238 final DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
239 final DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
240 final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
241 final DescriptiveStatistics gained_multiple_times_domain_length_stats = new BasicDescriptiveStatistics();
242 final DescriptiveStatistics gained_once_domain_length_stats = new BasicDescriptiveStatistics();
243 for( final String dc : dcs ) {
244 final int count = dc_gain_counts.get( dc );
245 if ( histogram.containsKey( count ) ) {
246 histogram.put( count, histogram.get( count ) + 1 );
247 domain_lists.get( count ).append( ", " + dc );
248 domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
249 domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
// First dc seen with this gain count: initialize histogram bin and lists.
252 histogram.put( count, 1 );
253 domain_lists.put( count, new StringBuilder( dc ) );
254 final PriorityQueue<String> q = new PriorityQueue<String>();
255 q.addAll( splitDomainCombination( dc ) );
256 domain_lists_go.put( count, q );
257 final SortedSet<String> set = new TreeSet<String>();
258 set.addAll( splitDomainCombination( dc ) );
259 domain_lists_go_unique.put( count, set );
// Optional per-gain-count statistics, keyed by re-appearance count.
261 if ( protein_length_stats_by_dc != null ) {
262 if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
263 dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
265 dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
268 if ( domain_number_stats_by_dc != null ) {
269 if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
270 dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
272 dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
275 if ( domain_length_stats_by_domain != null ) {
276 if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
277 dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
// A dc string encodes two domains separated by '=' (see split below).
279 final String[] ds = dc.split( "=" );
280 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
281 .get( ds[ 0 ] ).arithmeticMean() );
282 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
283 .get( ds[ 1 ] ).arithmeticMean() );
// Visible below: accumulation path for dcs gained multiple times
// (presumably guarded by count > 1 — guard line elided in this view).
286 more_than_once.add( dc );
287 if ( protein_length_stats_by_dc != null ) {
288 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
289 final double[] a = s.getDataAsDoubleArray();
290 for( final double element : a ) {
291 gained_multiple_times_lengths_stats.addValue( element );
294 if ( domain_number_stats_by_dc != null ) {
295 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
296 final double[] a = s.getDataAsDoubleArray();
297 for( final double element : a ) {
298 gained_multiple_times_domain_count_stats.addValue( element );
301 if ( domain_length_stats_by_domain != null ) {
302 final String[] ds = dc.split( "=" );
303 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
304 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
305 final double[] a0 = s0.getDataAsDoubleArray();
306 final double[] a1 = s1.getDataAsDoubleArray();
307 for( final double element : a0 ) {
308 gained_multiple_times_domain_length_stats.addValue( element );
310 for( final double element : a1 ) {
311 gained_multiple_times_domain_length_stats.addValue( element );
// Mirror accumulation path for dcs gained exactly once.
316 if ( protein_length_stats_by_dc != null ) {
317 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
318 final double[] a = s.getDataAsDoubleArray();
319 for( final double element : a ) {
320 gained_once_lengths_stats.addValue( element );
323 if ( domain_number_stats_by_dc != null ) {
324 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
325 final double[] a = s.getDataAsDoubleArray();
326 for( final double element : a ) {
327 gained_once_domain_count_stats.addValue( element );
330 if ( domain_length_stats_by_domain != null ) {
331 final String[] ds = dc.split( "=" );
332 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
333 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
334 final double[] a0 = s0.getDataAsDoubleArray();
335 final double[] a1 = s1.getDataAsDoubleArray();
336 for( final double element : a0 ) {
337 gained_once_domain_length_stats.addValue( element );
339 for( final double element : a1 ) {
340 gained_once_domain_length_stats.addValue( element );
// Phase 3: write histogram, dc lists, and GO-mapping lists per gain count.
345 final Set<Integer> histogram_keys = histogram.keySet();
346 for( final Integer histogram_key : histogram_keys ) {
347 final int count = histogram.get( histogram_key );
348 final StringBuilder dc = domain_lists.get( histogram_key );
349 out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
350 out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
351 out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
// Sort the (queue-backed) domain list before writing it out.
352 final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
353 Arrays.sort( sorted );
354 for( final Object domain : sorted ) {
355 out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
357 out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
358 for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
359 out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
364 out_dc_for_go_mapping.close();
365 out_dc_for_go_mapping_unique.close();
// Phase 4: for each multiply-gained dc, find all external nodes carrying the
// gain and tally the taxonomic rank and species name of each pairwise LCA.
366 final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
367 final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
368 for( final String dc : more_than_once ) {
369 final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
370 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
371 final PhylogenyNode n = it.next();
372 if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
// Pairwise LCA over all nodes that gained this dc (O(n^2) pairs).
376 for( int i = 0; i < nodes.size() - 1; ++i ) {
377 for( int j = i + 1; j < nodes.size(); ++j ) {
378 final PhylogenyNode lca = PhylogenyMethods.getInstance().obtainLCA( nodes.get( i ),
380 String rank = "unknown";
381 if ( lca.getNodeData().isHasTaxonomy()
382 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
383 rank = lca.getNodeData().getTaxonomy().getRank();
385 addToCountMap( lca_rank_counts, rank );
// Prefer scientific name, then common name, then the node's plain name.
387 if ( lca.getNodeData().isHasTaxonomy()
388 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
389 lca_species = lca.getNodeData().getTaxonomy().getScientificName();
391 else if ( lca.getNodeData().isHasTaxonomy()
392 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
393 lca_species = lca.getNodeData().getTaxonomy().getCommonName();
396 lca_species = lca.getName();
398 addToCountMap( lca_ancestor_species_counts, lca_species );
402 final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
403 final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
404 ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
405 ForesterUtil.map2writer( out_for_ancestor_species_counts,
406 lca_ancestor_species_counts,
408 ForesterUtil.LINE_SEPARATOR );
409 out_for_rank_counts.close();
410 out_for_ancestor_species_counts.close();
// Phase 5: optional protein-statistics report (only when an output name is
// given and at least one of the stats maps is present).
411 if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
412 && ( ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
413 final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
414 w.write( "Domain Lengths: " );
416 if ( domain_length_stats_by_domain != null ) {
417 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
419 w.write( entry.getKey().toString() );
420 w.write( "\t" + entry.getValue().arithmeticMean() );
421 w.write( "\t" + entry.getValue().median() );
428 w.write( "Protein Lengths: " );
430 if ( protein_length_stats_by_dc != null ) {
431 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
433 w.write( entry.getKey().toString() );
434 w.write( "\t" + entry.getValue().arithmeticMean() );
435 w.write( "\t" + entry.getValue().median() );
442 w.write( "Number of domains: " );
444 if ( domain_number_stats_by_dc != null ) {
445 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
447 w.write( entry.getKey().toString() );
448 w.write( "\t" + entry.getValue().arithmeticMean() );
449 w.write( "\t" + entry.getValue().median() );
456 w.write( "Gained once, domain lengths:" );
458 w.write( gained_once_domain_length_stats.toString() );
461 w.write( "Gained multiple times, domain lengths:" );
463 w.write( gained_multiple_times_domain_length_stats.toString() );
468 w.write( "Gained once, protein lengths:" );
470 w.write( gained_once_lengths_stats.toString() );
473 w.write( "Gained once, domain counts:" );
475 w.write( gained_once_domain_count_stats.toString() );
478 w.write( "Gained multiple times, protein lengths:" );
480 w.write( gained_multiple_times_lengths_stats.toString() );
483 w.write( "Gained multiple times, domain counts:" );
485 w.write( gained_multiple_times_domain_count_stats.toString() );
// I/O failures are reported as warnings, not rethrown.
490 catch ( final IOException e ) {
491 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
493 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
494 + outfilename_for_counts + "]" );
495 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
496 + outfilename_for_dc + "]" );
497 ForesterUtil.programMessage( surfacing.PRG_NAME,
498 "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
499 + outfilename_for_dc_for_go_mapping + "]" );
500 ForesterUtil.programMessage( surfacing.PRG_NAME,
501 "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
502 + outfilename_for_dc_for_go_mapping_unique + "]" );
// Increments the count for key s in the map (else-branch that inserts the
// initial count appears elided in this view — confirm against the full file).
505 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
506 if ( map.containsKey( s ) ) {
507 map.put( s, map.get( s ) + 1 );
// Counts how many positions within [domain.getFrom(), domain.getTo()] are
// already marked true in covered_positions. Positions beyond the list's size
// are treated as uncovered. (The increment statement inside the if appears
// elided in this view.)
514 public static int calculateOverlap( final Domain domain, final List<Boolean> covered_positions ) {
515 int overlap_count = 0;
516 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
517 if ( ( i < covered_positions.size() ) && ( covered_positions.get( i ) == true ) ) {
521 return overlap_count;
// Aborts the program (via ForesterUtil.fatalError) if the given output file
// is not writable; no-op otherwise.
524 public static void checkForOutputFileWriteability( final File outfile ) {
525 final String error = ForesterUtil.isWritableFile( outfile );
526 if ( !ForesterUtil.isEmpty( error ) ) {
527 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Collects, over all descendants of subtree_root, either the gained or the
// lost binary characters (domains), selected by get_gains. The if/else guard
// and the return appear elided in this view — confirm against the full file.
531 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
532 final boolean get_gains ) {
533 final SortedSet<String> domains = new TreeSet<String>();
534 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
535 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
537 domains.addAll( chars.getGainedCharacters() );
540 domains.addAll( chars.getLostCharacters() );
// Scans a gain/loss character-state matrix and appends to the output list a
// BinaryDomainCombination (typed per dc_type) for every character that was
// gained (get_gains == true) or lost (get_gains == false) in any identifier.
// Identifiers are processed in sorted order for deterministic output.
546 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
547 final BinaryDomainCombination.DomainCombinationType dc_type,
548 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
549 final boolean get_gains ) {
550 final SortedSet<String> sorted_ids = new TreeSet<String>();
551 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
552 sorted_ids.add( matrix.getIdentifier( i ) );
554 for( final String id : sorted_ids ) {
555 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
556 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
557 || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
// Instantiate the combination with the concrete type requested by dc_type.
558 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
559 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
560 .createInstance( matrix.getCharacter( c ) ) );
562 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
563 all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
564 .createInstance( matrix.getCharacter( c ) ) );
// Fallback (presumably the undirected/basic type — else branch elided here).
567 all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
568 .getCharacter( c ) ) );
// Creates (if necessary) and returns the nested output directory for
// per-node domain gain/loss files:
//   <outfile-parent>/<base_dir>/{DC|DOMAINS}/{GAINS|LOSSES|PRESENT}
// choosing DC vs DOMAINS by domain_combinations and the leaf by state.
575 private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
576 final boolean domain_combinations,
577 final CharacterStateMatrix.GainLossStates state,
578 final String outfile ) {
579 File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
580 + ForesterUtil.FILE_SEPARATOR + base_dir );
581 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
582 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
584 if ( domain_combinations ) {
585 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
586 + ForesterUtil.FILE_SEPARATOR + "DC" );
589 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
590 + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
592 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
593 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
// Leaf directory is chosen by the gain/loss state; null/other means PRESENT.
595 if ( state == GainLossStates.GAIN ) {
596 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
597 + ForesterUtil.FILE_SEPARATOR + "GAINS" );
599 else if ( state == GainLossStates.LOSS ) {
600 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
601 + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
604 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
605 + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
607 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
608 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
610 return per_node_go_mapped_domain_gain_loss_files_base_dir;
// Builds a multimap from Pfam domain id to all GO ids mapped to it, from the
// given Pfam-to-GO mapping list. Each key gets a fresh list on first sight.
613 public static Map<DomainId, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
614 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map = new HashMap<DomainId, List<GoId>>( pfam_to_go_mappings
616 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
617 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
618 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
620 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
622 return domain_id_to_go_ids_map;
// Parses a tab-separated file (column 0: domain id, column 1: secondary
// feature) into a multimap of domain id -> set of secondary features.
// (Throws clause and return appear elided in this view.)
625 public static Map<DomainId, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
627 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, "\t" );
628 final Map<DomainId, Set<String>> map = new TreeMap<DomainId, Set<String>>();
629 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
630 final DomainId domain_id = new DomainId( primary_table.getValue( 0, r ) );
631 if ( !map.containsKey( domain_id ) ) {
632 map.put( domain_id, new HashSet<String>() );
634 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
// Computes a neighbor-joining tree from the distance matrix, names it after
// the output file, and writes it there. (Return statement elided in this
// view; presumably returns the phylogeny — confirm against the full file.)
639 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
640 checkForOutputFileWriteability( nj_tree_outfile );
641 final NeighborJoining nj = NeighborJoining.createInstance();
642 final Phylogeny phylogeny = nj.execute( distance );
643 phylogeny.setName( nj_tree_outfile.getName() );
644 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
// Returns the set of all binary domain combinations occurring anywhere in
// the given genome, by expanding each domain's combinable partners.
648 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
649 final SortedMap<DomainId, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
650 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
651 for( final DomainId domain_id : cds.keySet() ) {
652 final CombinableDomains cd = cds.get( domain_id );
653 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
655 return binary_combinations;
// Configures output options (detailedness, GO annotation mode, GO term map,
// GO namespace limit) on every PrintableDomainSimilarity in the set; other
// DomainSimilarity implementations are left untouched.
// Throws IllegalArgumentException if a namespace limit is given without a
// usable GO id-to-term map.
658 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
659 final Detailedness detailedness,
660 final GoAnnotationOutput go_annotation_output,
661 final Map<GoId, GoTerm> go_id_to_term_map,
662 final GoNameSpace go_namespace_limit ) {
663 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) ) {
664 throw new IllegalArgumentException( "attempt to use a GO namespace limit without a GO id to term map" );
666 for( final DomainSimilarity domain_similarity : domain_similarities ) {
667 if ( domain_similarity instanceof PrintableDomainSimilarity ) {
668 final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
669 printable_domain_similarity.setDetailedness( detailedness );
670 printable_domain_similarity.setGoAnnotationOutput( go_annotation_output );
671 printable_domain_similarity.setGoIdToTermMap( go_id_to_term_map );
672 printable_domain_similarity.setGoNamespaceLimit( go_namespace_limit );
// Writes a domain-length analysis report: per-species mean-based statistics,
// per-domain outlier species (|Z| >= 1.5), a genome-wide summary with an
// ASCII histogram, per-species Z-scores, and a filtered list of outlier
// species (|Z| >= 1.0). Throws IOException on write failure.
// NOTE(review): input_file_properties and number_of_genomes are unused in
// the visible lines (only referenced in the commented-out histogram code).
677 public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
678 final int number_of_genomes,
679 final DomainLengthsTable domain_lengths_table,
680 final File outfile ) throws IOException {
681 final DecimalFormat df = new DecimalFormat( "#.00" );
682 checkForOutputFileWriteability( outfile );
683 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
684 out.write( "MEAN BASED STATISTICS PER SPECIES" );
685 out.write( ForesterUtil.LINE_SEPARATOR );
686 out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
687 out.write( ForesterUtil.LINE_SEPARATOR );
688 out.write( ForesterUtil.LINE_SEPARATOR );
689 final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
690 out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
691 out.write( ForesterUtil.LINE_SEPARATOR );
// Section 2: for each domain, list species whose mean length is an outlier.
692 for( final DomainLengths domain_lengths : domain_lengths_list ) {
693 final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
694 if ( species_list.size() > 0 ) {
695 out.write( domain_lengths.getDomainId() + "\t" );
696 for( final Species species : species_list ) {
697 out.write( species + "\t" );
699 out.write( ForesterUtil.LINE_SEPARATOR );
700 // DescriptiveStatistics stats_for_domain = domain_lengths
701 // .calculateMeanBasedStatistics();
702 //AsciiHistogram histo = new AsciiHistogram( stats_for_domain );
703 //System.out.println( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
706 out.write( ForesterUtil.LINE_SEPARATOR );
707 out.write( ForesterUtil.LINE_SEPARATOR );
708 out.write( "OUTLIER SPECIES (Z 1.0)" );
709 out.write( ForesterUtil.LINE_SEPARATOR );
// Section 3: genome-wide summary statistics plus ASCII histogram.
710 final DescriptiveStatistics stats_for_all_species = domain_lengths_table
711 .calculateMeanBasedStatisticsForAllSpecies();
712 out.write( stats_for_all_species.asSummary() );
713 out.write( ForesterUtil.LINE_SEPARATOR );
714 final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
715 out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
716 out.write( ForesterUtil.LINE_SEPARATOR );
// Note: despite the name, this uses the sample standard deviation.
717 final double population_sd = stats_for_all_species.sampleStandardDeviation();
718 final double population_mean = stats_for_all_species.arithmeticMean();
// Section 4: Z-score of each species' mean domain length vs. the overall mean.
719 for( final Species species : domain_lengths_table.getSpecies() ) {
720 final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
721 final double z = ( x - population_mean ) / population_sd;
722 out.write( species + "\t" + z );
723 out.write( ForesterUtil.LINE_SEPARATOR );
725 out.write( ForesterUtil.LINE_SEPARATOR );
// Section 5: only species with |Z| >= 1.0, with formatted score and summary.
726 for( final Species species : domain_lengths_table.getSpecies() ) {
727 final DescriptiveStatistics stats_for_species = domain_lengths_table
728 .calculateMeanBasedStatisticsForSpecies( species );
729 final double x = stats_for_species.arithmeticMean();
730 final double z = ( x - population_mean ) / population_sd;
731 if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
732 out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
733 out.write( ForesterUtil.LINE_SEPARATOR );
737 // final List<HistogramData> histogram_datas = new ArrayList<HistogramData>();
738 // for( int i = 0; i < number_of_genomes; ++i ) {
739 // final Species species = new BasicSpecies( input_file_properties[ i ][ 0 ] );
741 // .add( new HistogramData( species.toString(), domain_lengths_table
742 // .calculateMeanBasedStatisticsForSpecies( species )
743 // .getDataAsDoubleArray(), 5, 600, null, 60 ) );
745 // final HistogramsFrame hf = new HistogramsFrame( histogram_datas );
746 // hf.setVisible( true );
752 * @param all_binary_domains_combination_lost_fitch
753 * @param consider_directedness_and_adjacency_for_bin_combinations
754 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
755 * which were gained under unweighted (Fitch) parsimony.
757 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
758 final boolean radomize_fitch_parsimony,
759 final String outfile_name,
760 final DomainParsimonyCalculator domain_parsimony,
761 final Phylogeny phylogeny,
762 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
763 final Map<GoId, GoTerm> go_id_to_term_map,
764 final GoNameSpace go_namespace_limit,
765 final String parameters_str,
766 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
767 final SortedSet<DomainId> positive_filter,
768 final boolean output_binary_domain_combinations_for_graphs,
769 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
770 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
771 final BinaryDomainCombination.DomainCombinationType dc_type,
772 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
773 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
774 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
775 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
776 final String date_time = ForesterUtil.getCurrentDateTime();
777 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
778 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
779 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
780 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
781 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
782 writeToNexus( outfile_name, domain_parsimony, phylogeny );
785 Phylogeny local_phylogeny_l = phylogeny.copy();
786 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
787 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
790 domain_parsimony.executeDolloParsimonyOnDomainPresence();
792 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
793 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
794 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
795 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
796 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
797 CharacterStateMatrix.GainLossStates.GAIN,
798 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
800 ForesterUtil.LINE_SEPARATOR,
802 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
803 CharacterStateMatrix.GainLossStates.LOSS,
804 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
806 ForesterUtil.LINE_SEPARATOR,
808 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
809 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
811 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
815 domain_parsimony.getGainLossMatrix(),
816 CharacterStateMatrix.GainLossStates.GAIN,
817 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
819 ForesterUtil.LINE_SEPARATOR,
820 "Dollo Parsimony | Gains | Domains",
822 domain_id_to_secondary_features_maps,
823 all_pfams_encountered,
824 all_pfams_gained_as_domains,
826 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
830 domain_parsimony.getGainLossMatrix(),
831 CharacterStateMatrix.GainLossStates.LOSS,
832 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
834 ForesterUtil.LINE_SEPARATOR,
835 "Dollo Parsimony | Losses | Domains",
837 domain_id_to_secondary_features_maps,
838 all_pfams_encountered,
839 all_pfams_lost_as_domains,
841 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
845 domain_parsimony.getGainLossMatrix(),
847 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
849 ForesterUtil.LINE_SEPARATOR,
850 "Dollo Parsimony | Present | Domains",
852 domain_id_to_secondary_features_maps,
853 all_pfams_encountered,
855 "_dollo_present_d" );
856 preparePhylogeny( local_phylogeny_l,
859 "Dollo parsimony on domain presence/absence",
860 "dollo_on_domains_" + outfile_name,
862 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
863 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
865 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
866 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
868 catch ( final IOException e ) {
870 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
872 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
873 // FITCH DOMAIN COMBINATIONS
874 // -------------------------
875 local_phylogeny_l = phylogeny.copy();
876 String randomization = "no";
877 if ( radomize_fitch_parsimony ) {
878 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
879 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
882 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( USE_LAST );
884 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
885 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
886 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
887 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
889 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
890 CharacterStateMatrix.GainLossStates.GAIN,
891 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
893 ForesterUtil.LINE_SEPARATOR,
895 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
896 CharacterStateMatrix.GainLossStates.LOSS,
898 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
900 ForesterUtil.LINE_SEPARATOR,
902 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
903 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
904 if ( all_binary_domains_combination_gained_fitch != null ) {
905 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
907 all_binary_domains_combination_gained_fitch,
910 if ( all_binary_domains_combination_lost_fitch != null ) {
911 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
913 all_binary_domains_combination_lost_fitch,
916 if ( output_binary_domain_combinations_for_graphs ) {
918 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
919 .getGainLossMatrix(),
922 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
924 ForesterUtil.LINE_SEPARATOR,
925 BinaryDomainCombination.OutputFormat.DOT );
928 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
932 domain_parsimony.getGainLossMatrix(),
933 CharacterStateMatrix.GainLossStates.GAIN,
934 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
936 ForesterUtil.LINE_SEPARATOR,
937 "Fitch Parsimony | Gains | Domain Combinations",
940 all_pfams_encountered,
941 all_pfams_gained_as_dom_combinations,
943 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
947 domain_parsimony.getGainLossMatrix(),
948 CharacterStateMatrix.GainLossStates.LOSS,
949 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
951 ForesterUtil.LINE_SEPARATOR,
952 "Fitch Parsimony | Losses | Domain Combinations",
955 all_pfams_encountered,
956 all_pfams_lost_as_dom_combinations,
957 "_fitch_losses_dc" );
958 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
962 domain_parsimony.getGainLossMatrix(),
964 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
966 ForesterUtil.LINE_SEPARATOR,
967 "Fitch Parsimony | Present | Domain Combinations",
970 all_pfams_encountered,
972 "_fitch_present_dc" );
973 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
976 all_pfams_encountered );
977 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
978 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
979 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
980 all_pfams_gained_as_dom_combinations );
981 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
982 preparePhylogeny( local_phylogeny_l,
985 "Fitch parsimony on binary domain combination presence/absence randomization: "
987 "fitch_on_binary_domain_combinations_" + outfile_name,
989 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
990 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
991 calculateIndependentDomainCombinationGains( local_phylogeny_l,
993 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
995 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
997 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
999 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
1000 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
1001 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
1002 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
1003 protein_length_stats_by_dc,
1004 domain_number_stats_by_dc,
1005 domain_length_stats_by_domain );
// Like executeParsimonyAnalysis, but operating on secondary features
// (domains mapped via mapping_results_map) instead of raw domains:
// runs Dollo parsimony on secondary-feature presence/absence, then Fitch
// parsimony on mapped binary domain combinations, writing matrices,
// gain/loss lists, decorated phylogenies and independent-DC-gain files.
// Side effects: writes multiple files under outfile_name-derived paths.
public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
final DomainParsimonyCalculator secondary_features_parsimony,
final Phylogeny phylogeny,
final String parameters_str,
final Map<Species, MappingResults> mapping_results_map ) {
final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
final String date_time = ForesterUtil.getCurrentDateTime();
System.out.println();
writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
// Work on a copy so the caller's phylogeny is not decorated/mutated.
Phylogeny local_phylogeny_copy = phylogeny.copy();
// DOLLO SECONDARY FEATURES
secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
+ surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
+ surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
// Gain / loss / present lists for Dollo on secondary features.
.writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
CharacterStateMatrix.GainLossStates.GAIN,
+ surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
ForesterUtil.LINE_SEPARATOR,
.writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
CharacterStateMatrix.GainLossStates.LOSS,
+ surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
ForesterUtil.LINE_SEPARATOR,
.writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
+ surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
ForesterUtil.LINE_SEPARATOR,
preparePhylogeny( local_phylogeny_copy,
secondary_features_parsimony,
"Dollo parsimony on secondary feature presence/absence",
"dollo_on_secondary_features_" + outfile_name,
SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
+ surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
// FITCH DOMAIN COMBINATIONS
// -------------------------
local_phylogeny_copy = phylogeny.copy();
// Never randomized in the secondary-features variant.
final String randomization = "no";
secondary_features_parsimony.executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( USE_LAST );
preparePhylogeny( local_phylogeny_copy,
secondary_features_parsimony,
"Fitch parsimony on secondary binary domain combination presence/absence randomization: "
"fitch_on_binary_domain_combinations_" + outfile_name,
SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
+ surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
// No protein/domain statistics maps here, hence the trailing nulls.
calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
+ surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
+ surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
+ surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
+ surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
+ "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
+ "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
// Writes, for each protein matching limit_to_species (or all proteins if
// that filter is empty/null) and containing query_domain_ids_nc_order in
// N-to-C order, a separator-delimited record: species id, protein id,
// unique domain ids with total counts, description and accession (each
// omitted when empty or SurfacingConstants.NONE).
// NOTE(review): writes to a Writer "out" whose declaration is not visible
// in this extract (presumably a Writer parameter) — confirm against the
// full signature. average_protein_lengths_by_dc is not used in the
// visible portion of this method.
public static void doit( final List<Protein> proteins,
final List<DomainId> query_domain_ids_nc_order,
final String separator,
final String limit_to_species,
final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
for( final Protein protein : proteins ) {
// Species filter: empty limit_to_species means "no filter".
if ( ForesterUtil.isEmpty( limit_to_species )
|| protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
// Only proteins that contain the query domains in N-to-C order.
if ( protein.contains( query_domain_ids_nc_order, true ) ) {
out.write( protein.getSpecies().getSpeciesId() );
out.write( separator );
out.write( protein.getProteinId().getId() );
out.write( separator );
// Emit each distinct domain id only once per protein.
final Set<DomainId> visited_domain_ids = new HashSet<DomainId>();
boolean first = true;
for( final Domain domain : protein.getProteinDomains() ) {
if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
visited_domain_ids.add( domain.getDomainId() );
out.write( domain.getDomainId().getId() );
out.write( "" + domain.getTotalCount() );
out.write( separator );
// Description only if present and not the NONE placeholder.
if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
.equals( SurfacingConstants.NONE ) ) ) {
out.write( protein.getDescription() );
out.write( separator );
// Accession only if present and not the NONE placeholder.
if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
.equals( SurfacingConstants.NONE ) ) ) {
out.write( protein.getAccession() );
out.write( SurfacingConstants.NL );
// Writes a separator-delimited record for each protein (optionally
// restricted to limit_to_species) that contains the query domains in
// N-to-C order: species id, protein id, unique domain ids with counts,
// description and accession (each skipped when empty or NONE).
// NOTE(review): structurally identical to doit(...) above except for the
// absent average_protein_lengths_by_dc parameter — candidate for sharing
// a common private helper. Writes to a Writer "out" whose declaration is
// not visible in this extract — confirm against the full signature.
public static void extractProteinNames( final List<Protein> proteins,
final List<DomainId> query_domain_ids_nc_order,
final String separator,
final String limit_to_species ) throws IOException {
for( final Protein protein : proteins ) {
// Empty limit_to_species means "no species filter".
if ( ForesterUtil.isEmpty( limit_to_species )
|| protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
if ( protein.contains( query_domain_ids_nc_order, true ) ) {
out.write( protein.getSpecies().getSpeciesId() );
out.write( separator );
out.write( protein.getProteinId().getId() );
out.write( separator );
// Emit each distinct domain id only once per protein.
final Set<DomainId> visited_domain_ids = new HashSet<DomainId>();
boolean first = true;
for( final Domain domain : protein.getProteinDomains() ) {
if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
visited_domain_ids.add( domain.getDomainId() );
out.write( domain.getDomainId().getId() );
out.write( "" + domain.getTotalCount() );
out.write( separator );
if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
.equals( SurfacingConstants.NONE ) ) ) {
out.write( protein.getDescription() );
out.write( separator );
if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
.equals( SurfacingConstants.NONE ) ) ) {
out.write( protein.getAccession() );
out.write( SurfacingConstants.NL );
// Overload keyed by a single domain id: for every protein (across all
// species in protein_lists_per_species, optionally restricted to
// limit_to_species) that has at least one domain with domain_id, writes
// species id, protein id, the median per-sequence E-value of those
// domains (in brackets), description and accession.
// NOTE(review): writes to a Writer "out" whose declaration is not
// visible in this extract — confirm against the full signature.
public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
final DomainId domain_id,
final String separator,
final String limit_to_species ) throws IOException {
for( final Species species : protein_lists_per_species.keySet() ) {
for( final Protein protein : protein_lists_per_species.get( species ) ) {
if ( ForesterUtil.isEmpty( limit_to_species )
|| protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
final List<Domain> domains = protein.getProteinDomains( domain_id );
if ( domains.size() > 0 ) {
// Median E-value over all copies of this domain in the protein.
final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
for( final Domain domain : domains ) {
stats.addValue( domain.getPerSequenceEvalue() );
out.write( protein.getSpecies().getSpeciesId() );
out.write( separator );
out.write( protein.getProteinId().getId() );
out.write( separator );
out.write( "[" + FORMATTER.format( stats.median() ) + "]" );
out.write( separator );
// Description only if present and not the NONE placeholder.
if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
.equals( SurfacingConstants.NONE ) ) ) {
out.write( protein.getDescription() );
out.write( separator );
if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
.equals( SurfacingConstants.NONE ) ) ) {
out.write( protein.getAccession() );
out.write( SurfacingConstants.NL );
// Collects the union of all domain ids across all genomes in gwcd_list
// into a single sorted set.
public static SortedSet<DomainId> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
final SortedSet<DomainId> all_domains_ids = new TreeSet<DomainId>();
for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
final Set<DomainId> all_domains = gwcd.getAllDomainIds();
// for( final Domain domain : all_domains ) {
all_domains_ids.addAll( all_domains );
return all_domains_ids;
// Counts, over all given proteins, how many times each domain id occurs
// (every domain instance counts, including repeats within one protein);
// returns the counts keyed by domain id string, sorted.
public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
for( final Protein protein_domain_collection : protein_domain_collections ) {
for( final Object name : protein_domain_collection.getProteinDomains() ) {
final BasicDomain protein_domain = ( BasicDomain ) name;
final String id = protein_domain.getDomainId().getId();
// Increment existing count; first occurrence handled elsewhere
// (else-branch not visible in this extract).
if ( map.containsKey( id ) ) {
map.put( id, map.get( id ) + 1 );
// Walks the phylogeny post-order and, for every node that has neither a
// name nor a taxonomy scientific/common name, appends the parent's name
// to "names" (an out-parameter) and logs the node's external descendants.
// Returns the number of such unnamed nodes (counter/return not visible in
// this extract).
public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
final PhylogenyNodeIterator it = p.iteratorPostorder();
while ( it.hasNext() ) {
final PhylogenyNode n = it.next();
// "Lacking a name" = empty node name AND no usable taxonomy name
// (neither scientific nor common).
if ( ForesterUtil.isEmpty( n.getName() )
&& ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
.getScientificName() ) )
&& ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
.getCommonName() ) ) ) {
if ( n.getParent() != null ) {
names.append( " " );
names.append( n.getParent().getName() );
// Raw type: list of external descendant nodes.
final List l = n.getAllExternalDescendants();
for( final Object object : l ) {
// NOTE(review): prints the whole list once per element — likely
// meant to print "object" instead of "l".
System.out.println( l.toString() );
* Returns true if Domain domain falls in an uninterrupted stretch of
1272 * covered positions.
1275 * @param covered_positions
// Checks every position of the domain's [from, to] range against the
// coverage list; a position beyond the list or not marked true means the
// domain is NOT fully engulfed (return statements not visible in this
// extract).
public static boolean isEngulfed( final Domain domain, final List<Boolean> covered_positions ) {
for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
if ( ( i >= covered_positions.size() ) || ( covered_positions.get( i ) != true ) ) {
// Decorates phylogeny p with the parsimony results and stamps it with a
// human-readable description (method, date, cost, gains, losses,
// unchanged, parameters), a parsimony-cost confidence value, and fixed
// rooting flags (rooted, not re-rootable). Mutates p in place.
// NOTE(review): one parameter line between "method" and "parameters_str"
// is not visible in this extract.
public static void preparePhylogeny( final Phylogeny p,
final DomainParsimonyCalculator domain_parsimony,
final String date_time,
final String method,
final String parameters_str ) {
domain_parsimony.decoratePhylogenyWithDomains( p );
final StringBuilder desc = new StringBuilder();
desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
desc.append( "[Parameters: " + parameters_str + "]" );
p.setDescription( desc.toString() );
// Confidence carries the parsimony cost, labeled "parsimony".
p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
p.setRerootable( false );
p.setRooted( true );
1309 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
// Renders a protein's pairwise domain combinations as separator-delimited
// rows (one row per distinct unordered-by-id, ordered-by-position pair):
// species | protein id | n-terminal domain | c-terminal domain |
// n-terminal per-domain E-value | c-terminal per-domain E-value |
// n-terminal count | c-terminal count. Proteins with exactly one domain
// or none produce a single row with the unused columns left empty.
// Throws IllegalArgumentException if the protein's species is null/empty.
static public StringBuffer proteinToDomainCombinations( final Protein protein,
final String protein_id,
final String separator ) {
final StringBuffer sb = new StringBuffer();
if ( protein.getSpecies() == null ) {
throw new IllegalArgumentException( "species must not be null" );
if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
throw new IllegalArgumentException( "species id must not be empty" );
final List<Domain> domains = protein.getProteinDomains();
if ( domains.size() > 1 ) {
// Per-protein occurrence count for each domain id.
final Map<String, Integer> counts = new HashMap<String, Integer>();
for( final Domain domain : domains ) {
final String id = domain.getDomainId().getId();
if ( counts.containsKey( id ) ) {
counts.put( id, counts.get( id ) + 1 );
counts.put( id, 1 );
// Track emitted combinations so each pair appears only once.
final Set<String> dcs = new HashSet<String>();
for( int i = 1; i < domains.size(); ++i ) {
for( int j = 0; j < i; ++j ) {
Domain domain_n = domains.get( i );
Domain domain_c = domains.get( j );
// Order the pair so domain_n is N-terminal (smaller "from").
if ( domain_n.getFrom() > domain_c.getFrom() ) {
domain_n = domains.get( j );
domain_c = domains.get( i );
final String dc = domain_n.getDomainId().getId() + domain_c.getDomainId().getId();
if ( !dcs.contains( dc ) ) {
sb.append( protein.getSpecies() );
sb.append( separator );
sb.append( protein_id );
sb.append( separator );
sb.append( domain_n.getDomainId().getId() );
sb.append( separator );
sb.append( domain_c.getDomainId().getId() );
sb.append( separator );
sb.append( domain_n.getPerDomainEvalue() );
sb.append( separator );
sb.append( domain_c.getPerDomainEvalue() );
sb.append( separator );
sb.append( counts.get( domain_n.getDomainId().getId() ) );
sb.append( separator );
sb.append( counts.get( domain_c.getDomainId().getId() ) );
sb.append( ForesterUtil.LINE_SEPARATOR );
else if ( domains.size() == 1 ) {
// Single-domain protein: one row, C-terminal columns left empty.
sb.append( protein.getSpecies() );
sb.append( separator );
sb.append( protein_id );
sb.append( separator );
sb.append( domains.get( 0 ).getDomainId().getId() );
sb.append( separator );
sb.append( separator );
sb.append( domains.get( 0 ).getPerDomainEvalue() );
sb.append( separator );
sb.append( separator );
sb.append( separator );
sb.append( ForesterUtil.LINE_SEPARATOR );
// Domain-less protein: row with only species and protein id filled.
sb.append( protein.getSpecies() );
sb.append( separator );
sb.append( protein_id );
sb.append( separator );
sb.append( separator );
sb.append( separator );
sb.append( separator );
sb.append( separator );
sb.append( separator );
sb.append( ForesterUtil.LINE_SEPARATOR );
1399 * Example regarding engulfment: ------------0.1 ----------0.2 --0.3 =>
1400 * domain with 0.3 is ignored
1402 * -----------0.1 ----------0.2 --0.3 => domain with 0.3 is ignored
1405 * ------------0.1 ----------0.3 --0.2 => domains with 0.3 and 0.2 are _not_
1408 * @param max_allowed_overlap
1409 * maximal allowed overlap (inclusive) to be still considered not
1410 * overlapping (zero or negative value to allow any overlap)
1411 * @param remove_engulfed_domains
1412 * to remove domains which are completely engulfed by coverage of
1413 * domains with better support
// Builds a pruned copy of the protein keeping only domains that, in
// ascending-confidence order, overlap previously accepted coverage by at
// most max_allowed_overlap positions (negative value = any overlap
// allowed) and, when remove_engulfed_domains is set, are not completely
// engulfed by existing coverage. Accepted domains extend the boolean
// coverage list over their [from, to] range. Returns the new Protein;
// the input protein is not modified.
public static Protein removeOverlappingDomains( final int max_allowed_overlap,
final boolean remove_engulfed_domains,
final Protein protein ) {
final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(), protein.getSpecies()
.getSpeciesId(), protein.getLength() );
// Best-supported (lowest confidence value) domains claim coverage first.
final List<Domain> sorted = SurfacingUtil.sortDomainsWithAscendingConfidenceValues( protein );
final List<Boolean> covered_positions = new ArrayList<Boolean>();
for( final Domain domain : sorted ) {
if ( ( ( max_allowed_overlap < 0 ) || ( SurfacingUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
&& ( !remove_engulfed_domains || !isEngulfed( domain, covered_positions ) ) ) {
// Pad the coverage list with 'false' up to the domain start.
final int covered_positions_size = covered_positions.size();
for( int i = covered_positions_size; i < domain.getFrom(); ++i ) {
covered_positions.add( false );
// Mark the domain's own range as covered, growing the list as needed.
final int new_covered_positions_size = covered_positions.size();
for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
if ( i < new_covered_positions_size ) {
covered_positions.set( i, true );
covered_positions.add( true );
pruned_protein.addProteinDomain( domain );
return pruned_protein;
// Returns the protein's domains as a new list sorted by ascending
// confidence value (the original domain list is left untouched).
public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
final List<Domain> domains = new ArrayList<Domain>();
for( final Domain d : protein.getProteinDomains() ) {
Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// Splits a stringified domain combination of the form "A=B" into its two
// domain-id parts; prints an error (and presumably returns an empty list
// — not visible in this extract) if the format is not exactly two parts.
private static List<String> splitDomainCombination( final String dc ) {
final String[] s = dc.split( "=" );
if ( s.length != 2 ) {
ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
final List<String> l = new ArrayList<String>( 2 );
// For every internal node of p, collects the domains gained (get_gains ==
// true) or lost (false) on that subtree and writes them — one per line —
// to a per-node file named "<node name><suffix_for_filename>" inside a
// base directory created under outdir. Nodes with no changed domains get
// no file.
public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
final boolean get_gains,
final String outdir,
final String suffix_for_filename ) throws IOException {
// Select which gain/loss state to harvest from the subtree.
CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
state = CharacterStateMatrix.GainLossStates.LOSS;
final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
final PhylogenyNode node = it.next();
if ( !node.isExternal() ) {
final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
if ( domains.size() > 0 ) {
// One output file per internal node with changes.
final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
+ node.getName() + suffix_for_filename );
for( final String domain : domains ) {
writer.write( domain );
writer.write( ForesterUtil.LINE_SEPARATOR );
1497 private static void writeAllEncounteredPfamsToFile( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1498 final Map<GoId, GoTerm> go_id_to_term_map,
1499 final String outfile_name,
1500 final SortedSet<String> all_pfams_encountered ) {
1501 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
1502 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
1503 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
1504 final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
1505 int biological_process_counter = 0;
1506 int cellular_component_counter = 0;
1507 int molecular_function_counter = 0;
1508 int pfams_with_mappings_counter = 0;
1509 int pfams_without_mappings_counter = 0;
1510 int pfams_without_mappings_to_bp_or_mf_counter = 0;
1511 int pfams_with_mappings_to_bp_or_mf_counter = 0;
1513 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
1514 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
1515 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
1516 summary_writer.write( "# Pfam to GO mapping summary" );
1517 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1518 summary_writer.write( "# Actual summary is at the end of this file." );
1519 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1520 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
1521 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1522 for( final String pfam : all_pfams_encountered ) {
1523 all_pfams_encountered_writer.write( pfam );
1524 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
1525 final DomainId domain_id = new DomainId( pfam );
1526 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
1527 ++pfams_with_mappings_counter;
1528 all_pfams_encountered_with_go_annotation_writer.write( pfam );
1529 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
1530 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
1531 boolean maps_to_bp = false;
1532 boolean maps_to_cc = false;
1533 boolean maps_to_mf = false;
1534 for( final GoId go_id : go_ids ) {
1535 final GoTerm go_term = go_id_to_term_map.get( go_id );
1536 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
1539 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
1542 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
1547 ++biological_process_counter;
1550 ++cellular_component_counter;
1553 ++molecular_function_counter;
1555 if ( maps_to_bp || maps_to_mf ) {
1556 ++pfams_with_mappings_to_bp_or_mf_counter;
1559 ++pfams_without_mappings_to_bp_or_mf_counter;
1563 ++pfams_without_mappings_to_bp_or_mf_counter;
1564 ++pfams_without_mappings_counter;
1565 summary_writer.write( pfam );
1566 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1569 all_pfams_encountered_writer.close();
1570 all_pfams_encountered_with_go_annotation_writer.close();
1571 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
1572 + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
1573 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
1574 + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
1576 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
1577 + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
1578 + encountered_pfams_summary_file + "\"" );
1579 ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered : "
1580 + all_pfams_encountered.size() );
1581 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping : "
1582 + pfams_without_mappings_counter + " ["
1583 + ( 100 * pfams_without_mappings_counter / all_pfams_encountered.size() ) + "%]" );
1584 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
1585 + pfams_without_mappings_to_bp_or_mf_counter + " ["
1586 + ( 100 * pfams_without_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1587 ForesterUtil.programMessage( surfacing.PRG_NAME,
1588 "Pfams with a mapping : " + pfams_with_mappings_counter
1590 + ( 100 * pfams_with_mappings_counter / all_pfams_encountered.size() )
1592 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func. : "
1593 + pfams_with_mappings_to_bp_or_mf_counter + " ["
1594 + ( 100 * pfams_with_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1595 ForesterUtil.programMessage( surfacing.PRG_NAME,
1596 "Pfams with mapping to biological process: " + biological_process_counter
1598 + ( 100 * biological_process_counter / all_pfams_encountered.size() )
1600 ForesterUtil.programMessage( surfacing.PRG_NAME,
1601 "Pfams with mapping to molecular function: " + molecular_function_counter
1603 + ( 100 * molecular_function_counter / all_pfams_encountered.size() )
1605 ForesterUtil.programMessage( surfacing.PRG_NAME,
1606 "Pfams with mapping to cellular component: " + cellular_component_counter
1608 + ( 100 * cellular_component_counter / all_pfams_encountered.size() )
1610 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1611 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
1612 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1613 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter
1614 + " [" + ( 100 * pfams_without_mappings_counter / all_pfams_encountered.size() ) + "%]" );
1615 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1616 summary_writer.write( "# Pfams without mapping to proc. or func. : "
1617 + pfams_without_mappings_to_bp_or_mf_counter + " ["
1618 + ( 100 * pfams_without_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1619 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1620 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
1621 + ( 100 * pfams_with_mappings_counter / all_pfams_encountered.size() ) + "%]" );
1622 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1623 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
1624 + pfams_with_mappings_to_bp_or_mf_counter + " ["
1625 + ( 100 * pfams_with_mappings_to_bp_or_mf_counter / all_pfams_encountered.size() ) + "%]" );
1626 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1627 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
1628 + ( 100 * biological_process_counter / all_pfams_encountered.size() ) + "%]" );
1629 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1630 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
1631 + ( 100 * molecular_function_counter / all_pfams_encountered.size() ) + "%]" );
1632 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1633 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
1634 + ( 100 * cellular_component_counter / all_pfams_encountered.size() ) + "%]" );
1635 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
1636 summary_writer.close();
1638 catch ( final IOException e ) {
1639 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes all binary domain combinations of one genome to a DOT (graph describing
// language) file, for downstream graph analysis. The output file name is derived
// from input_file_properties[ i ][ 0 ] plus a fixed suffix, and is placed under
// output_dir when output_dir is non-null. Any IOException is fatal.
// NOTE(review): 'i' is not declared in the visible parameter list — presumably an
// int parameter (genome index) on an elided signature line; confirm in full source.
1643 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1644 final File output_dir,
1645 final GenomeWideCombinableDomains gwcd,
1647 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1648 File dc_outfile_dot = new File( input_file_properties[ i ][ 0 ]
1649 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1650 if ( output_dir != null ) {
// Redirect the output file into the requested output directory.
1651 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
// Fail early (fatally) if the target file cannot be written.
1653 checkForOutputFileWriteability( dc_outfile_dot );
1654 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1656 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
// One DOT statement per binary domain combination.
1657 for( final BinaryDomainCombination bdc : binary_combinations ) {
1658 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1660 out_dot.write( SurfacingConstants.NL );
1664 catch ( final IOException e ) {
1665 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1667 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
1668 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1669 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
// Writes, for each identifier (taxon/tree node) of a gain-loss matrix, the list of
// characters (domains) whose state matches 'state'. A null 'state' selects characters
// that are either GAIN or UNCHANGED_PRESENT (i.e. "present"). Each character may be
// followed by its description from 'descriptions' (when available), then by
// 'character_separator'. Identifiers are emitted in sorted order, each framed by
// 'indentifier_characters_separator' (sic — parameter name typo preserved from API).
// Any IOException is fatal.
1672 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1673 final CharacterStateMatrix.GainLossStates state,
1674 final String filename,
1675 final String indentifier_characters_separator,
1676 final String character_separator,
1677 final Map<String, String> descriptions ) {
1678 final File outfile = new File( filename );
1679 checkForOutputFileWriteability( outfile );
// Sort identifiers so output order is deterministic regardless of matrix order.
1680 final SortedSet<String> sorted_ids = new TreeSet<String>();
1681 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1682 sorted_ids.add( matrix.getIdentifier( i ) );
1685 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1686 for( final String id : sorted_ids ) {
1687 out.write( indentifier_characters_separator );
1688 out.write( "#" + id );
1689 out.write( indentifier_characters_separator );
1690 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1692 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1693 if ( ( matrix.getState( id, c ) == state )
1694 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1695 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1696 out.write( matrix.getCharacter( c ) );
// Append optional human-readable description for this character.
1697 if ( ( descriptions != null ) && !descriptions.isEmpty()
1698 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
1700 out.write( descriptions.get( matrix.getCharacter( c ) ) );
1702 out.write( character_separator );
1709 catch ( final IOException e ) {
1710 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1712 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain combinations:
// instead of raw character strings, each selected character is parsed into a
// BinaryDomainCombination and emitted in the requested graph-describing output
// format (e.g. DOT) for graph analysis. A null 'state' selects GAIN or
// UNCHANGED_PRESENT characters. Any IOException (or parse failure) is fatal.
1715 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1716 final CharacterStateMatrix.GainLossStates state,
1717 final String filename,
1718 final String indentifier_characters_separator,
1719 final String character_separator,
1720 final BinaryDomainCombination.OutputFormat bc_output_format ) {
1721 final File outfile = new File( filename );
1722 checkForOutputFileWriteability( outfile );
// Deterministic output order: iterate identifiers sorted.
1723 final SortedSet<String> sorted_ids = new TreeSet<String>();
1724 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1725 sorted_ids.add( matrix.getIdentifier( i ) );
1728 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1729 for( final String id : sorted_ids ) {
1730 out.write( indentifier_characters_separator );
1731 out.write( "#" + id );
1732 out.write( indentifier_characters_separator );
1733 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1735 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1736 if ( ( matrix.getState( id, c ) == state )
1737 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1738 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
// Parse the character string into a binary domain combination; a malformed
// character aborts the program.
1739 BinaryDomainCombination bdc = null;
1741 bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
1743 catch ( final Exception e ) {
1744 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1746 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
1747 out.write( character_separator );
1754 catch ( final IOException e ) {
1755 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1757 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Writes a detailed HTML report of the characters (domains or binary domain
// combinations) selected from a gain-loss matrix, one section per identifier
// (tree node / species), with each domain linked to its GO annotations
// (GO id, term name, namespace). A null 'state' selects GAIN or UNCHANGED_PRESENT.
// Also writes one plain-text file per tree node (under a per-node base directory)
// listing that node's gained/lost domains, and accumulates encountered pfams into
// 'all_pfams_encountered' and (when non-null) 'pfams_gained_or_lost'.
// Optionally restricts GO terms to 'go_namespace_limit'. Any IOException is fatal.
1760 public static void writeBinaryStatesMatrixToList( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1761 final Map<GoId, GoTerm> go_id_to_term_map,
1762 final GoNameSpace go_namespace_limit,
1763 final boolean domain_combinations,
1764 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1765 final CharacterStateMatrix.GainLossStates state,
1766 final String filename,
1767 final String indentifier_characters_separator,
1768 final String character_separator,
1769 final String title_for_html,
1770 final String prefix_for_html,
1771 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
1772 final SortedSet<String> all_pfams_encountered,
1773 final SortedSet<String> pfams_gained_or_lost,
1774 final String suffix_for_per_node_events_file ) {
// Argument validation: a GO namespace limit requires a usable GO-id -> term map,
// and detailed HTML output requires both mapping tables to be present.
1775 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1776 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
1778 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
1779 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
1781 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1782 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
1784 final File outfile = new File( filename );
1785 checkForOutputFileWriteability( outfile );
// Deterministic section order: identifiers sorted alphabetically.
1786 final SortedSet<String> sorted_ids = new TreeSet<String>();
1787 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1788 sorted_ids.add( matrix.getIdentifier( i ) );
1791 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
1792 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
1793 domain_combinations,
1796 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1797 File per_node_go_mapped_domain_gain_loss_outfile = null;
1798 int per_node_counter = 0;
// --- HTML preamble and page title ---
1799 out.write( "<html>" );
1800 out.write( SurfacingConstants.NL );
1801 addHtmlHead( out, title_for_html );
1802 out.write( SurfacingConstants.NL );
1803 out.write( "<body>" );
1804 out.write( SurfacingConstants.NL );
1805 out.write( "<h1>" );
1806 out.write( SurfacingConstants.NL );
1807 out.write( title_for_html );
1808 out.write( SurfacingConstants.NL );
1809 out.write( "</h1>" );
1810 out.write( SurfacingConstants.NL );
// --- Table of contents: one anchor link per identifier ---
1811 out.write( "<table>" );
1812 out.write( SurfacingConstants.NL );
1813 for( final String id : sorted_ids ) {
// PATTERN_SP_STYLE_TAXONOMY recognizes swissprot-style taxonomy codes;
// the branch taken on a match is elided here.
1814 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1815 if ( matcher.matches() ) {
1818 out.write( "<tr>" );
1819 out.write( "<td>" );
1820 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
1821 out.write( "</td>" );
1822 out.write( "</tr>" );
1823 out.write( SurfacingConstants.NL );
1825 out.write( "</table>" );
1826 out.write( SurfacingConstants.NL );
// --- One detailed section per identifier ---
1827 for( final String id : sorted_ids ) {
1828 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1829 if ( matcher.matches() ) {
1832 out.write( SurfacingConstants.NL );
1833 out.write( "<h2>" );
1834 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
1835 writeTaxonomyLinks( out, id );
1836 out.write( "</h2>" );
1837 out.write( SurfacingConstants.NL );
// Section table header: Pfam domain(s) | GO acc | GO term | GO namespace.
1838 out.write( "<table>" );
1839 out.write( SurfacingConstants.NL );
1840 out.write( "<tr>" );
1841 out.write( "<td><b>" );
1842 out.write( "Pfam domain(s)" );
1843 out.write( "</b></td><td><b>" );
1844 out.write( "GO term acc" );
1845 out.write( "</b></td><td><b>" );
1846 out.write( "GO term" );
1847 out.write( "</b></td><td><b>" );
1848 out.write( "GO namespace" );
1849 out.write( "</b></td>" );
1850 out.write( "</tr>" );
1851 out.write( SurfacingConstants.NL );
// NOTE(review): second "</tr>" without a matching "<tr>" — harmless to most
// browsers but looks like leftover markup; confirm before cleaning up.
1852 out.write( "</tr>" );
1853 out.write( SurfacingConstants.NL );
1854 per_node_counter = 0;
// Open the per-node plain-text gain/loss file only if there is anything to write.
1855 if ( matrix.getNumberOfCharacters() > 0 ) {
1856 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
1857 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
1858 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
1859 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
1860 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
1863 per_node_go_mapped_domain_gain_loss_outfile = null;
1864 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1866 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1868 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1869 if ( ( matrix.getState( id, c ) == state )
1870 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
1871 .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
1872 final String character = matrix.getCharacter( c );
1873 String domain_0 = "";
1874 String domain_1 = "";
// A character containing the separator is a binary domain combination
// ("A=B"); otherwise it is a single domain.
1875 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
1876 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
1877 if ( s.length != 2 ) {
1878 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
1879 + character + "]" );
1885 domain_0 = character;
// Emit the HTML rows (domain link + GO annotations) for this character.
1887 writeDomainData( domain_id_to_go_ids_map,
1894 character_separator,
1895 domain_id_to_secondary_features_maps,
// Track every pfam seen, and (optionally) those gained or lost.
1897 all_pfams_encountered.add( domain_0 );
1898 if ( pfams_gained_or_lost != null ) {
1899 pfams_gained_or_lost.add( domain_0 );
1901 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
1902 all_pfams_encountered.add( domain_1 );
1903 if ( pfams_gained_or_lost != null ) {
1904 pfams_gained_or_lost.add( domain_1 );
1907 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1908 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
// Close this node's file; delete it again if nothing was written into it.
1915 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1916 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
1917 if ( per_node_counter < 1 ) {
1918 per_node_go_mapped_domain_gain_loss_outfile.delete();
1920 per_node_counter = 0;
1922 out.write( "</table>" );
1923 out.write( SurfacingConstants.NL );
1924 out.write( "<hr>" );
1925 out.write( SurfacingConstants.NL );
1926 } // for( final String id : sorted_ids ) {
// --- HTML postamble ---
1927 out.write( "</body>" );
1928 out.write( SurfacingConstants.NL );
1929 out.write( "</html>" );
1930 out.write( SurfacingConstants.NL );
1934 catch ( final IOException e ) {
1935 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1937 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes the domain-combination counts of one genome to its own output file
// (sorted per 'dc_sort_order'), and appends a one-line, tab-separated summary of
// that genome's domain "promiscuity" statistics (mean, SD, median, min, max, N,
// most promiscuous domain(s)) to the shared per-genome statistics writer.
// NOTE(review): 'i' is not declared in the visible parameter list — presumably an
// int parameter (genome index) on an elided signature line; confirm in full source.
1940 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
1941 final File output_dir,
1942 final Writer per_genome_domain_promiscuity_statistics_writer,
1943 final GenomeWideCombinableDomains gwcd,
1945 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1946 File dc_outfile = new File( input_file_properties[ i ][ 0 ]
1947 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1948 if ( output_dir != null ) {
1949 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
1951 checkForOutputFileWriteability( dc_outfile );
1953 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
1954 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
1957 catch ( final IOException e ) {
1958 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Tab-separated promiscuity summary row for this genome.
1960 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
1962 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 0 ] + "\t" );
1963 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
// Sample standard deviation needs at least two data points.
1964 if ( stats.getN() < 2 ) {
1965 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
1968 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
1969 .sampleStandardDeviation() ) + "\t" );
1971 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
1972 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
1973 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
1974 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
1975 final SortedSet<DomainId> mpds = gwcd.getMostPromiscuosDomain();
1976 for( final DomainId mpd : mpds ) {
1977 per_genome_domain_promiscuity_statistics_writer.write( mpd.getId() + " " );
1979 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
1981 catch ( final IOException e ) {
1982 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Progress message; format depends on whether a third property (e.g. a label)
// is present for this input file.
1984 if ( input_file_properties[ i ].length == 3 ) {
1985 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
1986 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1987 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
1990 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
1991 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
1992 + dc_outfile + "\"" );
// Emits the HTML table rows for one character: either a single domain (domain_1
// empty) or a binary domain combination (domain_0 and domain_1). For each domain
// with GO annotations, one row per GO id is written (AmiGO-linked accession, term
// name, namespace), restricted to 'go_namespace_limit' when non-null; GO ids seen
// are added to 'all_go_ids' when that set is non-null. If neither domain has any
// GO annotation, a single row with empty annotation cells is written.
// NOTE(review): writes go to a Writer 'out' that is not in the visible parameter
// list — presumably declared on an elided signature line; confirm in full source.
1996 private static void writeDomainData( final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
1997 final Map<GoId, GoTerm> go_id_to_term_map,
1998 final GoNameSpace go_namespace_limit,
2000 final String domain_0,
2001 final String domain_1,
2002 final String prefix_for_html,
2003 final String character_separator_for_non_html_output,
2004 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps,
2005 final Set<GoId> all_go_ids ) throws IOException {
2006 boolean any_go_annotation_present = false;
2007 boolean first_has_no_go = false;
2008 int domain_count = 2; // To distinguish between domains and binary domain combinations.
// Single domain: only iterate once (the branch reducing domain_count is elided).
2009 if ( ForesterUtil.isEmpty( domain_1 ) ) {
2012 // The following has a difficult to understand logic.
// d == 0 handles domain_0, d == 1 handles domain_1.
2013 for( int d = 0; d < domain_count; ++d ) {
2014 List<GoId> go_ids = null;
2015 boolean go_annotation_present = false;
2017 final DomainId domain_id = new DomainId( domain_0 );
2018 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2019 go_annotation_present = true;
2020 any_go_annotation_present = true;
2021 go_ids = domain_id_to_go_ids_map.get( domain_id );
// Remember that domain_0 had no GO ids, so domain_1's first row still
// carries the domain-id cell (see 'first' below).
2024 first_has_no_go = true;
2028 final DomainId domain_id = new DomainId( domain_1 );
2029 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2030 go_annotation_present = true;
2031 any_go_annotation_present = true;
2032 go_ids = domain_id_to_go_ids_map.get( domain_id );
2035 if ( go_annotation_present ) {
// Only the first emitted row for this character shows the domain id cell;
// subsequent rows get an empty leading cell.
2036 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
2037 for( final GoId go_id : go_ids ) {
2038 out.write( "<tr>" );
2041 writeDomainIdsToHtml( out,
2045 domain_id_to_secondary_features_maps );
2048 out.write( "<td></td>" );
2050 if ( !go_id_to_term_map.containsKey( go_id ) ) {
2051 throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
2053 final GoTerm go_term = go_id_to_term_map.get( go_id );
// Apply the optional namespace filter (null means: all namespaces).
2054 if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
2055 // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
2056 final String go_id_str = go_id.getId();
2057 out.write( "<td>" );
2058 out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
2059 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
2060 out.write( "</td><td>" );
2061 out.write( go_term.getName() );
// For a combination, mark which of the two domains this term belongs to.
2062 if ( domain_count == 2 ) {
2063 out.write( " (" + d + ")" );
2065 out.write( "</td><td>" );
2066 // out.write( top );
2067 // out.write( "</td><td>" );
2069 out.write( go_term.getGoNameSpace().toShortString() );
2071 out.write( "</td>" );
2072 if ( all_go_ids != null ) {
2073 all_go_ids.add( go_id );
// Term filtered out by the namespace limit: keep the row shape with empty cells.
2077 out.write( "<td>" );
2078 out.write( "</td><td>" );
2079 out.write( "</td><td>" );
2080 out.write( "</td><td>" );
2081 out.write( "</td>" );
2083 out.write( "</tr>" );
2084 out.write( SurfacingConstants.NL );
2087 } // for( int d = 0; d < domain_count; ++d )
// No GO annotation for any involved domain: emit one empty-annotation row.
2088 if ( !any_go_annotation_present ) {
2089 out.write( "<tr>" );
2090 writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
2091 out.write( "<td>" );
2092 out.write( "</td><td>" );
2093 out.write( "</td><td>" );
2094 out.write( "</td><td>" );
2095 out.write( "</td>" );
2096 out.write( "</tr>" );
2097 out.write( SurfacingConstants.NL );
// Writes one HTML table cell containing the domain id(s) as Pfam-family links,
// optionally preceded by 'prefix_for_detailed_html'. The handling of domain_1 and
// of the secondary-features maps is on lines elided from this view.
2101 private static void writeDomainIdsToHtml( final Writer out,
2102 final String domain_0,
2103 final String domain_1,
2104 final String prefix_for_detailed_html,
2105 final Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps )
2106 throws IOException {
2107 out.write( "<td>" );
2108 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
2109 out.write( prefix_for_detailed_html );
// Link the first domain to its Pfam family page.
2112 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
2113 out.write( "</td>" );
// Writes domain similarities either to a single writer or to per-initial-letter
// "split" writers (HTML or simple tab-delimited, per 'print_option'), after
// computing descriptive statistics over the value selected by 'sort_field'
// (counts difference, score mean/min/max/SD, species number, ...). For HTML
// output, a statistics table (and, when available, an ASCII histogram) precedes
// the similarity rows. Returns the computed statistics. Throws IOException on
// write failure.
2116 public static DescriptiveStatistics writeDomainSimilaritiesToFile( final StringBuilder html_desc,
2117 final StringBuilder html_title,
2118 final Writer single_writer,
2119 Map<Character, Writer> split_writers,
2120 final SortedSet<DomainSimilarity> similarities,
2121 final boolean treat_as_binary,
2122 final List<Species> species_order,
2123 final PrintableDomainSimilarity.PRINT_OPTION print_option,
2124 final DomainSimilarity.DomainSimilaritySortField sort_field,
2125 final DomainSimilarity.DomainSimilarityScoring scoring,
2126 final boolean verbose ) throws IOException {
2127 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
// Pick a histogram title matching the field being summarized. (Several case
// labels of this switch fall on lines elided from this view.)
2128 String histogram_title = null;
2129 switch ( sort_field ) {
2130 case ABS_MAX_COUNTS_DIFFERENCE:
2131 if ( treat_as_binary ) {
2132 histogram_title = "absolute counts difference:";
2135 histogram_title = "absolute (maximal) counts difference:";
2138 case MAX_COUNTS_DIFFERENCE:
2139 if ( treat_as_binary ) {
2140 histogram_title = "counts difference:";
2143 histogram_title = "(maximal) counts difference:";
2147 histogram_title = "score mean:";
2150 histogram_title = "score minimum:";
2153 histogram_title = "score maximum:";
2155 case MAX_DIFFERENCE:
2156 if ( treat_as_binary ) {
2157 histogram_title = "difference:";
2160 histogram_title = "(maximal) difference:";
2164 histogram_title = "score mean:";
2167 histogram_title = "score standard deviation:";
2170 histogram_title = "species number:";
2173 throw new AssertionError( "Unknown sort field: " + sort_field );
// Accumulate the per-similarity value for the selected field into 'stats'.
2175 for( final DomainSimilarity similarity : similarities ) {
2176 switch ( sort_field ) {
2177 case ABS_MAX_COUNTS_DIFFERENCE:
2178 stats.addValue( Math.abs( similarity.getMaximalDifferenceInCounts() ) );
2180 case MAX_COUNTS_DIFFERENCE:
2181 stats.addValue( similarity.getMaximalDifferenceInCounts() );
2184 stats.addValue( similarity.getMeanSimilarityScore() );
2187 stats.addValue( similarity.getMinimalSimilarityScore() );
2190 stats.addValue( similarity.getMaximalSimilarityScore() );
2192 case MAX_DIFFERENCE:
2193 stats.addValue( similarity.getMaximalDifference() );
2196 stats.addValue( similarity.getMeanSimilarityScore() );
2199 stats.addValue( similarity.getStandardDeviationOfSimilarityScore() );
2202 stats.addValue( similarity.getSpecies().size() );
2205 throw new AssertionError( "Unknown sort field: " + sort_field );
2209 // final HistogramData[] hists = new HistogramData[ 1 ];
2212 // List<HistogramDataItem> data_items = new
2213 // ArrayList<HistogramDataItem>();
2214 // double[] values = stats.getDataAsDoubleArray();
2215 // for( int i = 0; i < values.length; i++ ) {
2216 // HistogramDataItem data_item = new BasicHistogramDataItem( "", values[
2218 // data_items.add( data_item );
2222 // HistogramData hd0 = new HistogramData( "name",
2230 // hists[ 0 ] = hd0;
2232 // final HistogramsFrame hf = new HistogramsFrame( hists );
2233 // hf.setVisible( true );
2235 AsciiHistogram histo = null;
// NOTE(review): BUG — comparing stats.getMin() against ITSELF is always false,
// so 'histo' is never created and the histogram branches below are dead.
// Almost certainly meant: stats.getMin() < stats.getMax(). Confirm and fix.
2236 if ( stats.getMin() < stats.getMin() ) {
2237 histo = new AsciiHistogram( stats, histogram_title );
// Console summary (presumably gated on 'verbose'; the guard is elided here).
2240 if ( histo != null ) {
2241 System.out.println( histo.toStringBuffer( 20, '|', 40, 5 ) );
2243 System.out.println();
2244 System.out.println( "N                   : " + stats.getN() );
2245 System.out.println( "Min                 : " + stats.getMin() );
2246 System.out.println( "Max                 : " + stats.getMax() );
2247 System.out.println( "Mean                : " + stats.arithmeticMean() );
2248 if ( stats.getN() > 1 ) {
2249 System.out.println( "SD                  : " + stats.sampleStandardDeviation() );
2252 System.out.println( "SD                  : n/a" );
2254 System.out.println( "Median              : " + stats.median() );
2255 if ( stats.getN() > 1 ) {
2256 System.out.println( "Pearsonian skewness : " + stats.pearsonianSkewness() );
2259 System.out.println( "Pearsonian skewness : n/a" );
// With only a single writer supplied, treat it as one split writer keyed '_'.
2262 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
2263 split_writers = new HashMap<Character, Writer>();
2264 split_writers.put( '_', single_writer );
// Emit the per-format preamble (HTML head, stats table) into every writer.
2266 switch ( print_option ) {
2267 case SIMPLE_TAB_DELIMITED:
2270 for( final Character key : split_writers.keySet() ) {
2271 final Writer w = split_writers.get( key );
2272 w.write( "<html>" );
2273 w.write( SurfacingConstants.NL );
2275 addHtmlHead( w, "DCs (" + html_title + ") " + key.toString().toUpperCase() );
2278 addHtmlHead( w, "DCs (" + html_title + ")" );
2280 w.write( SurfacingConstants.NL );
2281 w.write( "<body>" );
2282 w.write( SurfacingConstants.NL );
2283 w.write( html_desc.toString() );
2284 w.write( SurfacingConstants.NL );
2287 w.write( SurfacingConstants.NL );
2288 w.write( "<tt><pre>" );
2289 w.write( SurfacingConstants.NL );
2290 if ( histo != null ) {
2291 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
2292 w.write( SurfacingConstants.NL );
2294 w.write( "</pre></tt>" );
2295 w.write( SurfacingConstants.NL );
2296 w.write( "<table>" );
2297 w.write( SurfacingConstants.NL );
2298 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
2299 w.write( SurfacingConstants.NL );
2300 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
2301 w.write( SurfacingConstants.NL );
2302 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
2303 w.write( SurfacingConstants.NL );
2304 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
2305 w.write( SurfacingConstants.NL );
2306 if ( stats.getN() > 1 ) {
2307 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
2310 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
2312 w.write( SurfacingConstants.NL );
2313 w.write( "<tr><td>Median: </td><td>" + stats.median() + "</td></tr>" );
2314 w.write( SurfacingConstants.NL );
2315 if ( stats.getN() > 1 ) {
2316 w.write( "<tr><td>Pearsonian skewness: </td><td>" + stats.pearsonianSkewness() + "</td></tr>" );
2319 w.write( "<tr><td>Pearsonian skewness: </td><td>n/a</td></tr>" );
2321 w.write( SurfacingConstants.NL );
2322 w.write( "</table>" );
2323 w.write( SurfacingConstants.NL );
2325 w.write( SurfacingConstants.NL );
2327 w.write( SurfacingConstants.NL );
2329 w.write( SurfacingConstants.NL );
2330 w.write( "<table>" );
2331 w.write( SurfacingConstants.NL );
2335 for( final Writer w : split_writers.values() ) {
2336 w.write( SurfacingConstants.NL );
// Emit the similarities themselves: to the single writer when present, else to
// the split writer matching the domain id's (lower-cased) first character,
// falling back to the '0' writer for unmatched initials.
2338 for( final DomainSimilarity similarity : similarities ) {
2339 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2340 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
2342 if ( single_writer != null ) {
2343 single_writer.write( similarity.toStringBuffer( print_option ).toString() );
2346 Writer local_writer = split_writers.get( ( similarity.getDomainId().getId().charAt( 0 ) + "" )
2347 .toLowerCase().charAt( 0 ) );
2348 if ( local_writer == null ) {
2349 local_writer = split_writers.get( '0' );
2351 local_writer.write( similarity.toStringBuffer( print_option ).toString() );
2353 for( final Writer w : split_writers.values() ) {
2354 w.write( SurfacingConstants.NL );
// Per-format postamble (close table/body/html for the HTML case).
2357 switch ( print_option ) {
2359 for( final Writer w : split_writers.values() ) {
2360 w.write( SurfacingConstants.NL );
2361 w.write( "</table>" );
2362 w.write( SurfacingConstants.NL );
2363 w.write( "</font>" );
2364 w.write( SurfacingConstants.NL );
2365 w.write( "</body>" );
2366 w.write( SurfacingConstants.NL );
2367 w.write( "</html>" );
2368 w.write( SurfacingConstants.NL );
2372 for( final Writer w : split_writers.values() ) {
// Appends one or two domain ids (one per line) to a tree node's individual
// gain/loss file; domain_1 is skipped when empty (single-domain character).
2378 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
2379 final String domain_0,
2380 final String domain_1 ) throws IOException {
2381 individual_files_writer.write( domain_0 );
2382 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
2383 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2384 individual_files_writer.write( domain_1 );
2385 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
// Serializes a character-state matrix to 'filename' in the given Format via the
// matrix's own toWriter(). I/O failure is fatal for the surfacing application.
2389 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
2390 final String filename,
2391 final Format format ) {
2392 final File outfile = new File( filename );
2393 checkForOutputFileWriteability( outfile );
2395 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
2396 matrix.toWriter( out, format );
2400 catch ( final IOException e ) {
2401 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2403 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Writes a list of distance matrices, PHYLIP-formatted and separated by line
// separators, into a single output file. I/O failure is fatal.
2406 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
2407 checkForOutputFileWriteability( matrix_outfile );
2409 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
2410 for( final DistanceMatrix distance_matrix : matrices ) {
2411 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
2412 out.write( ForesterUtil.LINE_SEPARATOR );
2417 catch ( final IOException e ) {
2418 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2420 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
// Writes each pfam id of the (already sorted) set to 'outfile_name', one per line.
// Unlike most writers in this class, an IOException here is only a warning,
// not fatal.
2423 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
2425 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
2426 for( final String pfam : pfams ) {
2427 writer.write( pfam );
2428 writer.write( ForesterUtil.LINE_SEPARATOR );
2431 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
2434 catch ( final IOException e ) {
2435 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes a phylogeny to 'filename' in phyloXML format. Failure to write is
// reported as a warning (non-fatal).
2439 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
2440 final PhylogenyWriter writer = new PhylogenyWriter();
2442 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
2444 catch ( final IOException e ) {
2445 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
2448 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
// Writes bracketed external taxonomy links for a species string: a UniProt link
// when the string matches the swissprot-style taxonomy pattern, otherwise
// EOL and ToL links. Species strings containing '_' (or too short) get no links.
2451 public static void writeTaxonomyLinks( final Writer writer, final String species ) throws IOException {
2452 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
2453 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( species );
2454 writer.write( " [" );
2455 if ( matcher.matches() ) {
2456 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_LINK + species
2457 + "\" target=\"taxonomy_window\">uniprot</a>" );
2460 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
2461 + "\" target=\"taxonomy_window\">eol</a>" );
2462 writer.write( "|" );
2463 writer.write( "<a href=\"" + SurfacingConstants.TOL_LINK + species
2464 + "\" target=\"taxonomy_window\">tol</a>" );
2466 writer.write( "]" );
2470 private static void writeToNexus( final String outfile_name,
2471 final CharacterStateMatrix<BinaryStates> matrix,
2472 final Phylogeny phylogeny ) {
2473 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
2474 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
2477 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
2478 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
2479 phylogenies.add( phylogeny );
2481 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
2482 w.write( NexusConstants.NEXUS );
2483 w.write( ForesterUtil.LINE_SEPARATOR );
2484 my_matrix.writeNexusTaxaBlock( w );
2485 my_matrix.writeNexusBinaryChractersBlock( w );
2486 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
2489 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
2491 catch ( final IOException e ) {
2492 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2496 private static void writeToNexus( final String outfile_name,
2497 final DomainParsimonyCalculator domain_parsimony,
2498 final Phylogeny phylogeny ) {
2499 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
2500 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
2502 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
2503 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
2507 public static void domainsPerProteinsStatistics( final String genome,
2508 final List<Protein> protein_list,
2509 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
2510 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
2511 final SortedSet<String> domains_which_are_always_single,
2512 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
2513 final SortedSet<String> domains_which_never_single,
2514 final Writer writer ) {
2515 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
2516 for( final Protein protein : protein_list ) {
2517 final int domains = protein.getNumberOfProteinDomains();
2518 //System.out.println( domains );
2519 stats.addValue( domains );
2520 all_genomes_domains_per_potein_stats.addValue( domains );
2521 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
2522 all_genomes_domains_per_potein_histo.put( domains, 1 );
2525 all_genomes_domains_per_potein_histo.put( domains,
2526 1 + all_genomes_domains_per_potein_histo.get( domains ) );
2528 if ( domains == 1 ) {
2529 final String domain = protein.getProteinDomain( 0 ).getDomainId().getId();
2530 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
2531 if ( domains_which_never_single.contains( domain ) ) {
2532 domains_which_never_single.remove( domain );
2533 domains_which_are_sometimes_single_sometimes_not.add( domain );
2536 domains_which_are_always_single.add( domain );
2540 else if ( domains > 1 ) {
2541 for( final Domain d : protein.getProteinDomains() ) {
2542 final String domain = d.getDomainId().getId();
2543 // System.out.println( domain );
2544 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
2545 if ( domains_which_are_always_single.contains( domain ) ) {
2546 domains_which_are_always_single.remove( domain );
2547 domains_which_are_sometimes_single_sometimes_not.add( domain );
2550 domains_which_never_single.add( domain );
2557 writer.write( genome );
2558 writer.write( "\t" );
2559 if ( stats.getN() >= 1 ) {
2560 writer.write( stats.arithmeticMean() + "" );
2561 writer.write( "\t" );
2562 if ( stats.getN() >= 2 ) {
2563 writer.write( stats.sampleStandardDeviation() + "" );
2568 writer.write( "\t" );
2569 writer.write( stats.median() + "" );
2570 writer.write( "\t" );
2571 writer.write( stats.getN() + "" );
2572 writer.write( "\t" );
2573 writer.write( stats.getMin() + "" );
2574 writer.write( "\t" );
2575 writer.write( stats.getMax() + "" );
2578 writer.write( "\t" );
2579 writer.write( "\t" );
2580 writer.write( "\t" );
2581 writer.write( "0" );
2582 writer.write( "\t" );
2583 writer.write( "\t" );
2585 writer.write( "\n" );
2587 catch ( final IOException e ) {
2588 e.printStackTrace();