3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.surfacing;
29 import java.awt.Color;
30 import java.io.BufferedWriter;
32 import java.io.FileWriter;
33 import java.io.IOException;
34 import java.io.Writer;
35 import java.text.DecimalFormat;
36 import java.text.NumberFormat;
37 import java.util.ArrayList;
38 import java.util.Arrays;
39 import java.util.Collections;
40 import java.util.Comparator;
41 import java.util.HashMap;
42 import java.util.HashSet;
43 import java.util.Iterator;
44 import java.util.List;
46 import java.util.Map.Entry;
47 import java.util.PriorityQueue;
49 import java.util.SortedMap;
50 import java.util.SortedSet;
51 import java.util.TreeMap;
52 import java.util.TreeSet;
53 import java.util.regex.Matcher;
54 import java.util.regex.Pattern;
56 import org.forester.application.surfacing;
57 import org.forester.evoinference.distance.NeighborJoining;
58 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
61 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
62 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
63 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
64 import org.forester.evoinference.matrix.distance.DistanceMatrix;
65 import org.forester.go.GoId;
66 import org.forester.go.GoNameSpace;
67 import org.forester.go.GoTerm;
68 import org.forester.go.PfamToGoMapping;
69 import org.forester.io.parsers.nexus.NexusConstants;
70 import org.forester.io.parsers.phyloxml.PhyloXmlUtil;
71 import org.forester.io.parsers.util.ParserUtils;
72 import org.forester.io.writers.PhylogenyWriter;
73 import org.forester.phylogeny.Phylogeny;
74 import org.forester.phylogeny.PhylogenyMethods;
75 import org.forester.phylogeny.PhylogenyNode;
76 import org.forester.phylogeny.PhylogenyNode.NH_CONVERSION_SUPPORT_VALUE_STYLE;
77 import org.forester.phylogeny.data.BinaryCharacters;
78 import org.forester.phylogeny.data.Confidence;
79 import org.forester.phylogeny.data.Taxonomy;
80 import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
81 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
82 import org.forester.protein.BasicDomain;
83 import org.forester.protein.BasicProtein;
84 import org.forester.protein.BinaryDomainCombination;
85 import org.forester.protein.Domain;
86 import org.forester.protein.Protein;
87 import org.forester.species.Species;
88 import org.forester.surfacing.DomainSimilarity.PRINT_OPTION;
89 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
90 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
91 import org.forester.util.AsciiHistogram;
92 import org.forester.util.BasicDescriptiveStatistics;
93 import org.forester.util.BasicTable;
94 import org.forester.util.BasicTableParser;
95 import org.forester.util.CommandLineArguments;
96 import org.forester.util.DescriptiveStatistics;
97 import org.forester.util.ForesterUtil;
98 import org.forester.util.TaxonomyColors;
99 import org.forester.util.TaxonomyGroups;
101 public final class SurfacingUtil {
// Matches 3-5 character uppercase/digit codes (SwissProt-style taxonomy codes, e.g. "HUMAN", "YEAST").
103 public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
// Cache: taxonomy code -> hex color string; presumably populated by methods elsewhere in this class — not visible in this chunk.
104 private final static Map<String, String> _TAXCODE_HEXCOLORSTRING_MAP = new HashMap<String, String>();
// Cache: taxonomy code -> taxonomy group name; presumably populated elsewhere in this class — not visible in this chunk.
105 private final static Map<String, String> _TAXCODE_TAXGROUP_MAP = new HashMap<String, String>();
// Orders domains by ascending per-domain E-value (smaller E-value = higher confidence first);
// falls back to Domain.compareTo for equal E-values. NOTE(review): parts of this comparator's
// body (return statements, braces) are elided in this view.
106 private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {
109 public int compare( final Domain d1,
111 if ( d1.getPerDomainEvalue() < d2
112 .getPerDomainEvalue() ) {
116 .getPerDomainEvalue() > d2
117 .getPerDomainEvalue() ) {
121 return d1.compareTo( d2 );
// Formats doubles with exactly three decimal places (e.g. "0.123").
125 private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
// Private constructor: SurfacingUtil is a static-utility class and must not be instantiated.
127 private SurfacingUtil() {
128 // Hidden constructor.
// Adds every binary domain combination derivable from 'genome' to the given sorted set.
// Set semantics de-duplicate combinations that occur for more than one domain id.
131 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
132 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
133 final SortedMap<String, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
134 for( final String domain_id : all_cd.keySet() ) {
// Expand each combinable-domains entry into its constituent binary combinations.
135 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
// Copies all domain ids present in 'genome' into the caller-supplied sorted set 'domain_ids'.
139 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
140 final SortedSet<String> domain_ids ) {
141 final SortedSet<String> domains = genome.getAllDomainIds();
142 for( final String domain : domains ) {
143 domain_ids.add( domain );
// Builds descriptive statistics over the mean similarity scores of the given domain similarities.
// The populated statistics object is presumably returned on an elided line below.
147 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
148 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
149 for( final DomainSimilarity similarity : similarities ) {
150 stats.addValue( similarity.getMeanSimilarityScore() );
// Terminates the program (ForesterUtil.fatalError) if 'outfile' is not writable;
// otherwise returns silently.
155 public static void checkForOutputFileWriteability( final File outfile ) {
156 final String error = ForesterUtil.isWritableFile( outfile );
157 if ( !ForesterUtil.isEmpty( error ) ) {
158 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Pre-flight check: for every unordered species pair (j < i) derived from
// 'input_file_properties', builds the pairwise-comparison output file name and
// fatally aborts if that file would not be writable. Avoids discovering
// write failures only after a long comparison run.
162 public static void checkWriteabilityForPairwiseComparisons( final DomainSimilarity.PRINT_OPTION domain_similarity_print_option,
163 final String[][] input_file_properties,
164 final String automated_pairwise_comparison_suffix,
165 final File outdir ) {
166 for( int i = 0; i < input_file_properties.length; ++i ) {
167 for( int j = 0; j < i; ++j ) {
// Column [1] of each properties row holds the species name — TODO confirm against the writer of input_file_properties.
168 final String species_i = input_file_properties[ i ][ 1 ];
169 final String species_j = input_file_properties[ j ][ 1 ];
170 String pairwise_similarities_output_file_str = surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i
171 + "_" + species_j + automated_pairwise_comparison_suffix;
// NOTE(review): the case label(s) of this switch are elided in this view; the visible
// branch appends ".html" for the HTML print option.
172 switch ( domain_similarity_print_option ) {
174 if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
175 pairwise_similarities_output_file_str += ".html";
// Resolve against 'outdir' when given, else treat the name as a path relative to the CWD.
179 final String error = ForesterUtil
180 .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str
181 : outdir + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
182 if ( !ForesterUtil.isEmpty( error ) ) {
183 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Walks a gain/loss character-state matrix in sorted-identifier order and appends each
// gained (get_gains == true) or lost (get_gains == false) character — interpreted as a
// binary domain combination of the requested 'dc_type' — to the output list
// 'all_binary_domains_combination_gained'. Despite the method name, nothing is written
// to a file in the visible code.
189 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
190 final BinaryDomainCombination.DomainCombinationType dc_type,
191 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
192 final boolean get_gains ) {
// Sort identifiers so output order is deterministic regardless of matrix order.
193 final SortedSet<String> sorted_ids = new TreeSet<String>();
194 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
195 sorted_ids.add( matrix.getIdentifier( i ) );
197 for( final String id : sorted_ids ) {
198 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
// NOTE(review): the middle of this condition (presumably "|| ( !get_gains" on the elided
// line 200) is not visible in this view.
199 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
201 && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
202 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
203 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
204 .obtainInstance( matrix.getCharacter( c ) ) );
206 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
207 all_binary_domains_combination_gained
208 .add( DirectedBinaryDomainCombination.obtainInstance( matrix.getCharacter( c ) ) );
// Fallback (undirected) combination type; the enclosing "else" line is elided in this view.
211 all_binary_domains_combination_gained
212 .add( BasicBinaryDomainCombination.obtainInstance( matrix.getCharacter( c ) ) );
// Inverts a list of Pfam->GO mappings into a multimap: domain (Pfam) id -> list of GO ids.
// A domain mapped to several GO terms accumulates all of them, preserving input order.
219 public static Map<String, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
220 final Map<String, List<GoId>> domain_id_to_go_ids_map = new HashMap<String, List<GoId>>( pfam_to_go_mappings
222 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
// First occurrence of a domain id creates its (initially empty) GO-id list.
223 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
224 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
226 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
228 return domain_id_to_go_ids_map;
// Parses a tab-separated file (column 0: domain id, column 1: secondary feature) into a
// sorted multimap: domain id -> set of secondary features. The populated map is presumably
// returned on an elided line below; the throws clause (line 232) is also elided.
231 public static Map<String, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
233 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, '\t' );
234 final Map<String, Set<String>> map = new TreeMap<String, Set<String>>();
235 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
236 final String domain_id = primary_table.getValue( 0, r );
237 if ( !map.containsKey( domain_id ) ) {
238 map.put( domain_id, new HashSet<String>() );
240 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
// Computes a neighbor-joining tree from 'distance', names it after the output file,
// and writes it to 'nj_tree_outfile'. Aborts early (fatalError) if the file is not
// writable. NOTE(review): the cast assumes 'distance' is a BasicSymmetricalDistanceMatrix
// — a ClassCastException results otherwise; confirm all callers pass that type.
245 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile,
246 final DistanceMatrix distance ) {
247 checkForOutputFileWriteability( nj_tree_outfile );
248 final NeighborJoining nj = NeighborJoining.createInstance();
249 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
250 phylogeny.setName( nj_tree_outfile.getName() );
251 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
// Serializes the analysis run parameters into a single human-readable, comma-separated
// line (used for report headers). Unset optional parameters are rendered as "not-set".
255 public static StringBuilder createParametersAsString( final boolean ignore_dufs,
256 final double ie_value_max,
257 final double fs_e_value_max,
258 final int max_allowed_overlap,
259 final boolean no_engulfing_overlaps,
260 final File cutoff_scores_file,
261 final BinaryDomainCombination.DomainCombinationType dc_type ) {
262 final StringBuilder parameters_sb = new StringBuilder();
263 parameters_sb.append( "iE-value: " + ie_value_max );
264 parameters_sb.append( ", FS E-value: " + fs_e_value_max );
265 if ( cutoff_scores_file != null ) {
266 parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
// else-branch (keyword elided in this view): no cutoff-scores file supplied.
269 parameters_sb.append( ", Cutoff-scores-file: not-set" );
271 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
272 parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
275 parameters_sb.append( ", Max-overlap: not-set" );
277 if ( no_engulfing_overlaps ) {
278 parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
281 parameters_sb.append( ", Engulfing-overlaps: allowed" );
// The "if ( ignore_dufs )" line is elided in this view; these two appends are its branches.
284 parameters_sb.append( ", Ignore-dufs: true" );
287 parameters_sb.append( ", Ignore-dufs: false" );
289 parameters_sb.append( ", DC type (if applicable): " + dc_type );
290 return parameters_sb;
// Populates 'split_writers' with one buffered HTML writer per leading character bucket:
// keys 'a'..'z' map to files "<my_outfile>_domains_A.html".."_domains_Z.html" inside
// 'out_dir', and key '0' maps to "_domains_0.html" (presumably the catch-all for ids not
// starting with a letter — confirm against the code that selects a writer by key).
// Callers own the writers and must close them. The throws clause (line 296, presumably
// IOException from FileWriter) is elided in this view.
293 public static void createSplitWriters( final File out_dir,
294 final String my_outfile,
295 final Map<Character, Writer> split_writers )
297 split_writers.put( 'a',
298 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
299 + "_domains_A.html" ) ) );
300 split_writers.put( 'b',
301 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
302 + "_domains_B.html" ) ) );
303 split_writers.put( 'c',
304 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
305 + "_domains_C.html" ) ) );
306 split_writers.put( 'd',
307 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
308 + "_domains_D.html" ) ) );
309 split_writers.put( 'e',
310 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
311 + "_domains_E.html" ) ) );
312 split_writers.put( 'f',
313 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
314 + "_domains_F.html" ) ) );
315 split_writers.put( 'g',
316 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
317 + "_domains_G.html" ) ) );
318 split_writers.put( 'h',
319 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
320 + "_domains_H.html" ) ) );
321 split_writers.put( 'i',
322 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
323 + "_domains_I.html" ) ) );
324 split_writers.put( 'j',
325 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
326 + "_domains_J.html" ) ) );
327 split_writers.put( 'k',
328 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
329 + "_domains_K.html" ) ) );
330 split_writers.put( 'l',
331 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
332 + "_domains_L.html" ) ) );
333 split_writers.put( 'm',
334 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
335 + "_domains_M.html" ) ) );
336 split_writers.put( 'n',
337 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
338 + "_domains_N.html" ) ) );
339 split_writers.put( 'o',
340 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
341 + "_domains_O.html" ) ) );
342 split_writers.put( 'p',
343 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
344 + "_domains_P.html" ) ) );
345 split_writers.put( 'q',
346 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
347 + "_domains_Q.html" ) ) );
348 split_writers.put( 'r',
349 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
350 + "_domains_R.html" ) ) );
351 split_writers.put( 's',
352 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
353 + "_domains_S.html" ) ) );
354 split_writers.put( 't',
355 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
356 + "_domains_T.html" ) ) );
357 split_writers.put( 'u',
358 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
359 + "_domains_U.html" ) ) );
360 split_writers.put( 'v',
361 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
362 + "_domains_V.html" ) ) );
363 split_writers.put( 'w',
364 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
365 + "_domains_W.html" ) ) );
366 split_writers.put( 'x',
367 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
368 + "_domains_X.html" ) ) );
369 split_writers.put( 'y',
370 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
371 + "_domains_Y.html" ) ) );
372 split_writers.put( 'z',
373 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
374 + "_domains_Z.html" ) ) );
375 split_writers.put( '0',
376 new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
377 + "_domains_0.html" ) ) );
// Builds a map from taxonomy code -> numeric taxonomy id over the external nodes of 'phy',
// fatally aborting on missing taxonomy data or on duplicate codes/ids. The "m.put(...)"
// presumably sits on an elided line; the visible code only validates and returns nothing visible.
380 public static Map<String, Integer> createTaxCodeToIdMap( final Phylogeny phy ) {
381 final Map<String, Integer> m = new HashMap<String, Integer>();
382 for( final PhylogenyNodeIterator iter = phy.iteratorExternalForward(); iter.hasNext(); ) {
383 final PhylogenyNode n = iter.next();
384 if ( n.getNodeData().isHasTaxonomy() ) {
385 final Taxonomy t = n.getNodeData().getTaxonomy();
386 final String c = t.getTaxonomyCode();
387 if ( !ForesterUtil.isEmpty( c ) ) {
// NOTE(review): this null check can never trigger — getTaxonomy() was already
// dereferenced at line 386 above, which would have thrown NPE if it were null.
388 if ( n.getNodeData().getTaxonomy() == null ) {
389 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
391 final String id = n.getNodeData().getTaxonomy().getIdentifier().getValue();
392 if ( ForesterUtil.isEmpty( id ) ) {
393 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
// Enforce a bijection: both codes and ids must be unique across the tree.
395 if ( m.containsKey( c ) ) {
396 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy code " + c + " is not unique" );
398 final int iid = Integer.valueOf( id );
399 if ( m.containsValue( iid ) ) {
400 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy id " + iid + " is not unique" );
// else-branch for nodes without taxonomy data (keyword elided in this view).
406 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy for node " + n );
// Applies the requested level of detail to every similarity in the set (mutates elements in place).
412 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
413 final Detailedness detailedness ) {
414 for( final DomainSimilarity domain_similarity : domain_similarities ) {
// NOTE(review): this instanceof test is tautological — the element type is already
// DomainSimilarity, so it only filters out null elements. Likely a leftover from a
// refactoring of a former PrintableDomainSimilarity subtype.
415 if ( domain_similarity instanceof DomainSimilarity ) {
416 final DomainSimilarity printable_domain_similarity = domain_similarity;
417 printable_domain_similarity.setDetailedness( detailedness );
// Writes one separator-delimited report line per protein that contains the query domains
// in the given N-to-C order: species, protein id, distinct domains with counts, description
// and accession (each omitted when empty or equal to SurfacingConstants.NONE). If
// 'limit_to_species' is non-empty, only proteins of that species are reported. The output
// Writer parameter ('out') is declared on an elided line (424). NOTE(review): the method
// name "doit" is uninformative — consider renaming at the declaration site.
422 public static void doit( final List<Protein> proteins,
423 final List<String> query_domain_ids_nc_order,
425 final String separator,
426 final String limit_to_species,
427 final Map<String, List<Integer>> average_protein_lengths_by_dc )
429 for( final Protein protein : proteins ) {
430 if ( ForesterUtil.isEmpty( limit_to_species )
431 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
432 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
433 out.write( protein.getSpecies().getSpeciesId() );
434 out.write( separator );
435 out.write( protein.getProteinId().getId() );
436 out.write( separator );
// Emit each distinct domain id once, with its total count in the protein.
438 final Set<String> visited_domain_ids = new HashSet<String>();
439 boolean first = true;
440 for( final Domain domain : protein.getProteinDomains() ) {
441 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
442 visited_domain_ids.add( domain.getDomainId() );
449 out.write( domain.getDomainId() );
451 out.write( "" + domain.getTotalCount() );
456 out.write( separator );
457 if ( !( ForesterUtil.isEmpty( protein.getDescription() )
458 || protein.getDescription().equals( SurfacingConstants.NONE ) ) ) {
459 out.write( protein.getDescription() );
461 out.write( separator );
462 if ( !( ForesterUtil.isEmpty( protein.getAccession() )
463 || protein.getAccession().equals( SurfacingConstants.NONE ) ) ) {
464 out.write( protein.getAccession() );
466 out.write( SurfacingConstants.NL );
// Accumulates domains-per-protein statistics for one genome and folds them into the
// supplied cross-genome accumulators: a global DescriptiveStatistics, a histogram
// (domain count -> frequency), and three disjoint classification sets tracking whether
// each domain only ever occurs as a single-domain protein, never does, or does both.
// Finally writes one tab-separated summary row (mean, sd, median, n, min, max) for the
// genome; fields that cannot be computed (n < 1 or n < 2) are left empty.
473 public static void domainsPerProteinsStatistics( final String genome,
474 final List<Protein> protein_list,
475 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
476 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
477 final SortedSet<String> domains_which_are_always_single,
478 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
479 final SortedSet<String> domains_which_never_single,
480 final Writer writer ) {
481 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
482 for( final Protein protein : protein_list ) {
483 final int domains = protein.getNumberOfProteinDomains();
484 //System.out.println( domains );
485 stats.addValue( domains );
486 all_genomes_domains_per_potein_stats.addValue( domains );
// Increment the histogram bucket for this domain count.
487 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
488 all_genomes_domains_per_potein_histo.put( domains, 1 );
491 all_genomes_domains_per_potein_histo.put( domains,
492 1 + all_genomes_domains_per_potein_histo.get( domains ) );
// Single-domain protein: its domain is "always single" until seen in a multi-domain
// protein, at which point it moves to "sometimes single, sometimes not".
494 if ( domains == 1 ) {
495 final String domain = protein.getProteinDomain( 0 ).getDomainId();
496 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
497 if ( domains_which_never_single.contains( domain ) ) {
498 domains_which_never_single.remove( domain );
499 domains_which_are_sometimes_single_sometimes_not.add( domain );
502 domains_which_are_always_single.add( domain );
// Multi-domain protein: symmetric reclassification for each of its domains.
506 else if ( domains > 1 ) {
507 for( final Domain d : protein.getProteinDomains() ) {
508 final String domain = d.getDomainId();
509 // System.out.println( domain );
510 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
511 if ( domains_which_are_always_single.contains( domain ) ) {
512 domains_which_are_always_single.remove( domain );
513 domains_which_are_sometimes_single_sometimes_not.add( domain );
516 domains_which_never_single.add( domain );
// Per-genome summary row. NOTE(review): the enclosing "try {" and the exception handling
// body are elided in this view; only the "catch ( IOException )" line is visible below.
523 writer.write( genome );
524 writer.write( "\t" );
525 if ( stats.getN() >= 1 ) {
526 writer.write( stats.arithmeticMean() + "" );
527 writer.write( "\t" );
// Sample standard deviation needs at least two observations.
528 if ( stats.getN() >= 2 ) {
529 writer.write( stats.sampleStandardDeviation() + "" );
534 writer.write( "\t" );
535 writer.write( stats.median() + "" );
536 writer.write( "\t" );
537 writer.write( stats.getN() + "" );
538 writer.write( "\t" );
539 writer.write( stats.getMin() + "" );
540 writer.write( "\t" );
541 writer.write( stats.getMax() + "" );
// Empty-statistics branch: emit the remaining tab-separated placeholders.
544 writer.write( "\t" );
545 writer.write( "\t" );
546 writer.write( "\t" );
548 writer.write( "\t" );
549 writer.write( "\t" );
551 writer.write( "\n" );
553 catch ( final IOException e ) {
// Writes a domain-length analysis report: per-species mean-based statistics, per-domain
// outlier species (|Z| >= 1.5), then an all-species summary with ASCII histogram and
// per-species Z-scores, flagging species whose mean domain length deviates by |Z| >= 1.0.
// The output File parameter ('outfile') is declared on an elided line (561/562).
558 public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
559 final int number_of_genomes,
560 final DomainLengthsTable domain_lengths_table,
563 final DecimalFormat df = new DecimalFormat( "#.00" );
564 checkForOutputFileWriteability( outfile );
565 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
566 out.write( "MEAN BASED STATISTICS PER SPECIES" );
567 out.write( ForesterUtil.LINE_SEPARATOR );
568 out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
569 out.write( ForesterUtil.LINE_SEPARATOR );
570 out.write( ForesterUtil.LINE_SEPARATOR );
571 final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
572 out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
573 out.write( ForesterUtil.LINE_SEPARATOR );
// For each domain, list the species whose mean length for that domain is an outlier (Z >= 1.5).
574 for( final DomainLengths domain_lengths : domain_lengths_list ) {
575 final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
576 if ( species_list.size() > 0 ) {
577 out.write( domain_lengths.getDomainId() + "\t" );
578 for( final Species species : species_list ) {
579 out.write( species + "\t" );
581 out.write( ForesterUtil.LINE_SEPARATOR );
584 out.write( ForesterUtil.LINE_SEPARATOR );
585 out.write( ForesterUtil.LINE_SEPARATOR );
586 out.write( "OUTLIER SPECIES (Z 1.0)" );
587 out.write( ForesterUtil.LINE_SEPARATOR );
588 final DescriptiveStatistics stats_for_all_species = domain_lengths_table
589 .calculateMeanBasedStatisticsForAllSpecies();
590 out.write( stats_for_all_species.asSummary() );
591 out.write( ForesterUtil.LINE_SEPARATOR );
592 final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
593 out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
594 out.write( ForesterUtil.LINE_SEPARATOR );
// NOTE(review): despite the name, sampleStandardDeviation() is used as the "population" sd here.
595 final double population_sd = stats_for_all_species.sampleStandardDeviation();
596 final double population_mean = stats_for_all_species.arithmeticMean();
// First pass: raw Z-score per species.
597 for( final Species species : domain_lengths_table.getSpecies() ) {
598 final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
599 final double z = ( x - population_mean ) / population_sd;
600 out.write( species + "\t" + z );
601 out.write( ForesterUtil.LINE_SEPARATOR );
603 out.write( ForesterUtil.LINE_SEPARATOR );
// Second pass: only species with |Z| >= 1.0, formatted and with their summary statistics.
604 for( final Species species : domain_lengths_table.getSpecies() ) {
605 final DescriptiveStatistics stats_for_species = domain_lengths_table
606 .calculateMeanBasedStatisticsForSpecies( species );
607 final double x = stats_for_species.arithmeticMean();
608 final double z = ( x - population_mean ) / population_sd;
609 if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
610 out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
611 out.write( ForesterUtil.LINE_SEPARATOR );
619 * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
623 * @param all_bin_domain_combinations_changed
624 * @param sum_of_all_domains_encountered
625 * @param all_bin_domain_combinations_encountered
626 * @param is_gains_analysis
627 * @param protein_length_stats_by_dc
628 * @throws IOException
// Writes a Fitch-parsimony gain (or loss) dynamics report: per-combination change counts,
// then summary totals of combinations changed once vs. more than once, the distinct
// domains involved in each, and overall counts. Warning (per the javadoc above): when
// is_gains_analysis is false this REMOVES the changed combinations from
// 'all_bin_domain_combinations_encountered' (side effect on the caller's set).
// The counters 'one'/'above_one' and the increments inside the count loop, plus the
// throws clause, are on elided lines and not visible in this view.
630 public static void executeFitchGainsAnalysis( final File output_file,
631 final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
632 final int sum_of_all_domains_encountered,
633 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
634 final boolean is_gains_analysis )
636 checkForOutputFileWriteability( output_file );
637 final Writer out = ForesterUtil.createBufferedWriter( output_file );
// Count how many times each combination appears in the changed-list.
638 final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
639 .listToSortedCountsMap( all_bin_domain_combinations_changed );
640 final SortedSet<String> all_domains_in_combination_changed_more_than_once = new TreeSet<String>();
641 final SortedSet<String> all_domains_in_combination_changed_only_once = new TreeSet<String>();
644 for( final Object bdc_object : bdc_to_counts.keySet() ) {
645 final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
646 final int count = bdc_to_counts.get( bdc_object );
// Sanity check: a counted combination must have appeared at least once.
648 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "count < 1 " );
650 out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
// Branch for count > 1 (condition line elided in this view).
652 all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
653 all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
656 else if ( count == 1 ) {
657 all_domains_in_combination_changed_only_once.add( bdc.getId0() );
658 all_domains_in_combination_changed_only_once.add( bdc.getId1() );
662 final int all = all_bin_domain_combinations_encountered.size();
// Loss analysis only: combinations never lost are those encountered but not in the
// changed list; each is reported with a count of 0. This mutates the caller's set.
664 if ( !is_gains_analysis ) {
665 all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
666 never_lost = all_bin_domain_combinations_encountered.size();
667 for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
668 out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
671 if ( is_gains_analysis ) {
672 out.write( "Sum of all distinct domain combinations appearing once : " + one
673 + ForesterUtil.LINE_SEPARATOR );
674 out.write( "Sum of all distinct domain combinations appearing more than once : " + above_one
675 + ForesterUtil.LINE_SEPARATOR );
676 out.write( "Sum of all distinct domains in combinations apppearing only once : "
677 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
678 out.write( "Sum of all distinct domains in combinations apppearing more than once: "
679 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
// Loss-analysis summary branch (the "else {" line is elided in this view).
682 out.write( "Sum of all distinct domain combinations never lost : " + never_lost
683 + ForesterUtil.LINE_SEPARATOR );
684 out.write( "Sum of all distinct domain combinations lost once : " + one
685 + ForesterUtil.LINE_SEPARATOR );
686 out.write( "Sum of all distinct domain combinations lost more than once : " + above_one
687 + ForesterUtil.LINE_SEPARATOR );
688 out.write( "Sum of all distinct domains in combinations lost only once : "
689 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
690 out.write( "Sum of all distinct domains in combinations lost more than once: "
691 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
693 out.write( "All binary combinations : " + all
694 + ForesterUtil.LINE_SEPARATOR );
695 out.write( "All domains : "
696 + sum_of_all_domains_encountered );
699 .programMessage( surfacing.PRG_NAME,
700 "Wrote fitch domain combination dynamics counts analysis to \"" + output_file + "\"" );
705 * @param all_binary_domains_combination_lost_fitch
706 * @param use_last_in_fitch_parsimony
707 * @param perform_dc_fich
708 * @param consider_directedness_and_adjacency_for_bin_combinations
709 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
710 * which were gained under unweighted (Fitch) parsimony.
712 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
713 final boolean radomize_fitch_parsimony,
714 final String outfile_name,
715 final DomainParsimonyCalculator domain_parsimony,
716 final Phylogeny phylogeny,
717 final Map<String, List<GoId>> domain_id_to_go_ids_map,
718 final Map<GoId, GoTerm> go_id_to_term_map,
719 final GoNameSpace go_namespace_limit,
720 final String parameters_str,
721 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
722 final SortedSet<String> positive_filter,
723 final boolean output_binary_domain_combinations_for_graphs,
724 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
725 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
726 final BinaryDomainCombination.DomainCombinationType dc_type,
727 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
728 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
729 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain,
730 final Map<String, Integer> tax_code_to_id_map,
731 final boolean write_to_nexus,
732 final boolean use_last_in_fitch_parsimony,
733 final boolean perform_dc_fich ) {
734 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
735 final String date_time = ForesterUtil.getCurrentDateTime();
736 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
737 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
738 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
739 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
740 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
741 if ( write_to_nexus ) {
742 writeToNexus( outfile_name, domain_parsimony, phylogeny );
746 Phylogeny local_phylogeny_l = phylogeny.copy();
747 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
748 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
751 domain_parsimony.executeDolloParsimonyOnDomainPresence();
753 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(),
754 outfile_name + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS,
756 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(),
757 outfile_name + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS,
759 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
760 CharacterStateMatrix.GainLossStates.GAIN,
761 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
763 ForesterUtil.LINE_SEPARATOR,
765 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
766 CharacterStateMatrix.GainLossStates.LOSS,
767 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
769 ForesterUtil.LINE_SEPARATOR,
771 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
773 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D,
775 ForesterUtil.LINE_SEPARATOR,
778 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
782 domain_parsimony.getGainLossMatrix(),
783 CharacterStateMatrix.GainLossStates.GAIN,
784 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
786 ForesterUtil.LINE_SEPARATOR,
787 "Dollo Parsimony | Gains | Domains",
789 domain_id_to_secondary_features_maps,
790 all_pfams_encountered,
791 all_pfams_gained_as_domains,
793 tax_code_to_id_map );
794 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
798 domain_parsimony.getGainLossMatrix(),
799 CharacterStateMatrix.GainLossStates.LOSS,
800 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
802 ForesterUtil.LINE_SEPARATOR,
803 "Dollo Parsimony | Losses | Domains",
805 domain_id_to_secondary_features_maps,
806 all_pfams_encountered,
807 all_pfams_lost_as_domains,
809 tax_code_to_id_map );
810 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
814 domain_parsimony.getGainLossMatrix(),
816 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
818 ForesterUtil.LINE_SEPARATOR,
819 "Dollo Parsimony | Present | Domains",
821 domain_id_to_secondary_features_maps,
822 all_pfams_encountered,
825 tax_code_to_id_map );
826 preparePhylogeny( local_phylogeny_l,
829 "Dollo parsimony on domain presence/absence",
830 "dollo_on_domains_" + outfile_name,
832 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l,
833 outfile_name + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
835 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
836 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
838 catch ( final IOException e ) {
840 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
842 if ( perform_dc_fich && ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) ) {
843 // FITCH DOMAIN COMBINATIONS
844 // -------------------------
845 local_phylogeny_l = phylogeny.copy();
846 String randomization = "no";
847 if ( radomize_fitch_parsimony ) {
849 .executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
850 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
853 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( use_last_in_fitch_parsimony );
855 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(),
857 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS,
859 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(),
861 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS,
863 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
864 CharacterStateMatrix.GainLossStates.GAIN,
865 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
867 ForesterUtil.LINE_SEPARATOR,
869 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
870 CharacterStateMatrix.GainLossStates.LOSS,
872 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
874 ForesterUtil.LINE_SEPARATOR,
876 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
879 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC,
881 ForesterUtil.LINE_SEPARATOR,
883 if ( all_binary_domains_combination_gained_fitch != null ) {
884 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony
885 .getGainLossMatrix(), dc_type, all_binary_domains_combination_gained_fitch, true );
887 if ( all_binary_domains_combination_lost_fitch != null ) {
888 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony
889 .getGainLossMatrix(), dc_type, all_binary_domains_combination_lost_fitch, false );
891 if ( output_binary_domain_combinations_for_graphs ) {
892 SurfacingUtil.writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
893 .getGainLossMatrix(),
896 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
898 ForesterUtil.LINE_SEPARATOR,
899 BinaryDomainCombination.OutputFormat.DOT );
902 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
906 domain_parsimony.getGainLossMatrix(),
907 CharacterStateMatrix.GainLossStates.GAIN,
908 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
910 ForesterUtil.LINE_SEPARATOR,
911 "Fitch Parsimony | Gains | Domain Combinations",
914 all_pfams_encountered,
915 all_pfams_gained_as_dom_combinations,
917 tax_code_to_id_map );
918 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
922 domain_parsimony.getGainLossMatrix(),
923 CharacterStateMatrix.GainLossStates.LOSS,
924 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
926 ForesterUtil.LINE_SEPARATOR,
927 "Fitch Parsimony | Losses | Domain Combinations",
930 all_pfams_encountered,
931 all_pfams_lost_as_dom_combinations,
933 tax_code_to_id_map );
934 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
935 // go_id_to_term_map,
936 // go_namespace_limit,
938 // domain_parsimony.getGainLossMatrix(),
940 // outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
942 // ForesterUtil.LINE_SEPARATOR,
943 // "Fitch Parsimony | Present | Domain Combinations",
946 // all_pfams_encountered,
948 // "_fitch_present_dc",
949 // tax_code_to_id_map );
950 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
953 all_pfams_encountered );
954 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX,
955 all_pfams_gained_as_domains );
956 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
957 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
958 all_pfams_gained_as_dom_combinations );
959 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX,
960 all_pfams_lost_as_dom_combinations );
961 preparePhylogeny( local_phylogeny_l,
964 "Fitch parsimony on binary domain combination presence/absence randomization: "
966 "fitch_on_binary_domain_combinations_" + outfile_name,
969 .writePhylogenyToFile( local_phylogeny_l,
971 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
972 calculateIndependentDomainCombinationGains( local_phylogeny_l,
974 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
976 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
978 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
980 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
981 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
982 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
983 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
984 protein_length_stats_by_dc,
985 domain_number_stats_by_dc,
986 domain_length_stats_by_domain );
// Runs Dollo parsimony on secondary-feature presence/absence, then Fitch parsimony on
// the secondary-feature-mapped binary domain combinations for the given phylogeny.
// Writes gain/loss matrices, per-state lists, decorated trees and independent-gain
// statistics to files whose names are derived from outfile_name.
// NOTE(review): this listing is elided (the embedded line numbers jump, e.g. 1000 -> 1002),
// so several arguments/statements and the closing braces are missing from this view;
// the comments below describe only what is visible.
990 public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
991 final DomainParsimonyCalculator secondary_features_parsimony,
992 final Phylogeny phylogeny,
993 final String parameters_str,
994 final Map<Species, MappingResults> mapping_results_map,
995 final boolean use_last_in_fitch_parsimony ) {
996 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
997 final String date_time = ForesterUtil.getCurrentDateTime();
998 System.out.println();
// Nexus export of the secondary-feature presence/absence matrix.
999 writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
1000 secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
// Work on a copy so the caller's phylogeny is not decorated/modified.
1002 Phylogeny local_phylogeny_copy = phylogeny.copy();
1003 secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
1004 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(),
1005 outfile_name + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES,
1007 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(),
1009 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES,
// Per-state (gain / loss / present) list files for the Dollo run.
1011 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1012 CharacterStateMatrix.GainLossStates.GAIN,
1014 + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
1016 ForesterUtil.LINE_SEPARATOR,
1018 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1019 CharacterStateMatrix.GainLossStates.LOSS,
1021 + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
1023 ForesterUtil.LINE_SEPARATOR,
1025 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
1028 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
1030 ForesterUtil.LINE_SEPARATOR,
1032 preparePhylogeny( local_phylogeny_copy,
1033 secondary_features_parsimony,
1035 "Dollo parsimony on secondary feature presence/absence",
1036 "dollo_on_secondary_features_" + outfile_name,
1039 .writePhylogenyToFile( local_phylogeny_copy,
1040 outfile_name + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
1041 // FITCH DOMAIN COMBINATIONS
1042 // -------------------------
1043 local_phylogeny_copy = phylogeny.copy();
// Randomization is fixed to "no" in this secondary-features Fitch run.
1044 final String randomization = "no";
1045 secondary_features_parsimony
1046 .executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( use_last_in_fitch_parsimony );
1047 preparePhylogeny( local_phylogeny_copy,
1048 secondary_features_parsimony,
1050 "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
1052 "fitch_on_binary_domain_combinations_" + outfile_name,
1055 .writePhylogenyToFile( local_phylogeny_copy,
1057 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
// "_MAPPED_" output files distinguish this run from the primary (unmapped) analysis.
1058 calculateIndependentDomainCombinationGains( local_phylogeny_copy,
1060 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX,
1062 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX,
1064 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX,
1066 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX,
1067 outfile_name + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt",
1068 outfile_name + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt",
// Runs the "plus-minus" copy-number-difference analysis (high-copy base/target genomes
// vs. low-copy genomes) via DomainCountsDifferenceUtil, then reports the locations of
// the HTML/plain/GO-id output files.
// NOTE(review): elided listing — the try-block opening and some call arguments are not
// visible here; only what is shown is documented.
1075 public static void executePlusMinusAnalysis( final File output_file,
1076 final List<String> plus_minus_analysis_high_copy_base,
1077 final List<String> plus_minus_analysis_high_copy_target,
1078 final List<String> plus_minus_analysis_low_copy,
1079 final List<GenomeWideCombinableDomains> gwcd_list,
1080 final SortedMap<Species, List<Protein>> protein_lists_per_species,
1081 final Map<String, List<GoId>> domain_id_to_go_ids_map,
1082 final Map<GoId, GoTerm> go_id_to_term_map,
1083 final List<Object> plus_minus_analysis_numbers ) {
// Collect the species ids of all genomes under analysis.
1084 final Set<String> all_spec = new HashSet<String>();
1085 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1086 all_spec.add( gwcd.getSpecies().getSpeciesId() );
// Output files are all derived from output_file plus fixed suffixes.
1088 final File html_out_dom = new File( output_file + surfacing.PLUS_MINUS_DOM_SUFFIX_HTML );
1089 final File plain_out_dom = new File( output_file + surfacing.PLUS_MINUS_DOM_SUFFIX );
1090 final File html_out_dc = new File( output_file + surfacing.PLUS_MINUS_DC_SUFFIX_HTML );
1091 final File all_domains_go_ids_out_dom = new File( output_file + surfacing.PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
1092 final File passing_domains_go_ids_out_dom = new File( output_file
1093 + surfacing.PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
1094 final File proteins_file_base = new File( output_file + "" );
// plus_minus_analysis_numbers layout: element 0 = minimal difference (Integer),
// element 1 = factor (Double).
1095 final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
1096 final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
1098 DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
1099 protein_lists_per_species,
1100 plus_minus_analysis_high_copy_base,
1101 plus_minus_analysis_high_copy_target,
1102 plus_minus_analysis_low_copy,
1108 domain_id_to_go_ids_map,
1110 all_domains_go_ids_out_dom,
1111 passing_domains_go_ids_out_dom,
1112 proteins_file_base );
// Any I/O failure aborts the whole program run.
1114 catch ( final IOException e ) {
1115 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1117 ForesterUtil.programMessage( surfacing.PRG_NAME,
1118 "Wrote plus minus domain analysis results to \"" + html_out_dom + "\"" );
1119 ForesterUtil.programMessage( surfacing.PRG_NAME,
1120 "Wrote plus minus domain analysis results to \"" + plain_out_dom + "\"" );
1121 ForesterUtil.programMessage( surfacing.PRG_NAME,
1122 "Wrote plus minus domain analysis results to \"" + html_out_dc + "\"" );
1123 ForesterUtil.programMessage( surfacing.PRG_NAME,
1124 "Wrote plus minus domain analysis based passing GO ids to \""
1125 + passing_domains_go_ids_out_dom + "\"" );
1126 ForesterUtil.programMessage( surfacing.PRG_NAME,
1127 "Wrote plus minus domain analysis based all GO ids to \""
1128 + all_domains_go_ids_out_dom + "\"" );
// Writes one record per protein that contains all query domains in the given
// N- to C-terminal order (optionally restricted to limit_to_species): species id,
// protein id, each distinct domain id with its total count, then description and
// accession (when present and not NONE), fields separated by `separator`.
// NOTE(review): elided listing — the output Writer parameter (orig. line 1133,
// used below as `out`) and several statements/braces are not visible here.
1131 public static void extractProteinNames( final List<Protein> proteins,
1132 final List<String> query_domain_ids_nc_order,
1134 final String separator,
1135 final String limit_to_species )
1136 throws IOException {
1137 for( final Protein protein : proteins ) {
// Empty limit_to_species means: do not filter by species.
1138 if ( ForesterUtil.isEmpty( limit_to_species )
1139 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1140 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
1141 out.write( protein.getSpecies().getSpeciesId() );
1142 out.write( separator );
1143 out.write( protein.getProteinId().getId() );
1144 out.write( separator );
// Emit each domain id only once, with its total count.
1146 final Set<String> visited_domain_ids = new HashSet<String>();
1147 boolean first = true;
1148 for( final Domain domain : protein.getProteinDomains() ) {
1149 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
1150 visited_domain_ids.add( domain.getDomainId() );
1157 out.write( domain.getDomainId() );
1159 out.write( "" + domain.getTotalCount() );
1164 out.write( separator );
// Description/accession are written only when non-empty and not the NONE placeholder.
1165 if ( !( ForesterUtil.isEmpty( protein.getDescription() )
1166 || protein.getDescription().equals( SurfacingConstants.NONE ) ) ) {
1167 out.write( protein.getDescription() );
1169 out.write( separator );
1170 if ( !( ForesterUtil.isEmpty( protein.getAccession() )
1171 || protein.getAccession().equals( SurfacingConstants.NONE ) ) ) {
1172 out.write( protein.getAccession() );
1174 out.write( SurfacingConstants.NL );
// Per-species variant: for every protein (optionally restricted to limit_to_species)
// containing the single domain `domain_id`, writes species id, protein id, domain id,
// the from-to coordinates of each hit passing the per-domain E-value cutoff, the
// protein's full (sorted) domain architecture with coordinates and E-values, and
// finally description/accession. A negative domain_e_cutoff disables E-value filtering.
// NOTE(review): elided listing — the output Writer parameter (orig. line 1183, used
// below as `out`), the `prev_to` initialization and several braces are not visible.
1181 public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
1182 final String domain_id,
1184 final String separator,
1185 final String limit_to_species,
1186 final double domain_e_cutoff )
1187 throws IOException {
1188 //System.out.println( "Per domain E-value: " + domain_e_cutoff );
1189 for( final Species species : protein_lists_per_species.keySet() ) {
1190 //System.out.println( species + ":" );
1191 for( final Protein protein : protein_lists_per_species.get( species ) ) {
1192 if ( ForesterUtil.isEmpty( limit_to_species )
1193 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1194 final List<Domain> domains = protein.getProteinDomains( domain_id );
1195 if ( domains.size() > 0 ) {
1196 out.write( protein.getSpecies().getSpeciesId() );
1197 out.write( separator );
1198 out.write( protein.getProteinId().getId() );
1199 out.write( separator );
1200 out.write( domain_id.toString() );
1201 out.write( separator );
// Coordinates of each accepted hit of the query domain; `prev_to` tracks the
// previous hit's end to compute inter-hit distances (declaration elided).
1203 for( final Domain domain : domains ) {
1204 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
1206 out.write( domain.getFrom() + "-" + domain.getTo() );
1207 if ( prev_to >= 0 ) {
1208 final int l = domain.getFrom() - prev_to;
1209 // System.out.println( l );
1211 prev_to = domain.getTo();
1215 out.write( separator );
// Full architecture: collect all accepted domains, sort by position, then emit.
1216 final List<Domain> domain_list = new ArrayList<Domain>();
1217 for( final Domain domain : protein.getProteinDomains() ) {
1218 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
1219 domain_list.add( domain );
1222 final Domain domain_ary[] = new Domain[ domain_list.size() ];
1223 for( int i = 0; i < domain_list.size(); ++i ) {
1224 domain_ary[ i ] = domain_list.get( i );
1226 Arrays.sort( domain_ary, new DomainComparator( true ) );
1228 boolean first = true;
1229 for( final Domain domain : domain_ary ) {
1236 out.write( domain.getDomainId().toString() );
1237 out.write( ":" + domain.getFrom() + "-" + domain.getTo() );
1238 out.write( ":" + domain.getPerDomainEvalue() );
1241 if ( !( ForesterUtil.isEmpty( protein.getDescription() )
1242 || protein.getDescription().equals( SurfacingConstants.NONE ) ) ) {
1243 out.write( protein.getDescription() );
1245 out.write( separator );
1246 if ( !( ForesterUtil.isEmpty( protein.getAccession() )
1247 || protein.getAccession().equals( SurfacingConstants.NONE ) ) ) {
1248 out.write( protein.getAccession() );
1250 out.write( SurfacingConstants.NL );
// Returns the sorted union of all domain ids present in any of the given genomes.
// NOTE(review): elided listing — the loop/method closing braces (orig. 1264-1265)
// are not visible here.
1258 public static SortedSet<String> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
1259 final SortedSet<String> all_domains_ids = new TreeSet<String>();
1260 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1261 final Set<String> all_domains = gwcd.getAllDomainIds();
1262 // for( final Domain domain : all_domains ) {
1263 all_domains_ids.addAll( all_domains );
1266 return all_domains_ids;
// Tallies, per domain id, how many domain instances occur across all given proteins.
// NOTE(review): elided listing — the else-branch (presumably map.put( id, 1 ) for a
// first occurrence) and the return statement are not visible here; confirm against
// the full source.
1269 public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
1270 final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
1271 for( final Protein protein_domain_collection : protein_domain_collections ) {
1272 for( final Object name : protein_domain_collection.getProteinDomains() ) {
1273 final BasicDomain protein_domain = ( BasicDomain ) name;
1274 final String id = protein_domain.getDomainId();
1275 if ( map.containsKey( id ) ) {
1276 map.put( id, map.get( id ) + 1 );
// Counts the nodes of p (post-order) that have neither a name, a taxonomy scientific
// name, nor a taxonomy common name; for each such node appends its parent's name to
// `names` (used later in error messages).
// NOTE(review): elided listing — the counter declaration/increment and the return
// statement are not visible in this view. The raw `List` at orig. 1300 loses type
// information; also, the println inside the loop prints the whole list each iteration.
1286 public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
1287 final PhylogenyNodeIterator it = p.iteratorPostorder();
1289 while ( it.hasNext() ) {
1290 final PhylogenyNode n = it.next();
// A node "lacks a name" only if all three naming sources are empty/absent.
1291 if ( ForesterUtil.isEmpty( n.getName() )
1292 && ( !n.getNodeData().isHasTaxonomy()
1293 || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) )
1294 && ( !n.getNodeData().isHasTaxonomy()
1295 || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) ) {
1296 if ( n.getParent() != null ) {
1297 names.append( " " );
1298 names.append( n.getParent().getName() );
1300 final List l = n.getAllExternalDescendants();
1301 for( final Object object : l ) {
1302 System.out.println( l.toString() );
// Appends a log line to w, converting any IOException into a program-level fatal error.
// NOTE(review): elided listing — the write of `msg` itself (between orig. 1310 and
// 1313) is not visible; only the trailing line-separator write is shown.
1310 public static void log( final String msg, final Writer w ) {
1313 w.write( ForesterUtil.LINE_SEPARATOR );
1315 catch ( final IOException e ) {
1316 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
// Reads one phylogeny from each input tree file and validates it: file readable,
// exactly one phylogeny per file, non-empty, rooted, and every node named (or the
// name derivable from taxonomy); prepares each tree for parsimony analyses and warns
// if a tree is not completely binary. Returns the array of processed trees.
// NOTE(review): elided listing — the index-variable declaration (used as `i` at
// orig. 1369), the try opening, and several closing braces are not visible here.
1320 public static Phylogeny[] obtainAndPreProcessIntrees( final File[] intree_files,
1321 final int number_of_genomes,
1322 final String[][] input_file_properties ) {
1323 final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
1325 for( final File intree_file : intree_files ) {
1326 Phylogeny intree = null;
1327 final String error = ForesterUtil.isReadableFile( intree_file );
1328 if ( !ForesterUtil.isEmpty( error ) ) {
1329 ForesterUtil.fatalError( surfacing.PRG_NAME,
1330 "cannot read input tree file [" + intree_file + "]: " + error );
1333 final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
1334 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
1335 if ( p_array.length < 1 ) {
1336 ForesterUtil.fatalError( surfacing.PRG_NAME,
1337 "file [" + intree_file
1338 + "] does not contain any phylogeny in phyloXML format" );
1340 else if ( p_array.length > 1 ) {
1341 ForesterUtil.fatalError( surfacing.PRG_NAME,
1342 "file [" + intree_file
1343 + "] contains more than one phylogeny in phyloXML format" );
1345 intree = p_array[ 0 ];
1347 catch ( final Exception e ) {
// NOTE(review): this reports `error` (the earlier readability-check result, empty at
// this point), not the caught exception — likely intended e.getLocalizedMessage().
1348 ForesterUtil.fatalError( surfacing.PRG_NAME,
1349 "failed to read input tree from file [" + intree_file + "]: " + error );
1351 if ( ( intree == null ) || intree.isEmpty() ) {
1352 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
1354 if ( !intree.isRooted() ) {
1355 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
1357 final StringBuilder parent_names = new StringBuilder();
1358 final int nodes_lacking_name = getNumberOfNodesLackingName( intree, parent_names );
1359 if ( nodes_lacking_name > 0 ) {
1360 ForesterUtil.fatalError( surfacing.PRG_NAME,
1361 "input tree [" + intree_file + "] has " + nodes_lacking_name
1362 + " node(s) lacking a name [parent names:" + parent_names + "]" );
1364 preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
// Non-binary trees are tolerated with a warning only (parsimony still proceeds).
1365 if ( !intree.isCompletelyBinary() ) {
1366 ForesterUtil.printWarningMessage( surfacing.PRG_NAME,
1367 "input tree [" + intree_file + "] is not completely binary" );
1369 intrees[ i++ ] = intree;
// Reads and validates a single phylogeny from intree_file: file must be readable,
// contain exactly one phyloXML phylogeny, be non-empty and rooted.
// NOTE(review): elided listing — the assignment of the parsed phylogeny to `intree`
// (around orig. 1395) and the final return statement are not visible here.
1374 public static Phylogeny obtainFirstIntree( final File intree_file ) {
1375 Phylogeny intree = null;
1376 final String error = ForesterUtil.isReadableFile( intree_file );
1377 if ( !ForesterUtil.isEmpty( error ) ) {
1378 ForesterUtil.fatalError( surfacing.PRG_NAME,
1379 "cannot read input tree file [" + intree_file + "]: " + error );
1382 final Phylogeny[] phys = ParserBasedPhylogenyFactory.getInstance()
1383 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
1384 if ( phys.length < 1 ) {
1386 .fatalError( surfacing.PRG_NAME,
1387 "file [" + intree_file + "] does not contain any phylogeny in phyloXML format" );
1389 else if ( phys.length > 1 ) {
1391 .fatalError( surfacing.PRG_NAME,
1392 "file [" + intree_file + "] contains more than one phylogeny in phyloXML format" );
1396 catch ( final Exception e ) {
// NOTE(review): reports `error` (readability result, empty here) instead of the
// caught exception — same issue as in obtainAndPreProcessIntrees; verify intent.
1397 ForesterUtil.fatalError( surfacing.PRG_NAME,
1398 "failed to read input tree from file [" + intree_file + "]: " + error );
1400 if ( ( intree == null ) || intree.isEmpty() ) {
1401 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
1403 if ( !intree.isRooted() ) {
1404 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
// Lazily computes and caches (in _TAXCODE_HEXCOLORSTRING_MAP) a "#rrggbb" hex color
// string for a taxonomy code: resolves the code's taxonomy group via the species tree,
// maps the group to a Color, and formats it. Throws IllegalArgumentException when no
// color exists for the group, or when the code is unknown and no usable tree is given.
// NOTE(review): elided listing — the `if ( c == null )` guard line (orig. 1438) before
// the throw is not visible here.
1409 public static String obtainHexColorStringDependingOnTaxonomyGroup( final String tax_code, final Phylogeny phy )
1410 throws IllegalArgumentException {
1411 if ( !_TAXCODE_HEXCOLORSTRING_MAP.containsKey( tax_code ) ) {
1412 if ( ( phy != null ) && !phy.isEmpty() ) {
1413 // final List<PhylogenyNode> nodes = phy.getNodesViaTaxonomyCode( tax_code );
1415 // if ( ( nodes == null ) || nodes.isEmpty() ) {
1416 // throw new IllegalArgumentException( "code " + tax_code + " is not found" );
1418 // if ( nodes.size() != 1 ) {
1419 // throw new IllegalArgumentException( "code " + tax_code + " is not unique" );
1421 // PhylogenyNode n = nodes.get( 0 );
1422 // while ( n != null ) {
1423 // if ( n.getNodeData().isHasTaxonomy()
1424 // && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
1425 // c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( n.getNodeData().getTaxonomy()
1426 // .getScientificName(), tax_code );
1428 // if ( ( c == null ) && !ForesterUtil.isEmpty( n.getName() ) ) {
1429 // c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( n.getName(), tax_code );
1431 // if ( c != null ) {
1434 // n = n.getParent();
// Current implementation delegates the tree walk to obtainTaxonomyGroup (the
// commented-out block above is the superseded inline version).
1436 final String group = obtainTaxonomyGroup( tax_code, phy );
1437 final Color c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( group );
1439 throw new IllegalArgumentException( "no color found for taxonomy group \"" + group
1440 + "\" for code \"" + tax_code + "\"" );
1442 final String hex = String.format( "#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue() );
1443 _TAXCODE_HEXCOLORSTRING_MAP.put( tax_code, hex );
1446 throw new IllegalArgumentException( "unable to obtain color for code " + tax_code
1447 + " (tree is null or empty and code is not in map)" );
1450 return _TAXCODE_HEXCOLORSTRING_MAP.get( tax_code );
// Resolves a taxonomy code to a normalized taxonomy-group name by locating the code's
// (unique) node in the species tree and walking toward the root until a group can be
// derived from a scientific name or node name; defaults to TaxonomyGroups.OTHER.
// Results are cached in _TAXCODE_TAXGROUP_MAP. Throws IllegalArgumentException when
// the code is missing/ambiguous in the tree, or unknown with no usable tree.
// NOTE(review): elided listing — loop-exit and several closing braces (e.g. orig.
// 1476-1479) are not visible here.
1453 public static String obtainTaxonomyGroup( final String tax_code, final Phylogeny species_tree )
1454 throws IllegalArgumentException {
1455 if ( !_TAXCODE_TAXGROUP_MAP.containsKey( tax_code ) ) {
1456 if ( ( species_tree != null ) && !species_tree.isEmpty() ) {
1457 final List<PhylogenyNode> nodes = species_tree.getNodesViaTaxonomyCode( tax_code );
1458 if ( ( nodes == null ) || nodes.isEmpty() ) {
1459 throw new IllegalArgumentException( "code " + tax_code + " is not found" );
1461 if ( nodes.size() != 1 ) {
1462 throw new IllegalArgumentException( "code " + tax_code + " is not unique" );
// Walk from the matched node up toward the root until a group is found.
1464 PhylogenyNode n = nodes.get( 0 );
1465 String group = null;
1466 while ( n != null ) {
1467 if ( n.getNodeData().isHasTaxonomy()
1468 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
1469 group = ForesterUtil
1470 .obtainNormalizedTaxonomyGroup( n.getNodeData().getTaxonomy().getScientificName() );
// Fall back to the plain node name when the scientific name yielded nothing.
1472 if ( ForesterUtil.isEmpty( group ) && !ForesterUtil.isEmpty( n.getName() ) ) {
1473 group = ForesterUtil.obtainNormalizedTaxonomyGroup( n.getName() );
1475 if ( !ForesterUtil.isEmpty( group ) ) {
1480 if ( ForesterUtil.isEmpty( group ) ) {
1481 group = TaxonomyGroups.OTHER;
1483 _TAXCODE_TAXGROUP_MAP.put( tax_code, group );
1486 throw new IllegalArgumentException( "unable to obtain group for code " + tax_code
1487 + " (tree is null or empty and code is not in map)" );
1490 return _TAXCODE_TAXGROUP_MAP.get( tax_code );
// Writes two tab-separated reports: (1) each domain architecture (DA) whose count is
// >= min_count together with that count, and (2) for each such DA, every genome whose
// DA set contains it. Fails fatally on I/O errors.
// NOTE(review): elided listing — the try opening and several closing braces are not
// visible. The inner scan over all genomes for every qualifying DA is
// O(|DAs| x |genomes|); acceptable for typical dataset sizes but worth knowing.
// (Parameter names "architecutures"/"architecuture" carry the original's typo.)
1493 public static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
1494 final SortedMap<String, Integer> domain_architecuture_counts,
1495 final int min_count,
1496 final File da_counts_outfile,
1497 final File unique_da_outfile ) {
1498 checkForOutputFileWriteability( da_counts_outfile );
1499 checkForOutputFileWriteability( unique_da_outfile );
1501 final BufferedWriter da_counts_out = new BufferedWriter( new FileWriter( da_counts_outfile ) );
1502 final BufferedWriter unique_da_out = new BufferedWriter( new FileWriter( unique_da_outfile ) );
1503 final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
1504 while ( it.hasNext() ) {
1505 final Map.Entry<String, Integer> e = it.next();
1506 final String da = e.getKey();
1507 final int count = e.getValue();
// Only DAs meeting the count threshold are reported.
1508 if ( count >= min_count ) {
1509 da_counts_out.write( da );
1510 da_counts_out.write( "\t" );
1511 da_counts_out.write( String.valueOf( count ) );
1512 da_counts_out.write( ForesterUtil.LINE_SEPARATOR );
// For the qualifying DA, list every genome that contains it.
1515 final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
1516 while ( it2.hasNext() ) {
1517 final Map.Entry<String, Set<String>> e2 = it2.next();
1518 final String genome = e2.getKey();
1519 final Set<String> das = e2.getValue();
1520 if ( das.contains( da ) ) {
1521 unique_da_out.write( genome );
1522 unique_da_out.write( "\t" );
1523 unique_da_out.write( da );
1524 unique_da_out.write( ForesterUtil.LINE_SEPARATOR );
1529 unique_da_out.close();
1530 da_counts_out.close();
1532 catch ( final IOException e ) {
1533 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// NOTE(review): these messages say "distance matrices" although the files written
// above are DA counts / unique DAs — likely copy-pasted; confirm before changing.
1535 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + da_counts_outfile + "\"" );
1536 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + unique_da_outfile + "\"" );
// Decorates phylogeny p with the given parsimony results and stamps it with a
// description (method, date, cost, gains, losses, unchanged, parameters), a
// parsimony-cost confidence value, and fixed rooting (rooted, not re-rootable).
// NOTE(review): elided listing — one parameter line (orig. 1544) is missing from
// this view, so the full signature cannot be confirmed here.
1540 public static void preparePhylogeny( final Phylogeny p,
1541 final DomainParsimonyCalculator domain_parsimony,
1542 final String date_time,
1543 final String method,
1545 final String parameters_str ) {
1546 domain_parsimony.decoratePhylogenyWithDomains( p );
1547 final StringBuilder desc = new StringBuilder();
1548 desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
1549 desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
1550 desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
1551 desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
1552 desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
1553 desc.append( "[Parameters: " + parameters_str + "]" );
1555 p.setDescription( desc.toString() );
// Parsimony cost doubles as a confidence value on the tree.
1556 p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
1557 p.setRerootable( false );
1558 p.setRooted( true );
// Prepares the input species tree for parsimony analyses: verifies each genome name
// maps to a unique tree node, fills in missing node names from taxonomy data
// (taxonomy code, then scientific name, then common name), deletes external nodes
// that are not among the analyzed genomes, and finally fatal-errors if any genome
// node is missing from or duplicated in the tree.
// NOTE(review): elided listing — several closing braces and one else-branch head
// (around orig. 1588-1590) are not visible here.
1561 public static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
1562 final String[][] input_file_properties ) {
// input_file_properties rows are [path, species-code]; column 1 holds the genome name.
1563 final String[] genomes = new String[ input_file_properties.length ];
1564 for( int i = 0; i < input_file_properties.length; ++i ) {
1565 if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
1566 ForesterUtil.fatalError( surfacing.PRG_NAME,
1567 "node named [" + input_file_properties[ i ][ 1 ]
1568 + "] is not unique in input tree " + intree.getName() );
1570 genomes[ i ] = input_file_properties[ i ][ 1 ];
// Name any unnamed node from its taxonomy, preferring code > scientific > common name.
1573 final PhylogenyNodeIterator it = intree.iteratorPostorder();
1574 while ( it.hasNext() ) {
1575 final PhylogenyNode n = it.next();
1576 if ( ForesterUtil.isEmpty( n.getName() ) ) {
1577 if ( n.getNodeData().isHasTaxonomy()
1578 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
1579 n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
1581 else if ( n.getNodeData().isHasTaxonomy()
1582 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
1583 n.setName( n.getNodeData().getTaxonomy().getScientificName() );
1585 else if ( n.getNodeData().isHasTaxonomy()
1586 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
1587 n.setName( n.getNodeData().getTaxonomy().getCommonName() );
1591 .fatalError( surfacing.PRG_NAME,
1592 "node with no name, scientific name, common name, or taxonomy code present" );
// Prune external nodes that do not correspond to any analyzed genome.
1596 final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
1597 if ( igns.size() > 0 ) {
1598 System.out.println( "Not using the following " + igns.size() + " nodes:" );
1599 for( int i = 0; i < igns.size(); ++i ) {
1600 System.out.println( " " + i + ": " + igns.get( i ) );
1602 System.out.println( "--" );
1604 //Test for node names:
1605 final SortedSet<String> not_found = new TreeSet<String>();
1606 final SortedSet<String> not_unique = new TreeSet<String>();
1607 for( final String[] input_file_propertie : input_file_properties ) {
1608 final String name = input_file_propertie[ 1 ];
1609 final List<PhylogenyNode> nodes = intree.getNodes( name );
1610 if ( ( nodes == null ) || ( nodes.size() < 1 ) ) {
1611 not_found.add( name );
1613 if ( nodes.size() > 1 ) {
1614 not_unique.add( name );
1617 if ( not_found.size() > 0 ) {
1618 ForesterUtil.fatalError( surfacing.PRG_NAME,
1619 "the following " + not_found.size()
1620 + " node(s) are not present in the input tree: " + not_found );
1622 if ( not_unique.size() > 0 ) {
1623 ForesterUtil.fatalError( surfacing.PRG_NAME,
1624 "the following " + not_unique.size()
1625 + " node(s) are not unique in the input tree: " + not_unique );
// Computes and reports (to stdout and the log writer) the percentage of proteins with
// more than one domain, from a histogram mapping domains-per-protein -> protein count.
// NOTE(review): elided listing — the `sum` accumulator declaration (orig. 1631) is not
// visible. Also: get( 1 ) will NPE if no single-domain bin exists, and division by a
// zero sum yields NaN — confirm callers guarantee both; the logged message contains a
// doubled colon ("proteins: :"). (Parameter name keeps the original "potein" typo.)
1629 public static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
1630 final Writer log_writer ) {
1632 for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
1633 sum += entry.getValue();
// Multidomain fraction = (total proteins - single-domain proteins) / total.
1635 final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
1636 ForesterUtil.programMessage( surfacing.PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
1637 log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
// Loads one entry per line from filter_file into the given filter set (fatal on I/O
// error) and, when verbose, prints the resulting filter contents.
// NOTE(review): elided listing — the try opening and some closing braces are not
// visible here.
1640 public static void processFilter( final File filter_file, final SortedSet<String> filter ) {
1641 SortedSet<String> filter_str = null;
1643 filter_str = ForesterUtil.file2set( filter_file );
1645 catch ( final IOException e ) {
1646 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1648 if ( filter_str != null ) {
1649 for( final String string : filter_str ) {
1650 filter.add( string );
1653 if ( surfacing.VERBOSE ) {
1654 System.out.println( "Filter:" );
1655 for( final String domainId : filter ) {
1656 System.out.println( domainId );
1661 public static String[][] processInputGenomesFile( final File input_genomes ) {
1662 String[][] input_file_properties = null;
1664 input_file_properties = ForesterUtil.file22dArray( input_genomes );
1666 catch ( final IOException e ) {
1667 ForesterUtil.fatalError( surfacing.PRG_NAME,
1668 "genomes files is to be in the following format \"<hmmpfam output file> <species>\": "
1669 + e.getLocalizedMessage() );
1671 final Set<String> specs = new HashSet<String>();
1672 final Set<String> paths = new HashSet<String>();
1673 for( int i = 0; i < input_file_properties.length; ++i ) {
1674 if ( !PhyloXmlUtil.TAXOMONY_CODE_PATTERN.matcher( input_file_properties[ i ][ 1 ] ).matches() ) {
1675 ForesterUtil.fatalError( surfacing.PRG_NAME,
1676 "illegal format for species code: " + input_file_properties[ i ][ 1 ] );
1678 if ( specs.contains( input_file_properties[ i ][ 1 ] ) ) {
1679 ForesterUtil.fatalError( surfacing.PRG_NAME,
1680 "species code " + input_file_properties[ i ][ 1 ] + " is not unique" );
1682 specs.add( input_file_properties[ i ][ 1 ] );
1683 if ( paths.contains( input_file_properties[ i ][ 0 ] ) ) {
1684 ForesterUtil.fatalError( surfacing.PRG_NAME,
1685 "path " + input_file_properties[ i ][ 0 ] + " is not unique" );
1687 paths.add( input_file_properties[ i ][ 0 ] );
1688 final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
1689 if ( !ForesterUtil.isEmpty( error ) ) {
1690 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
1693 return input_file_properties;
1696 public static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
1697 final List<String> high_copy_base,
1698 final List<String> high_copy_target,
1699 final List<String> low_copy,
1700 final List<Object> numbers ) {
1701 if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
1702 if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
1703 ForesterUtil.fatalError( surfacing.PRG_NAME,
1704 "no value for 'plus-minus' file: -" + surfacing.PLUS_MINUS_ANALYSIS_OPTION
1707 final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
1708 final String msg = ForesterUtil.isReadableFile( plus_minus_file );
1709 if ( !ForesterUtil.isEmpty( msg ) ) {
1710 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
1712 processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
1716 // First numbers is minimal difference, second is factor.
1717 public static void processPlusMinusFile( final File plus_minus_file,
1718 final List<String> high_copy_base,
1719 final List<String> high_copy_target,
1720 final List<String> low_copy,
1721 final List<Object> numbers ) {
1722 Set<String> species_set = null;
1723 int min_diff = surfacing.PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
1724 double factor = surfacing.PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
1726 species_set = ForesterUtil.file2set( plus_minus_file );
1728 catch ( final IOException e ) {
1729 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1731 if ( species_set != null ) {
1732 for( final String species : species_set ) {
1733 final String species_trimmed = species.substring( 1 );
1734 if ( species.startsWith( "+" ) ) {
1735 if ( low_copy.contains( species_trimmed ) ) {
1736 ForesterUtil.fatalError( surfacing.PRG_NAME,
1737 "species/genome names can not appear with both '+' and '-' suffix, as appears the case for: \""
1738 + species_trimmed + "\"" );
1740 high_copy_base.add( species_trimmed );
1742 else if ( species.startsWith( "*" ) ) {
1743 if ( low_copy.contains( species_trimmed ) ) {
1744 ForesterUtil.fatalError( surfacing.PRG_NAME,
1745 "species/genome names can not appear with both '*' and '-' suffix, as appears the case for: \""
1746 + species_trimmed + "\"" );
1748 high_copy_target.add( species_trimmed );
1750 else if ( species.startsWith( "-" ) ) {
1751 if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
1752 ForesterUtil.fatalError( surfacing.PRG_NAME,
1753 "species/genome names can not appear with both '+' or '*' and '-' suffix, as appears the case for: \""
1754 + species_trimmed + "\"" );
1756 low_copy.add( species_trimmed );
1758 else if ( species.startsWith( "$D" ) ) {
1760 min_diff = Integer.parseInt( species.substring( 3 ) );
1762 catch ( final NumberFormatException e ) {
1763 ForesterUtil.fatalError( surfacing.PRG_NAME,
1764 "could not parse integer value for minimal difference from: \""
1765 + species.substring( 3 ) + "\"" );
1768 else if ( species.startsWith( "$F" ) ) {
1770 factor = Double.parseDouble( species.substring( 3 ) );
1772 catch ( final NumberFormatException e ) {
1773 ForesterUtil.fatalError( surfacing.PRG_NAME,
1774 "could not parse double value for factor from: \""
1775 + species.substring( 3 ) + "\"" );
1778 else if ( species.startsWith( "#" ) ) {
1782 ForesterUtil.fatalError( surfacing.PRG_NAME,
1783 "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' minimal Difference (default is 1), '$F=<double>' factor (default is 1.0), double), or '#' (ignore) suffix, encountered: \""
1786 numbers.add( new Integer( min_diff + "" ) );
1787 numbers.add( new Double( factor + "" ) );
1791 ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
// Serializes one protein into separator-delimited rows with the columns:
// species | protein id | n-terminal domain | c-terminal domain |
// n-terminal per-domain E-value | c-terminal per-domain E-value |
// n-terminal domain count | c-terminal domain count.
// Multi-domain proteins produce one row per distinct ordered domain pair;
// proteins with one or zero domains produce a single row with empty columns.
// NOTE(review): the embedded leading numbers are original-file line numbers;
// gaps indicate lines (mostly braces, and e.g. the presumed dcs.add( dc ))
// elided from this extract.
1796 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
1800 static public StringBuffer proteinToDomainCombinations( final Protein protein,
1801 final String protein_id,
1802 final String separator ) {
1803 final StringBuffer sb = new StringBuffer();
// Precondition: species (column 1) must be present and non-empty.
1804 if ( protein.getSpecies() == null ) {
1805 throw new IllegalArgumentException( "species must not be null" );
1807 if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
1808 throw new IllegalArgumentException( "species id must not be empty" );
1810 final List<Domain> domains = protein.getProteinDomains();
1811 if ( domains.size() > 1 ) {
// Per-protein copy number of each domain id (for the last two columns).
1812 final Map<String, Integer> counts = new HashMap<String, Integer>();
1813 for( final Domain domain : domains ) {
1814 final String id = domain.getDomainId();
1815 if ( counts.containsKey( id ) ) {
1816 counts.put( id, counts.get( id ) + 1 );
1819 counts.put( id, 1 );
// dcs de-duplicates domain pairs so each combination is emitted once.
1822 final Set<String> dcs = new HashSet<String>();
1823 for( int i = 1; i < domains.size(); ++i ) {
1824 for( int j = 0; j < i; ++j ) {
1825 Domain domain_n = domains.get( i );
1826 Domain domain_c = domains.get( j );
// Order the pair by sequence position: n-terminal = smaller getFrom().
1827 if ( domain_n.getFrom() > domain_c.getFrom() ) {
1828 domain_n = domains.get( j );
1829 domain_c = domains.get( i );
1831 final String dc = domain_n.getDomainId() + domain_c.getDomainId();
1832 if ( !dcs.contains( dc ) ) {
1834 sb.append( protein.getSpecies() );
1835 sb.append( separator );
1836 sb.append( protein_id );
1837 sb.append( separator );
1838 sb.append( domain_n.getDomainId() );
1839 sb.append( separator );
1840 sb.append( domain_c.getDomainId() );
1841 sb.append( separator );
1842 sb.append( domain_n.getPerDomainEvalue() );
1843 sb.append( separator );
1844 sb.append( domain_c.getPerDomainEvalue() );
1845 sb.append( separator );
1846 sb.append( counts.get( domain_n.getDomainId() ) );
1847 sb.append( separator );
1848 sb.append( counts.get( domain_c.getDomainId() ) );
1849 sb.append( ForesterUtil.LINE_SEPARATOR );
// Single-domain protein: one row, c-terminal columns left empty.
1854 else if ( domains.size() == 1 ) {
1855 sb.append( protein.getSpecies() );
1856 sb.append( separator );
1857 sb.append( protein_id );
1858 sb.append( separator );
1859 sb.append( domains.get( 0 ).getDomainId() );
1860 sb.append( separator );
1861 sb.append( separator );
1862 sb.append( domains.get( 0 ).getPerDomainEvalue() );
1863 sb.append( separator );
1864 sb.append( separator );
1866 sb.append( separator );
1867 sb.append( ForesterUtil.LINE_SEPARATOR );
// Domain-less protein: one row of empty columns.
1870 sb.append( protein.getSpecies() );
1871 sb.append( separator );
1872 sb.append( protein_id );
1873 sb.append( separator );
1874 sb.append( separator );
1875 sb.append( separator );
1876 sb.append( separator );
1877 sb.append( separator );
1878 sb.append( separator );
1879 sb.append( ForesterUtil.LINE_SEPARATOR );
1884 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1885 final List<Domain> domains = new ArrayList<Domain>();
1886 for( final Domain d : protein.getProteinDomains() ) {
1889 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// Records the distinct domain-architecture strings of one genome into
// domain_architecutures (keyed by genome name) and updates the cross-genome
// per-architecture counts. Declared to return int -- presumably the number of
// distinct architectures for this genome; the return statement is not visible
// in this extract (TODO confirm). Embedded leading numbers are original-file
// line numbers; gaps mark elided lines (e.g. the else brace, da.add()).
1893 public static int storeDomainArchitectures( final String genome,
1894 final SortedMap<String, Set<String>> domain_architecutures,
1895 final List<Protein> protein_list,
1896 final Map<String, Integer> distinct_domain_architecuture_counts ) {
// Distinct architectures seen so far for this genome.
1897 final Set<String> da = new HashSet<String>();
1898 domain_architecutures.put( genome, da );
1899 for( final Protein protein : protein_list ) {
// Architecture string, e.g. domain ids joined by '~' (format args: "~", 3, "=").
1900 final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~", 3, "=" );
// Only the first occurrence per genome contributes to the global counts.
1901 if ( !da.contains( da_str ) ) {
1902 if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
1903 distinct_domain_architecuture_counts.put( da_str, 1 );
1906 distinct_domain_architecuture_counts.put( da_str,
1907 distinct_domain_architecuture_counts.get( da_str ) + 1 );
// For every internal node of phylogeny p, writes one file listing all domains
// gained (get_gains == true) or lost (get_gains == false) anywhere in the
// subtree rooted at that node; files are named <node name><suffix_for_filename>
// under a freshly created base directory. Embedded leading numbers are
// original-file line numbers; gaps mark elided lines (the "if ( !get_gains )"
// guard, remaining createBaseDirForPerNodeDomainFiles() arguments,
// writer.close(), closing braces).
1915 public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
1916 final boolean get_gains,
1917 final String outdir,
1918 final String suffix_for_filename )
1919 throws IOException {
1920 CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
// presumably inside "if ( !get_gains )" -- guard elided from this extract.
1922 state = CharacterStateMatrix.GainLossStates.LOSS;
1924 final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
// Post-order walk; only internal nodes get a per-subtree file.
1928 for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
1929 final PhylogenyNode node = it.next();
1930 if ( !node.isExternal() ) {
1931 final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
// Skip nodes with no changed domains (no empty files).
1932 if ( domains.size() > 0 ) {
1933 final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
1934 + node.getName() + suffix_for_filename );
1935 for( final String domain : domains ) {
1936 writer.write( domain );
1937 writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes all binary domain combinations of genome i (indexing into
// input_file_properties) in DOT graph-describing language to
// <species><DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS>, optionally
// under output_dir, then prints a confirmation message. Embedded leading
// numbers are original-file line numbers; gaps mark elided lines -- notably the
// "final int i" parameter (line 1948), "try {", out_dot.close(), and braces.
1945 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1946 final File output_dir,
1947 final GenomeWideCombinableDomains gwcd,
1949 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
// Output file named after the species code (column 1) of genome i.
1950 File dc_outfile_dot = new File( input_file_properties[ i ][ 1 ]
1951 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1952 if ( output_dir != null ) {
1953 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
1955 checkForOutputFileWriteability( dc_outfile_dot );
1956 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1958 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
1959 for( final BinaryDomainCombination bdc : binary_combinations ) {
1960 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1962 out_dot.write( SurfacingConstants.NL );
1966 catch ( final IOException e ) {
1967 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Rows may have 2 or 3 columns; the message includes the third if present.
1969 if ( input_file_properties[ i ].length == 3 ) {
1971 .programMessage( surfacing.PRG_NAME,
1972 "Wrote binary domain combination for \"" + input_file_properties[ i ][ 0 ] + "\" ("
1973 + input_file_properties[ i ][ 1 ] + ", " + input_file_properties[ i ][ 2 ]
1974 + ") to: \"" + dc_outfile_dot + "\"" );
1977 ForesterUtil.programMessage( surfacing.PRG_NAME,
1978 "Wrote binary domain combination for \"" + input_file_properties[ i ][ 0 ]
1979 + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
1980 + dc_outfile_dot + "\"" );
// Writes, per (sorted) matrix identifier, the list of characters whose
// gain/loss state equals 'state' (or, when state == null, is GAIN or
// UNCHANGED_PRESENT), optionally followed by a description looked up in
// 'descriptions'. Embedded leading numbers are original-file line numbers;
// gaps mark elided lines ("try {", out.close(), closing braces, and whatever
// is written between a character and its description at line 2012).
1984 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1985 final CharacterStateMatrix.GainLossStates state,
1986 final String filename,
1987 final String indentifier_characters_separator,
1988 final String character_separator,
1989 final Map<String, String> descriptions ) {
1990 final File outfile = new File( filename );
1991 checkForOutputFileWriteability( outfile );
// Identifiers are emitted in sorted order.
1992 final SortedSet<String> sorted_ids = new TreeSet<String>();
1993 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1994 sorted_ids.add( matrix.getIdentifier( i ) );
1997 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1998 for( final String id : sorted_ids ) {
1999 out.write( indentifier_characters_separator );
2000 out.write( "#" + id );
2001 out.write( indentifier_characters_separator );
2002 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
2004 // using null to indicate either UNCHANGED_PRESENT or GAIN.
2005 if ( ( matrix.getState( id, c ) == state ) || ( ( state == null )
2006 && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
2008 c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
2009 out.write( matrix.getCharacter( c ) );
// Optional human-readable description for this character, if provided.
2010 if ( ( descriptions != null ) && !descriptions.isEmpty()
2011 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
2013 out.write( descriptions.get( matrix.getCharacter( c ) ) );
2015 out.write( character_separator );
2022 catch ( final IOException e ) {
2023 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2025 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain
// combinations: each matching character is parsed into a
// BasicBinaryDomainCombination and emitted in the requested graph-describing
// language (bc_output_format) instead of verbatim. Embedded leading numbers
// are original-file line numbers; gaps mark elided lines ("try {", out.close(),
// closing braces).
2028 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
2029 final CharacterStateMatrix.GainLossStates state,
2030 final String filename,
2031 final String indentifier_characters_separator,
2032 final String character_separator,
2033 final BinaryDomainCombination.OutputFormat bc_output_format ) {
2034 final File outfile = new File( filename );
2035 checkForOutputFileWriteability( outfile );
// Identifiers are emitted in sorted order.
2036 final SortedSet<String> sorted_ids = new TreeSet<String>();
2037 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
2038 sorted_ids.add( matrix.getIdentifier( i ) );
2041 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
2042 for( final String id : sorted_ids ) {
2043 out.write( indentifier_characters_separator );
2044 out.write( "#" + id );
2045 out.write( indentifier_characters_separator );
2046 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
2048 // using null to indicate either UNCHANGED_PRESENT or GAIN.
2049 if ( ( matrix.getState( id, c ) == state ) || ( ( state == null )
2050 && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
2052 c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
2053 BinaryDomainCombination bdc = null;
// Character string -> BinaryDomainCombination; parse failure is fatal.
2055 bdc = BasicBinaryDomainCombination.obtainInstance( matrix.getCharacter( c ) );
2057 catch ( final Exception e ) {
2058 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2060 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
2061 out.write( character_separator );
2068 catch ( final IOException e ) {
2069 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2071 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Writes a detailed HTML report of the characters (Pfam domains or domain
// combinations) matching 'state' (or GAIN/UNCHANGED_PRESENT when state ==
// null) per tree-node identifier, with GO-term annotations, plus one plain
// per-node gain/loss file per identifier. Also accumulates every encountered
// Pfam into all_pfams_encountered / pfams_gained_or_lost. Embedded leading
// numbers are original-file line numbers; gaps mark elided lines ("try {",
// matcher-branch bodies, most writeDomainData() arguments, out.close(),
// closing braces).
2074 public static void writeBinaryStatesMatrixToList( final Map<String, List<GoId>> domain_id_to_go_ids_map,
2075 final Map<GoId, GoTerm> go_id_to_term_map,
2076 final GoNameSpace go_namespace_limit,
2077 final boolean domain_combinations,
2078 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
2079 final CharacterStateMatrix.GainLossStates state,
2080 final String filename,
2081 final String indentifier_characters_separator,
2082 final String character_separator,
2083 final String title_for_html,
2084 final String prefix_for_html,
2085 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
2086 final SortedSet<String> all_pfams_encountered,
2087 final SortedSet<String> pfams_gained_or_lost,
2088 final String suffix_for_per_node_events_file,
2089 final Map<String, Integer> tax_code_to_id_map ) {
// Argument sanity checks: a GO namespace limit and detailed HTML both require
// the corresponding GO maps to be present and non-empty.
2090 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
2091 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
2093 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
2094 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
2096 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
2097 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
2099 final File outfile = new File( filename );
2100 checkForOutputFileWriteability( outfile );
// Identifiers (tree node names) are processed in sorted order.
2101 final SortedSet<String> sorted_ids = new TreeSet<String>();
2102 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
2103 sorted_ids.add( matrix.getIdentifier( i ) );
2106 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
2107 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
2108 domain_combinations,
2111 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
2112 File per_node_go_mapped_domain_gain_loss_outfile = null;
2113 int per_node_counter = 0;
// HTML document header and title.
2114 out.write( "<html>" );
2115 out.write( SurfacingConstants.NL );
2116 writeHtmlHead( out, title_for_html );
2117 out.write( SurfacingConstants.NL );
2118 out.write( "<body>" );
2119 out.write( SurfacingConstants.NL );
2120 out.write( "<h1>" );
2121 out.write( SurfacingConstants.NL );
2122 out.write( title_for_html );
2123 out.write( SurfacingConstants.NL );
2124 out.write( "</h1>" );
2125 out.write( SurfacingConstants.NL );
2126 out.write( "<table>" );
2127 out.write( SurfacingConstants.NL );
// First pass: table of intra-page links to each identifier's section.
// NOTE(review): the body of the "matcher.matches()" branch is elided here --
// presumably it skips or specially handles SP-style taxonomy ids; confirm.
2128 for( final String id : sorted_ids ) {
2129 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
2130 if ( matcher.matches() ) {
2133 out.write( "<tr>" );
2134 out.write( "<td>" );
2135 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
2136 out.write( "</td>" );
2137 out.write( "</tr>" );
2138 out.write( SurfacingConstants.NL );
2140 out.write( "</table>" );
2141 out.write( SurfacingConstants.NL );
// Second pass: one section per identifier with its characters and GO data.
2142 for( final String id : sorted_ids ) {
2143 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
2144 if ( matcher.matches() ) {
2147 out.write( SurfacingConstants.NL );
2148 out.write( "<h2>" );
2149 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
2150 writeTaxonomyLinks( out, id, tax_code_to_id_map );
2151 out.write( "</h2>" );
2152 out.write( SurfacingConstants.NL );
2153 out.write( "<table>" );
2154 out.write( SurfacingConstants.NL );
2155 out.write( "<tr>" );
2156 out.write( "<td><b>" );
2157 out.write( "Pfam domain(s)" );
2158 out.write( "</b></td><td><b>" );
2159 out.write( "GO term acc" );
2160 out.write( "</b></td><td><b>" );
2161 out.write( "GO term" );
2162 out.write( "</b></td><td><b>" );
2163 out.write( "GO namespace" );
2164 out.write( "</b></td>" );
2165 out.write( "</tr>" );
2166 out.write( SurfacingConstants.NL );
2167 out.write( "</tr>" );
2168 out.write( SurfacingConstants.NL );
2169 per_node_counter = 0;
// Open the plain per-node gain/loss file for this identifier (if there are
// characters at all); deleted later if no character matched.
2170 if ( matrix.getNumberOfCharacters() > 0 ) {
2171 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2172 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
2173 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
2174 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
2175 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
2178 per_node_go_mapped_domain_gain_loss_outfile = null;
2179 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
2181 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
2183 // using null to indicate either UNCHANGED_PRESENT or GAIN.
2184 if ( ( matrix.getState( id, c ) == state ) || ( ( state == null )
2185 && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT )
2186 || ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
2187 final String character = matrix.getCharacter( c );
2188 String domain_0 = "";
2189 String domain_1 = "";
// A character may be a single domain or a binary combination "d0<SEP>d1".
2190 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
2191 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
2192 if ( s.length != 2 ) {
2193 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
2194 + character + "]" );
2200 domain_0 = character;
// Emit the HTML row(s) with GO annotations for domain_0 (and domain_1).
// NOTE(review): most of this call's argument list is elided in this extract.
2202 writeDomainData( domain_id_to_go_ids_map,
2209 character_separator,
2210 domain_id_to_secondary_features_maps,
2212 all_pfams_encountered.add( domain_0 );
2213 if ( pfams_gained_or_lost != null ) {
2214 pfams_gained_or_lost.add( domain_0 );
2216 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2217 all_pfams_encountered.add( domain_1 );
2218 if ( pfams_gained_or_lost != null ) {
2219 pfams_gained_or_lost.add( domain_1 );
2222 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
2223 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
// Close the per-node file; remove it again if nothing was written.
2230 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
2231 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
2232 if ( per_node_counter < 1 ) {
2233 per_node_go_mapped_domain_gain_loss_outfile.delete();
2235 per_node_counter = 0;
2237 out.write( "</table>" );
2238 out.write( SurfacingConstants.NL );
2239 out.write( "<hr>" );
2240 out.write( SurfacingConstants.NL );
2241 } // for( final String id : sorted_ids ) {
2242 out.write( "</body>" );
2243 out.write( SurfacingConstants.NL );
2244 out.write( "</html>" );
2245 out.write( SurfacingConstants.NL );
2249 catch ( final IOException e ) {
2250 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2252 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes the domain-combination counts of genome i (indexing into
// input_file_properties) to <species><DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX>,
// then appends one tab-separated row of domain-promiscuity statistics (mean,
// sd, median, min, max, n, most promiscuous domains) to the shared
// per-genome statistics writer. Embedded leading numbers are original-file
// line numbers; gaps mark elided lines -- notably the "final int i" parameter
// (line 2259), "try {" lines, out.close(), and closing braces.
2255 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
2256 final File output_dir,
2257 final Writer per_genome_domain_promiscuity_statistics_writer,
2258 final GenomeWideCombinableDomains gwcd,
2260 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
// Output file named after the species code (column 1) of genome i.
2261 File dc_outfile = new File( input_file_properties[ i ][ 1 ]
2262 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
2263 if ( output_dir != null ) {
2264 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
2266 checkForOutputFileWriteability( dc_outfile );
2268 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
2269 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
2272 catch ( final IOException e ) {
2273 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2275 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
2277 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 1 ] + "\t" );
2278 per_genome_domain_promiscuity_statistics_writer
2279 .write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
// Sample standard deviation needs at least two data points.
2280 if ( stats.getN() < 2 ) {
2281 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
2284 per_genome_domain_promiscuity_statistics_writer
2285 .write( FORMATTER_3.format( stats.sampleStandardDeviation() ) + "\t" );
2287 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
2288 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
2289 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
2290 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
// May be several equally promiscuous domains; space-separated.
2291 final SortedSet<String> mpds = gwcd.getMostPromiscuosDomain();
2292 for( final String mpd : mpds ) {
2293 per_genome_domain_promiscuity_statistics_writer.write( mpd + " " );
2295 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
2297 catch ( final IOException e ) {
2298 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Rows may have 2 or 3 columns; the message includes the third if present.
2300 if ( input_file_properties[ i ].length == 3 ) {
2301 ForesterUtil.programMessage( surfacing.PRG_NAME,
2302 "Wrote domain combination counts for \"" + input_file_properties[ i ][ 0 ]
2303 + "\" (" + input_file_properties[ i ][ 1 ] + ", "
2304 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
2307 ForesterUtil.programMessage( surfacing.PRG_NAME,
2308 "Wrote domain combination counts for \"" + input_file_properties[ i ][ 0 ]
2309 + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \"" + dc_outfile
// Writes the domain-similarity report either as simple tab-delimited text or
// as HTML, to a single writer and/or to per-initial-letter "split" writers
// (keyed by the lowercased first character of the domain id; '0' is the
// fallback bucket). The HTML variant first emits an index of domain-id links
// (green for pos_filter_doms members), then a taxonomy color legend, then the
// similarity table itself, and finally the closing tags. Embedded leading
// numbers are original-file line numbers; gaps mark elided lines (switch case
// labels, "try {" lines, some writes, closing braces, w.close()).
2314 public static void writeDomainSimilaritiesToFile( final StringBuilder html_desc,
2315 final StringBuilder html_title,
2316 final Writer simple_tab_writer,
2317 final Writer single_writer,
2318 Map<Character, Writer> split_writers,
2319 final SortedSet<DomainSimilarity> similarities,
2320 final boolean treat_as_binary,
2321 final List<Species> species_order,
2322 final DomainSimilarity.PRINT_OPTION print_option,
2323 final DomainSimilarity.DomainSimilarityScoring scoring,
2324 final boolean verbose,
2325 final Map<String, Integer> tax_code_to_id_map,
2326 final Phylogeny phy,
2327 final Set<String> pos_filter_doms )
2328 throws IOException {
// Normalize: a lone single_writer is wrapped as the only split writer ('_').
2329 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
2330 split_writers = new HashMap<Character, Writer>();
2331 split_writers.put( '_', single_writer );
2333 switch ( print_option ) {
2334 case SIMPLE_TAB_DELIMITED:
// HTML preamble per split writer (head includes the bucket letter when split).
2337 for( final Character key : split_writers.keySet() ) {
2338 final Writer w = split_writers.get( key );
2339 w.write( "<html>" );
2340 w.write( SurfacingConstants.NL );
2342 writeHtmlHead( w, "DC analysis (" + html_title + ") " + key.toString().toUpperCase() );
2345 writeHtmlHead( w, "DC analysis (" + html_title + ")" );
2347 w.write( SurfacingConstants.NL );
2348 w.write( "<body>" );
2349 w.write( SurfacingConstants.NL );
2350 w.write( html_desc.toString() );
2351 w.write( SurfacingConstants.NL );
2353 w.write( SurfacingConstants.NL );
2355 w.write( SurfacingConstants.NL );
2356 w.write( "<table>" );
2357 w.write( SurfacingConstants.NL );
2358 w.write( "<tr><td><b>Domains:</b></td></tr>" );
2359 w.write( SurfacingConstants.NL );
// Index of intra-page links, one per similarity's domain id; entries in
// pos_filter_doms are highlighted green (#00ff00).
2364 for( final DomainSimilarity similarity : similarities ) {
2365 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2366 ( similarity ).setSpeciesOrder( species_order );
2368 if ( single_writer != null ) {
2369 if ( !ForesterUtil.isEmpty( pos_filter_doms )
2370 && pos_filter_doms.contains( similarity.getDomainId() ) ) {
2371 single_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId()
2372 + "\"><span style=\"color:#00ff00\">" + similarity.getDomainId()
2373 + "</span></a></b></td></tr>" );
2376 single_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
2377 + similarity.getDomainId() + "</a></b></td></tr>" );
2379 single_writer.write( SurfacingConstants.NL );
// Split mode: bucket by lowercased first character; '0' is the fallback.
2382 Writer local_writer = split_writers
2383 .get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase().charAt( 0 ) );
2384 if ( local_writer == null ) {
2385 local_writer = split_writers.get( '0' );
2387 if ( !ForesterUtil.isEmpty( pos_filter_doms )
2388 && pos_filter_doms.contains( similarity.getDomainId() ) ) {
2389 local_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId()
2390 + "\"><span style=\"color:#00ff00\">" + similarity.getDomainId()
2391 + "</span></a></b></td></tr>" );
2394 local_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
2395 + similarity.getDomainId() + "</a></b></td></tr>" );
2397 local_writer.write( SurfacingConstants.NL );
// Taxonomy color legend, written to every split writer.
2400 for( final Writer w : split_writers.values() ) {
2401 w.write( "</table>" );
2402 w.write( SurfacingConstants.NL );
2404 w.write( SurfacingConstants.NL );
2406 w.write( "<table>" );
2407 w.write( SurfacingConstants.NL );
2408 w.write( "<tr><td><b>" );
2409 w.write( "Species group colors:" );
2410 w.write( "</b></td></tr>" );
2411 w.write( SurfacingConstants.NL );
2412 writeColorLabels( "Deuterostomia", TaxonomyColors.DEUTEROSTOMIA_COLOR, w );
2413 writeColorLabels( "Protostomia", TaxonomyColors.PROTOSTOMIA_COLOR, w );
2414 writeColorLabels( "Cnidaria", TaxonomyColors.CNIDARIA_COLOR, w );
2415 writeColorLabels( "Placozoa", TaxonomyColors.PLACOZOA_COLOR, w );
2416 writeColorLabels( "Ctenophora (comb jellies)", TaxonomyColors.CTENOPHORA_COLOR, w );
2417 writeColorLabels( "Porifera (sponges)", TaxonomyColors.PORIFERA_COLOR, w );
2418 writeColorLabels( "Choanoflagellida", TaxonomyColors.CHOANOFLAGELLIDA, w );
2419 writeColorLabels( "Ichthyosporea & Filasterea", TaxonomyColors.ICHTHYOSPOREA_AND_FILASTEREA, w );
2420 writeColorLabels( "Dikarya (Ascomycota & Basidiomycota, so-called \"higher fungi\")",
2421 TaxonomyColors.DIKARYA_COLOR,
2423 writeColorLabels( "other Fungi", TaxonomyColors.OTHER_FUNGI_COLOR, w );
2424 writeColorLabels( "Nucleariidae and Fonticula group",
2425 TaxonomyColors.NUCLEARIIDAE_AND_FONTICULA_GROUP_COLOR,
2427 writeColorLabels( "Amoebozoa", TaxonomyColors.AMOEBOZOA_COLOR, w );
2428 writeColorLabels( "Embryophyta (plants)", TaxonomyColors.EMBRYOPHYTA_COLOR, w );
2429 writeColorLabels( "Chlorophyta (green algae)", TaxonomyColors.CHLOROPHYTA_COLOR, w );
2430 writeColorLabels( "Rhodophyta (red algae)", TaxonomyColors.RHODOPHYTA_COLOR, w );
2431 writeColorLabels( "Glaucocystophyce (Glaucophyta)", TaxonomyColors.GLAUCOPHYTA_COLOR, w );
2432 writeColorLabels( "Hacrobia (Cryptophyta & Haptophyceae & Centroheliozoa)",
2433 TaxonomyColors.HACROBIA_COLOR,
2435 writeColorLabels( "Stramenopiles (Chromophyta, heterokonts)", TaxonomyColors.STRAMENOPILES_COLOR, w );
2436 writeColorLabels( "Alveolata", TaxonomyColors.ALVEOLATA_COLOR, w );
2437 writeColorLabels( "Rhizaria", TaxonomyColors.RHIZARIA_COLOR, w );
2438 writeColorLabels( "Excavata", TaxonomyColors.EXCAVATA_COLOR, w );
2439 writeColorLabels( "Apusozoa", TaxonomyColors.APUSOZOA_COLOR, w );
2440 writeColorLabels( "Archaea", TaxonomyColors.ARCHAEA_COLOR, w );
2441 writeColorLabels( "Bacteria", TaxonomyColors.BACTERIA_COLOR, w );
2442 w.write( "</table>" );
2443 w.write( SurfacingConstants.NL );
2446 w.write( SurfacingConstants.NL );
2447 w.write( "<table>" );
2448 w.write( SurfacingConstants.NL );
// Main similarity table: one entry per similarity, written to the simple tab
// writer (SIMPLE_TAB_DELIMITED format), the single writer, and/or its bucket.
2451 for( final DomainSimilarity similarity : similarities ) {
2452 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2453 ( similarity ).setSpeciesOrder( species_order );
2455 if ( simple_tab_writer != null ) {
2456 simple_tab_writer.write( similarity
2457 .toStringBuffer( PRINT_OPTION.SIMPLE_TAB_DELIMITED, tax_code_to_id_map, null ).toString() );
2459 if ( single_writer != null ) {
2460 single_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map, phy ).toString() );
2461 single_writer.write( SurfacingConstants.NL );
2464 Writer local_writer = split_writers
2465 .get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase().charAt( 0 ) );
2466 if ( local_writer == null ) {
2467 local_writer = split_writers.get( '0' );
2469 local_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map, phy ).toString() );
2470 local_writer.write( SurfacingConstants.NL );
// HTML epilogue (closing tags) per split writer, then cleanup of all writers.
2473 switch ( print_option ) {
2475 for( final Writer w : split_writers.values() ) {
2476 w.write( SurfacingConstants.NL );
2477 w.write( "</table>" );
2478 w.write( SurfacingConstants.NL );
2479 w.write( "</font>" );
2480 w.write( SurfacingConstants.NL );
2481 w.write( "</body>" );
2482 w.write( SurfacingConstants.NL );
2483 w.write( "</html>" );
2484 w.write( SurfacingConstants.NL );
2490 for( final Writer w : split_writers.values() ) {
// Writes a complete HTML <head> section (title plus an embedded CSS stylesheet
// for anchors, anchor classes "pl"/"ps", table cells, and h1/h2 headings) to
// the given writer, one fragment per write followed by a newline constant.
// NOTE(review): the embedded source numbering jumps 2498 -> 2500, and the
// method's closing brace (after 2540) is absent from this listing — the line
// that actually writes the 'title' argument between <title> and </title>
// appears to be missing; confirm against the original file.
// NOTE(review): "##000066" at 2508 contains a doubled '#', which is not a
// valid CSS hex color — looks like a typo for "#000066"; confirm and fix in
// the original file.
2495 public static void writeHtmlHead( final Writer w, final String title ) throws IOException {
2496 w.write( SurfacingConstants.NL );
2497 w.write( "<head>" );
2498 w.write( "<title>" );
2500 w.write( "</title>" );
2501 w.write( SurfacingConstants.NL );
2502 w.write( "<style>" );
2503 w.write( SurfacingConstants.NL );
2504 w.write( "a:visited { color : #000066; text-decoration : none; }" );
2505 w.write( SurfacingConstants.NL );
2506 w.write( "a:link { color : #000066; text-decoration : none; }" );
2507 w.write( SurfacingConstants.NL );
2508 w.write( "a:active { color : ##000066; text-decoration : none; }" );
2509 w.write( SurfacingConstants.NL );
2510 w.write( "a:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; }" );
2511 w.write( SurfacingConstants.NL );
2513 w.write( "a.pl:visited { color : #505050; text-decoration : none; font-size: 7px;}" );
2514 w.write( SurfacingConstants.NL );
2515 w.write( "a.pl:link { color : #505050; text-decoration : none; font-size: 7px;}" );
2516 w.write( SurfacingConstants.NL );
2517 w.write( "a.pl:active { color : #505050; text-decoration : none; font-size: 7px;}" );
2518 w.write( SurfacingConstants.NL );
2519 w.write( "a.pl:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; font-size: 7px;}" );
2520 w.write( SurfacingConstants.NL );
2522 w.write( "a.ps:visited { color : #707070; text-decoration : none; font-size: 7px;}" );
2523 w.write( SurfacingConstants.NL );
2524 w.write( "a.ps:link { color : #707070; text-decoration : none; font-size: 7px;}" );
2525 w.write( SurfacingConstants.NL );
2526 w.write( "a.ps:active { color : #707070; text-decoration : none; font-size: 7px;}" );
2527 w.write( SurfacingConstants.NL );
2528 w.write( "a.ps:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; font-size: 7px;}" );
2529 w.write( SurfacingConstants.NL );
2531 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
2532 w.write( SurfacingConstants.NL );
2533 w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
2534 w.write( SurfacingConstants.NL );
2535 w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
2536 w.write( SurfacingConstants.NL );
2537 w.write( "</style>" );
2538 w.write( SurfacingConstants.NL );
2539 w.write( "</head>" );
2540 w.write( SurfacingConstants.NL );
// Writes a character-state matrix to 'filename' in the requested Format,
// aborting the whole program (ForesterUtil.fatalError) on any IOException,
// then reports success via programMessage.
// NOTE(review): numbering gaps (2548, 2551-2553, 2556) indicate the 'try {'
// opener, the writer close/flush, and closing braces are missing from this
// listing; as written here 'out' is never closed — confirm the original
// closes the BufferedWriter after matrix.toWriter().
2543 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
2544 final String filename,
2545 final Format format ) {
2546 final File outfile = new File( filename );
2547 checkForOutputFileWriteability( outfile );
2549 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
2550 matrix.toWriter( out, format );
2554 catch ( final IOException e ) {
2555 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2557 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Writes a list of distance matrices, concatenated in PHYLIP format and
// separated by line separators, to 'matrix_outfile'. Fatal on IOException;
// reports success otherwise.
// NOTE(review): numbering gaps (2562, 2567-2570, 2573) indicate the 'try {'
// opener, the writer close, and closing braces are absent from this listing —
// confirm the original closes 'out'.
2560 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
2561 checkForOutputFileWriteability( matrix_outfile );
2563 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
2564 for( final DistanceMatrix distance_matrix : matrices ) {
2565 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
2566 out.write( ForesterUtil.LINE_SEPARATOR );
2571 catch ( final IOException e ) {
2572 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2574 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
// Serializes a phylogeny to 'filename' as phyloXML. Unlike the matrix
// writers above, an IOException here is only a warning, not fatal — the
// success message at 2586 is then still printed (it is outside the catch),
// which may be misleading on failure; confirm intent in the original file.
// NOTE(review): numbering gaps (2579, 2581, 2585, 2587) indicate try/catch
// braces are missing from this listing.
2577 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
2578 final PhylogenyWriter writer = new PhylogenyWriter();
2580 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
2582 catch ( final IOException e ) {
2583 ForesterUtil.printWarningMessage( surfacing.PRG_NAME,
2584 "failed to write phylogeny to \"" + filename + "\": " + e );
2586 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
// Writes two NEXUS binary presence/absence matrices derived from the
// genome-wide combinable-domain list: one for individual domains (optionally
// restricted by 'filter' when a positive filter file was given) and one for
// binary domain combinations. Any exception is fatal.
// NOTE(review): numbering gaps (2593, 2602, 2605-2606) indicate 'try {' and
// closing braces are missing from this listing.
2589 public static void writePresentToNexus( final File output_file,
2590 final File positive_filter_file,
2591 final SortedSet<String> filter,
2592 final List<GenomeWideCombinableDomains> gwcd_list ) {
2594 writeMatrixToFile( DomainParsimonyCalculator
2595 .createMatrixOfDomainPresenceOrAbsence( gwcd_list, positive_filter_file == null ? null : filter ),
2596 output_file + surfacing.DOMAINS_PRESENT_NEXUS,
2597 Format.NEXUS_BINARY );
2598 writeMatrixToFile( DomainParsimonyCalculator
2599 .createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ),
2600 output_file + surfacing.BDC_PRESENT_NEXUS,
2601 Format.NEXUS_BINARY );
2603 catch ( final Exception e ) {
2604 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
// For every domain id seen in any genome (union over gwcd_list), writes one
// "<domain><SEQ_EXTRACT_SUFFIX>" file under output_dir listing the proteins
// containing that domain (via extractProteinNames). Domains not in the
// positive filter are skipped when a non-empty filter is supplied (2618).
// Fatal on IOException.
// NOTE(review): numbering gaps (2619-2620, 2624, 2627, 2629, 2631, 2633,
// 2636) hide the 'continue' of the filter branch, 'try {', and some
// extractProteinNames arguments — this listing is incomplete; consult the
// original file before editing.
2608 public static void writeProteinListsForAllSpecies( final File output_dir,
2609 final SortedMap<Species, List<Protein>> protein_lists_per_species,
2610 final List<GenomeWideCombinableDomains> gwcd_list,
2611 final double domain_e_cutoff,
2612 final Set<String> pos_filter_doms ) {
2613 final SortedSet<String> all_domains = new TreeSet<String>();
2614 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
2615 all_domains.addAll( gwcd.getAllDomainIds() );
2617 for( final String domain : all_domains ) {
2618 if ( !ForesterUtil.isEmpty( pos_filter_doms ) && !pos_filter_doms.contains( domain ) ) {
2621 final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain
2622 + surfacing.SEQ_EXTRACT_SUFFIX );
2623 checkForOutputFileWriteability( out );
2625 final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
2626 extractProteinNames( protein_lists_per_species,
2628 proteins_file_writer,
2630 surfacing.LIMIT_SPEC_FOR_PROT_EX,
2632 proteins_file_writer.close();
2634 catch ( final IOException e ) {
2635 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2637 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );
// Emits an HTML " [uniprot|eol|scholar|google]" link group for a species
// code, but only when the code looks like a plain taxon code: longer than
// one character and with no '_' at or after index 1 (2645). The uniprot
// link is emitted only when a taxonomy-id mapping exists for the code.
// NOTE(review): numbering gaps (2650-2651, 2660, 2662) indicate the closing
// brace of the uniprot branch and a '|' separator line may be missing from
// this listing.
2641 public static void writeTaxonomyLinks( final Writer writer,
2642 final String species,
2643 final Map<String, Integer> tax_code_to_id_map )
2644 throws IOException {
2645 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
2646 writer.write( " [" );
2647 if ( ( tax_code_to_id_map != null ) && tax_code_to_id_map.containsKey( species ) ) {
2648 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_TAXONOMY_ID_LINK
2649 + tax_code_to_id_map.get( species ) + "\" target=\"taxonomy_window\">uniprot</a>" );
2652 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
2653 + "\" target=\"taxonomy_window\">eol</a>" );
2654 writer.write( "|" );
2655 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_SCHOLAR_SEARCH + species
2656 + "\" target=\"taxonomy_window\">scholar</a>" );
2657 writer.write( "|" );
2658 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_WEB_SEARCH_LINK + species
2659 + "\" target=\"taxonomy_window\">google</a>" );
2661 writer.write( "]" );
// Increments the occurrence count of 's' in 'map'. Only the increment
// branch is visible here; the numbering gap (2668-2672) indicates the else
// branch that seeds a new key (presumably map.put( s, 1 )) is missing from
// this listing — confirm against the original file. A modern equivalent
// would be map.merge( s, 1, Integer::sum ).
2665 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
2666 if ( map.containsKey( s ) ) {
2667 map.put( s, map.get( s ) + 1 );
// Analyzes independently (re-)gained domain combinations ("dc", encoded as
// "A=B" strings) on a Fitch-annotated phylogeny and writes several report
// files: a gain-count histogram, per-count dc lists (plain and split for GO
// mapping, with and without duplicates), LCA rank / ancestor-species counts
// for multiply-gained dcs, and optional protein/domain length statistics.
// NOTE(review): this listing has many numbering gaps (e.g. 2685-2686 'try',
// 2721-2726 else/braces, 2751-2752, 2785-2786, 2811-2814, 2838-2841,
// 2857-2860 out_counts/out_dc close, 2870-2872 nodes.add, 2880, 2882,
// 2890-2891, 2895-2897, 2911 onwards various NL writes, 3002, 3009-3010) —
// the code below is an incomplete extraction; do not edit without the
// original file.
2674 private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
2675 final String outfilename_for_counts,
2676 final String outfilename_for_dc,
2677 final String outfilename_for_dc_for_go_mapping,
2678 final String outfilename_for_dc_for_go_mapping_unique,
2679 final String outfilename_for_rank_counts,
2680 final String outfilename_for_ancestor_species_counts,
2681 final String outfilename_for_protein_stats,
2682 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
2683 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
2684 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
2687 // if ( protein_length_stats_by_dc != null ) {
2688 // for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
2689 // System.out.print( entry.getKey().toString() );
2690 // System.out.print( ": " );
2691 // double[] a = entry.getValue().getDataAsDoubleArray();
2692 // for( int i = 0; i < a.length; i++ ) {
2693 // System.out.print( a[ i ] + " " );
2695 // System.out.println();
2698 // if ( domain_number_stats_by_dc != null ) {
2699 // for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
2700 // System.out.print( entry.getKey().toString() );
2701 // System.out.print( ": " );
2702 // double[] a = entry.getValue().getDataAsDoubleArray();
2703 // for( int i = 0; i < a.length; i++ ) {
2704 // System.out.print( a[ i ] + " " );
2706 // System.out.println();
// Open the four primary output writers.
2710 final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
2711 final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
2712 final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
2713 final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
// Count, per domain combination, on how many tree nodes it was gained.
2714 final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
2715 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
2716 final PhylogenyNode n = it.next();
2717 final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
2718 for( final String dc : gained_dc ) {
2719 if ( dc_gain_counts.containsKey( dc ) ) {
2720 dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
2723 dc_gain_counts.put( dc, 1 );
// Build a histogram over gain counts plus per-count dc lists and optional
// per-count length/number statistics.
2727 final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
2728 final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
2729 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
2730 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
2731 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
2732 final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
2733 final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
2734 final Set<String> dcs = dc_gain_counts.keySet();
2735 final SortedSet<String> more_than_once = new TreeSet<String>();
2736 DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
2737 DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
2738 DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
2739 final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
2740 long gained_multiple_times_domain_length_sum = 0;
2741 long gained_once_domain_length_sum = 0;
2742 long gained_multiple_times_domain_length_count = 0;
2743 long gained_once_domain_length_count = 0;
2744 for( final String dc : dcs ) {
2745 final int count = dc_gain_counts.get( dc );
2746 if ( histogram.containsKey( count ) ) {
2747 histogram.put( count, histogram.get( count ) + 1 );
2748 domain_lists.get( count ).append( ", " + dc );
2749 domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
2750 domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
2753 histogram.put( count, 1 );
2754 domain_lists.put( count, new StringBuilder( dc ) );
2755 final PriorityQueue<String> q = new PriorityQueue<String>();
2756 q.addAll( splitDomainCombination( dc ) );
2757 domain_lists_go.put( count, q );
2758 final SortedSet<String> set = new TreeSet<String>();
2759 set.addAll( splitDomainCombination( dc ) );
2760 domain_lists_go_unique.put( count, set );
2762 if ( protein_length_stats_by_dc != null ) {
2763 if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
2764 dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
2766 dc_reapp_counts_to_protein_length_stats.get( count )
2767 .addValue( protein_length_stats_by_dc.get( dc ).arithmeticMean() );
2769 if ( domain_number_stats_by_dc != null ) {
2770 if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
2771 dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
2773 dc_reapp_counts_to_domain_number_stats.get( count )
2774 .addValue( domain_number_stats_by_dc.get( dc ).arithmeticMean() );
2776 if ( domain_length_stats_by_domain != null ) {
2777 if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
2778 dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
// A dc is "A=B"; add the mean length of each member domain separately.
2780 final String[] ds = dc.split( "=" );
2781 dc_reapp_counts_to_domain_lengths_stats.get( count )
2782 .addValue( domain_length_stats_by_domain.get( ds[ 0 ] ).arithmeticMean() );
2783 dc_reapp_counts_to_domain_lengths_stats.get( count )
2784 .addValue( domain_length_stats_by_domain.get( ds[ 1 ] ).arithmeticMean() );
// Branch for dcs gained more than once (count > 1, per the missing
// surrounding condition — confirm in original file).
2787 more_than_once.add( dc );
2788 if ( protein_length_stats_by_dc != null ) {
2789 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2790 for( final double element : s.getData() ) {
2791 gained_multiple_times_lengths_stats.addValue( element );
2794 if ( domain_number_stats_by_dc != null ) {
2795 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2796 for( final double element : s.getData() ) {
2797 gained_multiple_times_domain_count_stats.addValue( element );
2800 if ( domain_length_stats_by_domain != null ) {
2801 final String[] ds = dc.split( "=" );
2802 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2803 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2804 for( final double element : s0.getData() ) {
2805 gained_multiple_times_domain_length_sum += element;
2806 ++gained_multiple_times_domain_length_count;
2808 for( final double element : s1.getData() ) {
2809 gained_multiple_times_domain_length_sum += element;
2810 ++gained_multiple_times_domain_length_count;
// Corresponding branch for dcs gained exactly once.
2815 if ( protein_length_stats_by_dc != null ) {
2816 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2817 for( final double element : s.getData() ) {
2818 gained_once_lengths_stats.addValue( element );
2821 if ( domain_number_stats_by_dc != null ) {
2822 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2823 for( final double element : s.getData() ) {
2824 gained_once_domain_count_stats.addValue( element );
2827 if ( domain_length_stats_by_domain != null ) {
2828 final String[] ds = dc.split( "=" );
2829 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2830 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2831 for( final double element : s0.getData() ) {
2832 gained_once_domain_length_sum += element;
2833 ++gained_once_domain_length_count;
2835 for( final double element : s1.getData() ) {
2836 gained_once_domain_length_sum += element;
2837 ++gained_once_domain_length_count;
// Emit histogram + dc lists, per gain count.
2842 final Set<Integer> histogram_keys = histogram.keySet();
2843 for( final Integer histogram_key : histogram_keys ) {
2844 final int count = histogram.get( histogram_key );
2845 final StringBuilder dc = domain_lists.get( histogram_key );
2846 out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
2847 out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
2848 out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
// PriorityQueue iteration order is unordered; sort a snapshot for output.
2849 final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
2850 Arrays.sort( sorted );
2851 for( final Object domain : sorted ) {
2852 out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
2854 out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
2855 for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
2856 out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
2861 out_dc_for_go_mapping.close();
2862 out_dc_for_go_mapping_unique.close();
// For every dc gained more than once, find the external nodes that carry
// the gain and tally the taxonomic rank and species name of each pairwise
// LCA.
2863 final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
2864 final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
2865 for( final String dc : more_than_once ) {
2866 final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
2867 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
2868 final PhylogenyNode n = it.next();
2869 if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
// O(n^2) over the carrying nodes: one LCA per unordered node pair.
2873 for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
2874 for( int j = i + 1; j < nodes.size(); ++j ) {
2875 final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
2876 String rank = "unknown";
2877 if ( lca.getNodeData().isHasTaxonomy()
2878 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
2879 rank = lca.getNodeData().getTaxonomy().getRank();
2881 addToCountMap( lca_rank_counts, rank );
// Species label preference: scientific name, then common name, then
// the node name.
2883 if ( lca.getNodeData().isHasTaxonomy()
2884 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
2885 lca_species = lca.getNodeData().getTaxonomy().getScientificName();
2887 else if ( lca.getNodeData().isHasTaxonomy()
2888 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
2889 lca_species = lca.getNodeData().getTaxonomy().getCommonName();
2892 lca_species = lca.getName();
2894 addToCountMap( lca_ancestor_species_counts, lca_species );
2898 final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
2899 final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
2900 ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
2901 ForesterUtil.map2writer( out_for_ancestor_species_counts,
2902 lca_ancestor_species_counts,
2904 ForesterUtil.LINE_SEPARATOR );
2905 out_for_rank_counts.close();
2906 out_for_ancestor_species_counts.close();
// Optional protein-statistics report, written only when a filename was
// given and at least one statistics map is present.
2907 if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats ) && ( ( domain_length_stats_by_domain != null )
2908 || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
2909 final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
2910 w.write( "Domain Lengths: " );
2912 if ( domain_length_stats_by_domain != null ) {
2913 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
2915 w.write( entry.getKey().toString() );
2916 w.write( "\t" + entry.getValue().arithmeticMean() );
2917 w.write( "\t" + entry.getValue().median() );
2924 w.write( "Protein Lengths: " );
2926 if ( protein_length_stats_by_dc != null ) {
2927 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
2929 w.write( entry.getKey().toString() );
2930 w.write( "\t" + entry.getValue().arithmeticMean() );
2931 w.write( "\t" + entry.getValue().median() );
2938 w.write( "Number of domains: " );
2940 if ( domain_number_stats_by_dc != null ) {
2941 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
2943 w.write( entry.getKey().toString() );
2944 w.write( "\t" + entry.getValue().arithmeticMean() );
2945 w.write( "\t" + entry.getValue().median() );
2952 w.write( "Gained once, domain lengths:" );
2954 w.write( "N: " + gained_once_domain_length_count );
2956 w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
2959 w.write( "Gained multiple times, domain lengths:" );
2961 w.write( "N: " + gained_multiple_times_domain_length_count );
2963 w.write( "Avg: " + ( ( double ) gained_multiple_times_domain_length_sum
2964 / gained_multiple_times_domain_length_count ) );
2969 w.write( "Gained once, protein lengths:" );
2971 w.write( gained_once_lengths_stats.toString() );
// Nulled after use, presumably to release the collected samples early.
2972 gained_once_lengths_stats = null;
2975 w.write( "Gained once, domain counts:" );
2977 w.write( gained_once_domain_count_stats.toString() );
2978 gained_once_domain_count_stats = null;
2981 w.write( "Gained multiple times, protein lengths:" );
2983 w.write( gained_multiple_times_lengths_stats.toString() );
2984 gained_multiple_times_lengths_stats = null;
2987 w.write( "Gained multiple times, domain counts:" );
2989 w.write( gained_multiple_times_domain_count_stats.toString() );
2994 catch ( final IOException e ) {
2995 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2997 ForesterUtil.programMessage( surfacing.PRG_NAME,
2998 "Wrote independent domain combination gains fitch counts to ["
2999 + outfilename_for_counts + "]" );
3000 ForesterUtil.programMessage( surfacing.PRG_NAME,
3001 "Wrote independent domain combination gains fitch lists to [" + outfilename_for_dc
3003 ForesterUtil.programMessage( surfacing.PRG_NAME,
3004 "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
3005 + outfilename_for_dc_for_go_mapping + "]" );
3006 ForesterUtil.programMessage( surfacing.PRG_NAME,
3007 "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
3008 + outfilename_for_dc_for_go_mapping_unique + "]" );
// Collects, over all descendants of 'subtree_root', either the gained or the
// lost binary characters (domains), selected by 'get_gains'.
// NOTE(review): numbering gaps (3016, 3018-3019, 3021-3025) indicate the
// if/else structure around the two addAll calls and the 'return domains;'
// are missing from this listing — confirm against the original file.
3011 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
3012 final boolean get_gains ) {
3013 final SortedSet<String> domains = new TreeSet<String>();
3014 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
3015 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
3017 domains.addAll( chars.getGainedCharacters() );
3020 domains.addAll( chars.getLostCharacters() );
// Builds (creating directories as needed) the nested output directory for
// per-node domain gain/loss files:
//   <outfile-parent>/<base_dir>/{DC|DOMAINS}/{GAINS|LOSSES|PRESENT}
// "DC" vs "DOMAINS" is chosen by 'domain_combinations'; the leaf is chosen
// by 'state' (GAIN -> GAINS, LOSS -> LOSSES, otherwise PRESENT per the
// trailing else at 3055 — its 'else {' line is absent from this listing).
// NOTE(review): mkdir() return values are ignored throughout; failures
// would surface later as I/O errors — confirm that is acceptable.
3026 private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
3027 final boolean domain_combinations,
3028 final CharacterStateMatrix.GainLossStates state,
3029 final String outfile ) {
3030 File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
3031 + ForesterUtil.FILE_SEPARATOR + base_dir );
3032 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
3033 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
3035 if ( domain_combinations ) {
3036 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
3037 + ForesterUtil.FILE_SEPARATOR + "DC" );
3040 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
3041 + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
3043 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
3044 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
3046 if ( state == GainLossStates.GAIN ) {
3047 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
3048 + ForesterUtil.FILE_SEPARATOR + "GAINS" );
3050 else if ( state == GainLossStates.LOSS ) {
3051 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
3052 + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
3055 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
3056 + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
3058 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
3059 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
3061 return per_node_go_mapped_domain_gain_loss_files_base_dir;
// Flattens a genome's combinable-domains map into the sorted set of all its
// binary domain combinations (duplicates across domains collapse in the
// TreeSet).
3064 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
3065 final SortedMap<String, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
3066 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
3067 for( final String domain_id : cds.keySet() ) {
3068 final CombinableDomains cd = cds.get( domain_id );
3069 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
3071 return binary_combinations;
// Writes an HTML fragment summarizing a statistics object: an optional ASCII
// histogram inside <tt><pre>, then a small table with N, min, max, mean,
// and sample SD (SD is "n/a" when N <= 1, since sample SD needs >= 2 values).
// NOTE(review): numbering gaps (3076-3077, 3084, 3099-3100, 3102, 3106,
// 3108) indicate surrounding <hr>/brace/else lines are missing from this
// listing.
3074 private static void printSomeStats( final DescriptiveStatistics stats, final AsciiHistogram histo, final Writer w )
3075 throws IOException {
3078 w.write( SurfacingConstants.NL );
3079 w.write( "<tt><pre>" );
3080 w.write( SurfacingConstants.NL );
3081 if ( histo != null ) {
3082 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
3083 w.write( SurfacingConstants.NL );
3085 w.write( "</pre></tt>" );
3086 w.write( SurfacingConstants.NL );
3087 w.write( "<table>" );
3088 w.write( SurfacingConstants.NL );
3089 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
3090 w.write( SurfacingConstants.NL );
3091 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
3092 w.write( SurfacingConstants.NL );
3093 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
3094 w.write( SurfacingConstants.NL );
3095 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
3096 w.write( SurfacingConstants.NL );
3097 if ( stats.getN() > 1 ) {
3098 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
3101 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
3103 w.write( SurfacingConstants.NL );
3104 w.write( "</table>" );
3105 w.write( SurfacingConstants.NL );
3107 w.write( SurfacingConstants.NL );
// Splits a stringified domain combination "A=B" into its two domain ids;
// prints an error (but, as visible here, does not throw) when the string
// does not contain exactly one '='.
// NOTE(review): numbering gaps (3115-3116, 3118-3121) indicate the error
// branch's closing/return and the trailing "l.add( s[ 0 ] ); l.add( s[ 1 ] );
// return l;" are missing from this listing — confirm against the original
// file.
3110 private static List<String> splitDomainCombination( final String dc ) {
3111 final String[] s = dc.split( "=" );
3112 if ( s.length != 2 ) {
3113 ForesterUtil.printErrorMessage( surfacing.PRG_NAME,
3114 "Stringyfied domain combination has illegal format: " + dc );
3117 final List<String> l = new ArrayList<String>( 2 );
3123 private static void writeAllEncounteredPfamsToFile( final Map<String, List<GoId>> domain_id_to_go_ids_map,
3124 final Map<GoId, GoTerm> go_id_to_term_map,
3125 final String outfile_name,
3126 final SortedSet<String> all_pfams_encountered ) {
3127 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
3128 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
3129 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
3130 final File encountered_pfams_summary_file = new File( outfile_name
3131 + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
3132 int biological_process_counter = 0;
3133 int cellular_component_counter = 0;
3134 int molecular_function_counter = 0;
3135 int pfams_with_mappings_counter = 0;
3136 int pfams_without_mappings_counter = 0;
3137 int pfams_without_mappings_to_bp_or_mf_counter = 0;
3138 int pfams_with_mappings_to_bp_or_mf_counter = 0;
3140 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
3141 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
3142 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
3143 summary_writer.write( "# Pfam to GO mapping summary" );
3144 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3145 summary_writer.write( "# Actual summary is at the end of this file." );
3146 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3147 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
3148 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3149 for( final String pfam : all_pfams_encountered ) {
3150 all_pfams_encountered_writer.write( pfam );
3151 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
3152 final String domain_id = new String( pfam );
3153 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
3154 ++pfams_with_mappings_counter;
3155 all_pfams_encountered_with_go_annotation_writer.write( pfam );
3156 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
3157 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
3158 boolean maps_to_bp = false;
3159 boolean maps_to_cc = false;
3160 boolean maps_to_mf = false;
3161 for( final GoId go_id : go_ids ) {
3162 final GoTerm go_term = go_id_to_term_map.get( go_id );
3163 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
3166 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
3169 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
3174 ++biological_process_counter;
3177 ++cellular_component_counter;
3180 ++molecular_function_counter;
3182 if ( maps_to_bp || maps_to_mf ) {
3183 ++pfams_with_mappings_to_bp_or_mf_counter;
3186 ++pfams_without_mappings_to_bp_or_mf_counter;
3190 ++pfams_without_mappings_to_bp_or_mf_counter;
3191 ++pfams_without_mappings_counter;
3192 summary_writer.write( pfam );
3193 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3196 all_pfams_encountered_writer.close();
3197 all_pfams_encountered_with_go_annotation_writer.close();
3198 ForesterUtil.programMessage( surfacing.PRG_NAME,
3199 "Wrote all [" + all_pfams_encountered.size() + "] encountered Pfams to: \""
3200 + all_pfams_encountered_file + "\"" );
3201 ForesterUtil.programMessage( surfacing.PRG_NAME,
3202 "Wrote all [" + pfams_with_mappings_counter
3203 + "] encountered Pfams with GO mappings to: \""
3204 + all_pfams_encountered_with_go_annotation_file + "\"" );
3205 ForesterUtil.programMessage( surfacing.PRG_NAME,
3206 "Wrote summary (including all [" + pfams_without_mappings_counter
3207 + "] encountered Pfams without GO mappings) to: \""
3208 + encountered_pfams_summary_file + "\"" );
3209 ForesterUtil.programMessage( surfacing.PRG_NAME,
3210 "Sum of Pfams encountered : " + all_pfams_encountered.size() );
3211 ForesterUtil.programMessage( surfacing.PRG_NAME,
3212 "Pfams without a mapping : " + pfams_without_mappings_counter
3213 + " [" + ( ( 100 * pfams_without_mappings_counter )
3214 / all_pfams_encountered.size() )
3216 ForesterUtil.programMessage( surfacing.PRG_NAME,
3217 "Pfams without mapping to proc. or func. : "
3218 + pfams_without_mappings_to_bp_or_mf_counter + " ["
3219 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter )
3220 / all_pfams_encountered.size() )
3223 .programMessage( surfacing.PRG_NAME,
3224 "Pfams with a mapping : " + pfams_with_mappings_counter + " ["
3225 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() )
3227 ForesterUtil.programMessage( surfacing.PRG_NAME,
3228 "Pfams with a mapping to proc. or func. : "
3229 + pfams_with_mappings_to_bp_or_mf_counter + " ["
3230 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter )
3231 / all_pfams_encountered.size() )
3234 .programMessage( surfacing.PRG_NAME,
3235 "Pfams with mapping to biological process: " + biological_process_counter + " ["
3236 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() )
3239 .programMessage( surfacing.PRG_NAME,
3240 "Pfams with mapping to molecular function: " + molecular_function_counter + " ["
3241 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() )
3244 .programMessage( surfacing.PRG_NAME,
3245 "Pfams with mapping to cellular component: " + cellular_component_counter + " ["
3246 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() )
3248 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3249 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
3250 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3251 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter + " ["
3252 + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
3253 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3254 summary_writer.write( "# Pfams without mapping to proc. or func. : "
3255 + pfams_without_mappings_to_bp_or_mf_counter + " ["
3256 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
3257 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3258 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
3259 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
3260 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3261 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
3262 + pfams_with_mappings_to_bp_or_mf_counter + " ["
3263 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
3264 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3265 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
3266 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
3267 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3268 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
3269 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
3270 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3271 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
3272 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
3273 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3274 summary_writer.close();
3276 catch ( final IOException e ) {
3277 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes one HTML table row that renders the label in the given color.
// Parameters: l - the label text to display; c - the color used for the
// inline CSS "color:" style; w - destination writer. Throws IOException on
// write failure.
// NOTE(review): the lines between the hex color value and the closing
// </span> tag (original lines 3284-3285, presumably closing the style
// attribute and writing `l` itself) are elided in this view -- confirm
// against the full file.
3281 private final static void writeColorLabels( final String l, final Color c, final Writer w ) throws IOException {
3282 w.write( "<tr><td><b><span style=\"color:" );
// Emit the color as a CSS hex triplet, e.g. "#ff00aa" (lowercase, zero-padded).
3283 w.write( String.format( "#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue() ) );
3286 w.write( "</span></b></td></tr>" );
3287 w.write( SurfacingConstants.NL );
// Writes HTML table rows describing a single domain (domain_0) or a binary
// domain combination (domain_0 + domain_1) together with their GO
// annotations.
// - domain_id_to_go_ids_map: maps a domain id to its list of GO ids.
// - go_id_to_term_map: resolves each GO id to its GoTerm; a GO id missing
//   from this map triggers an IllegalArgumentException.
// - go_namespace_limit: if non-null, only GO terms in this namespace are
//   written.
// - domain_1: may be empty; emptiness switches between single-domain and
//   domain-combination output (see domain_count below).
// - all_go_ids: if non-null, every written GO id is collected into this set.
// Throws IOException on write failure.
// NOTE(review): several interior lines are elided in this view (e.g. the
// `out` writer parameter declaration at original line 3293, the
// domain_count adjustment around 3305-3306, and branch delimiters around
// 3311, 3316-3321, 3326-3327) -- the commentary below hedges accordingly.
3290 private static void writeDomainData( final Map<String, List<GoId>> domain_id_to_go_ids_map,
3291 final Map<GoId, GoTerm> go_id_to_term_map,
3292 final GoNameSpace go_namespace_limit,
3294 final String domain_0,
3295 final String domain_1,
3296 final String prefix_for_html,
3297 final String character_separator_for_non_html_output,
3298 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
3299 final Set<GoId> all_go_ids )
3300 throws IOException {
// Tracks whether ANY GO annotation was written, so a single empty
// placeholder row can be emitted at the end if none was.
3301 boolean any_go_annotation_present = false;
// Set when domain_0 has no GO mapping; used to decide whether the d==1
// iteration still counts as the "first" row (see `first` below).
3302 boolean first_has_no_go = false;
3303 int domain_count = 2; // To distinguish between domains and binary domain combinations.
// An empty domain_1 means single-domain output; presumably domain_count is
// reduced to 1 on the elided line(s) that followed -- TODO confirm.
3304 if ( ForesterUtil.isEmpty( domain_1 ) ) {
3307 // The following has a difficult to understand logic.
// Iterate once per domain: d==0 handles domain_0, d==1 handles domain_1.
3308 for( int d = 0; d < domain_count; ++d ) {
3309 List<GoId> go_ids = null;
3310 boolean go_annotation_present = false;
// NOTE(review): the guard selecting between the domain_0 and domain_1
// branches (original line 3311, presumably `if ( d == 0 )`) is elided.
3312 if ( domain_id_to_go_ids_map.containsKey( domain_0 ) ) {
3313 go_annotation_present = true;
3314 any_go_annotation_present = true;
3315 go_ids = domain_id_to_go_ids_map.get( domain_0 );
3318 first_has_no_go = true;
3322 if ( domain_id_to_go_ids_map.containsKey( domain_1 ) ) {
3323 go_annotation_present = true;
3324 any_go_annotation_present = true;
3325 go_ids = domain_id_to_go_ids_map.get( domain_1 );
3328 if ( go_annotation_present ) {
// "first" row of the pair gets the domain-id cells; later rows get an
// empty leading cell instead.
3329 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
// One HTML row per GO id of the current domain.
3330 for( final GoId go_id : go_ids ) {
3331 out.write( "<tr>" );
3334 writeDomainIdsToHtml( out,
3338 domain_id_to_secondary_features_maps );
3341 out.write( "<td></td>" );
// Fail loudly on an unmapped GO id rather than writing a partial row.
3343 if ( !go_id_to_term_map.containsKey( go_id ) ) {
3344 throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
3346 final GoTerm go_term = go_id_to_term_map.get( go_id );
// Namespace filter: null limit means "write everything".
3347 if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
3348 // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
3349 final String go_id_str = go_id.getId();
3350 out.write( "<td>" );
// Link the GO id to its AmiGO entry, opened in a dedicated browser window.
3351 out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
3352 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
3353 out.write( "</td><td>" );
3354 out.write( go_term.getName() );
// For a domain combination, mark which member (0 or 1) the term belongs to.
3355 if ( domain_count == 2 ) {
3356 out.write( " (" + d + ")" );
3358 out.write( "</td><td>" );
3359 // out.write( top );
3360 // out.write( "</td><td>" );
3362 out.write( go_term.getGoNameSpace().toShortString() );
3364 out.write( "</td>" );
// Collect the GO id for the caller's aggregate set, if requested.
3365 if ( all_go_ids != null ) {
3366 all_go_ids.add( go_id );
// Empty cells written when the namespace filter rejected the term
// (branch delimiters around original lines 3367-3369 are elided).
3370 out.write( "<td>" );
3371 out.write( "</td><td>" );
3372 out.write( "</td><td>" );
3373 out.write( "</td><td>" );
3374 out.write( "</td>" );
3376 out.write( "</tr>" );
3377 out.write( SurfacingConstants.NL );
3380 } // for( int d = 0; d < domain_count; ++d )
// Neither domain had any GO mapping: emit one row with the domain ids and
// empty annotation cells so the domain still appears in the table.
3381 if ( !any_go_annotation_present ) {
3382 out.write( "<tr>" );
3383 writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
3384 out.write( "<td>" );
3385 out.write( "</td><td>" );
3386 out.write( "</td><td>" );
3387 out.write( "</td><td>" );
3388 out.write( "</td>" );
3389 out.write( "</tr>" );
3390 out.write( SurfacingConstants.NL );
// Writes the leading HTML table cell(s) identifying a domain (and, for
// binary combinations, presumably a second cell for domain_1 -- the lines
// handling domain_1 are elided from this view, original lines 3407-3408;
// TODO confirm against the full file).
// - prefix_for_detailed_html: optional text written before the domain link;
//   skipped when empty.
// - domain_id_to_secondary_features_maps: accepted but not used on any
//   visible line -- possibly consumed in the elided portion.
// Throws IOException on write failure.
3394 private static void writeDomainIdsToHtml( final Writer out,
3395 final String domain_0,
3396 final String domain_1,
3397 final String prefix_for_detailed_html,
3398 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps )
3399 throws IOException {
3400 out.write( "<td>" );
3401 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
3402 out.write( prefix_for_detailed_html );
// Link the domain id to its Pfam family page.
3405 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
3406 out.write( "</td>" );
// Appends a domain (or both members of a binary domain combination) to a
// per-tree-node output file, one domain id per line.
// - domain_0: always written.
// - domain_1: written only when non-empty (i.e. for domain combinations).
// Throws IOException on write failure. The writer is owned by the caller
// and is not closed here.
3409 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
3410 final String domain_0,
3411 final String domain_1 )
3412 throws IOException {
3413 individual_files_writer.write( domain_0 );
3414 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
3415 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
3416 individual_files_writer.write( domain_1 );
3417 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes each Pfam id in the given (sorted) set to outfile_name, one per
// line, then reports a success message. IOExceptions are caught and
// reported as a warning rather than propagated (best-effort output).
// NOTE(review): the `try {` opener and the writer.close() call fall on
// lines elided from this view (around original lines 3422 and 3427-3428)
// -- confirm the writer is closed in the full file.
3421 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
3423 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
3424 for( final String pfam : pfams ) {
3425 writer.write( pfam );
3426 writer.write( ForesterUtil.LINE_SEPARATOR );
3429 ForesterUtil.programMessage( surfacing.PRG_NAME,
3430 "Wrote " + pfams.size() + " pfams to [" + outfile_name + "]" );
3432 catch ( final IOException e ) {
// Best-effort: failure to write this file is reported but not fatal.
3433 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes a NEXUS file containing the taxa block and binary-characters block
// derived from `matrix`, followed by a trees block holding `phylogeny`.
// - matrix: must be a BasicCharacterStateMatrix; any other implementation
//   triggers an IllegalArgumentException (the downcast below relies on it).
// - IOException is treated as fatal: reported via ForesterUtil.fatalError.
// NOTE(review): the `try {` opener and the writer flush/close calls fall on
// lines elided from this view (around original lines 3447 and 3454-3455).
3437 private static void writeToNexus( final String outfile_name,
3438 final CharacterStateMatrix<BinaryStates> matrix,
3439 final Phylogeny phylogeny ) {
3440 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
3441 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
// Safe downcast: guarded by the instanceof check above.
3444 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
// The trees block takes a list; wrap the single phylogeny.
3445 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
3446 phylogenies.add( phylogeny );
3448 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
3449 w.write( NexusConstants.NEXUS );
3450 w.write( ForesterUtil.LINE_SEPARATOR );
3451 my_matrix.writeNexusTaxaBlock( w );
3452 my_matrix.writeNexusBinaryChractersBlock( w );
// Support values are not written (NONE) in the trees block.
3453 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
3456 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
3458 catch ( final IOException e ) {
// Unlike writePfamsToFile, an I/O failure here aborts the program.
3459 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Convenience overload: writes two NEXUS files for the given parsimony
// calculator -- one for domain presence/absence and one for binary domain
// combination presence/absence -- by delegating to the matrix-based
// writeToNexus overload. File names are outfile_name plus the respective
// surfacing extension constants.
// NOTE(review): the trailing `phylogeny );` argument lines of both calls
// (original lines 3468 and 3471) are elided from this view.
3463 private static void writeToNexus( final String outfile_name,
3464 final DomainParsimonyCalculator domain_parsimony,
3465 final Phylogeny phylogeny ) {
3466 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
3467 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
3469 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
3470 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
3474 final static class DomainComparator implements Comparator<Domain> {
3476 final private boolean _ascending;
3478 public DomainComparator( final boolean ascending ) {
3479 _ascending = ascending;
3483 public final int compare( final Domain d0, final Domain d1 ) {
3484 if ( d0.getFrom() < d1.getFrom() ) {
3485 return _ascending ? -1 : 1;
3487 else if ( d0.getFrom() > d1.getFrom() ) {
3488 return _ascending ? 1 : -1;