3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.surfacing;
29 import java.awt.Color;
30 import java.io.BufferedWriter;
32 import java.io.FileWriter;
33 import java.io.IOException;
34 import java.io.Writer;
35 import java.text.DecimalFormat;
36 import java.text.NumberFormat;
37 import java.util.ArrayList;
38 import java.util.Arrays;
39 import java.util.Collections;
40 import java.util.Comparator;
41 import java.util.HashMap;
42 import java.util.HashSet;
43 import java.util.Iterator;
44 import java.util.List;
46 import java.util.Map.Entry;
47 import java.util.PriorityQueue;
49 import java.util.SortedMap;
50 import java.util.SortedSet;
51 import java.util.TreeMap;
52 import java.util.TreeSet;
53 import java.util.regex.Matcher;
54 import java.util.regex.Pattern;
56 import org.forester.application.surfacing;
57 import org.forester.evoinference.distance.NeighborJoining;
58 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
61 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
62 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
63 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
64 import org.forester.evoinference.matrix.distance.DistanceMatrix;
65 import org.forester.go.GoId;
66 import org.forester.go.GoNameSpace;
67 import org.forester.go.GoTerm;
68 import org.forester.go.PfamToGoMapping;
69 import org.forester.io.parsers.nexus.NexusConstants;
70 import org.forester.io.parsers.phyloxml.PhyloXmlUtil;
71 import org.forester.io.parsers.util.ParserUtils;
72 import org.forester.io.writers.PhylogenyWriter;
73 import org.forester.phylogeny.Phylogeny;
74 import org.forester.phylogeny.PhylogenyMethods;
75 import org.forester.phylogeny.PhylogenyNode;
76 import org.forester.phylogeny.PhylogenyNode.NH_CONVERSION_SUPPORT_VALUE_STYLE;
77 import org.forester.phylogeny.data.BinaryCharacters;
78 import org.forester.phylogeny.data.Confidence;
79 import org.forester.phylogeny.data.Taxonomy;
80 import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
81 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
82 import org.forester.protein.BasicDomain;
83 import org.forester.protein.BasicProtein;
84 import org.forester.protein.BinaryDomainCombination;
85 import org.forester.protein.Domain;
86 import org.forester.protein.Protein;
87 import org.forester.species.Species;
88 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
89 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
90 import org.forester.surfacing.PrintableDomainSimilarity.PRINT_OPTION;
91 import org.forester.util.AsciiHistogram;
92 import org.forester.util.BasicDescriptiveStatistics;
93 import org.forester.util.BasicTable;
94 import org.forester.util.BasicTableParser;
95 import org.forester.util.CommandLineArguments;
96 import org.forester.util.DescriptiveStatistics;
97 import org.forester.util.ForesterUtil;
98 import org.forester.util.TaxonomyColors;
// Static utility methods for the surfacing domain-architecture analysis; not instantiable.
100 public final class SurfacingUtil {
// Matches SwissProt-style taxonomy codes: 3-5 uppercase letters/digits, whole string.
102 public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
// Cache: taxonomy code -> hex color string (populated elsewhere — not visible in this view).
103 private final static Map<String, String> _TAXCODE_HEXCOLORSTRING_MAP = new HashMap<String, String>();
// Cache: taxonomy code -> taxonomic group name (populated elsewhere — not visible in this view).
106 private final static Map<String, String> _TAXCODE_TAXGROUP_MAP = new HashMap<String, String>();
// Orders domains by ascending per-sequence E-value (lower E-value = higher confidence first),
// falling back to the domains' natural ordering when the E-values are equal.
// NOTE(review): return statements and braces of this comparator are missing from this view.
109 private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {
112 public int compare( final Domain d1,
114 if ( d1.getPerSequenceEvalue() < d2
115 .getPerSequenceEvalue() ) {
119 .getPerSequenceEvalue() > d2
120 .getPerSequenceEvalue() ) {
// Tie on E-value: defer to Domain's own compareTo for a deterministic total order.
124 return d1.compareTo( d2 );
// Formats numbers with exactly three decimal places for report output.
128 private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
// Private constructor: utility class, never instantiated.
130 private SurfacingUtil() {
131 // Hidden constructor.
// Adds every binary domain combination found in 'genome' to the given sorted set.
// The set accumulates across calls; duplicates are collapsed by the set itself.
134 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
135 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
136 final SortedMap<String, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
// Expand each combinable-domains entry into its binary combinations.
137 for( final String domain_id : all_cd.keySet() ) {
138 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
142 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
143 final SortedSet<String> domain_ids ) {
144 final SortedSet<String> domains = genome.getAllDomainIds();
145 for( final String domain : domains ) {
146 domain_ids.add( domain );
// Collects the mean similarity score of every DomainSimilarity into a fresh
// descriptive-statistics accumulator.
// NOTE(review): the 'return stats;' line and closing braces are not visible in this view.
150 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
151 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
152 for( final DomainSimilarity similarity : similarities ) {
153 stats.addValue( similarity.getMeanSimilarityScore() );
// Aborts the whole program (ForesterUtil.fatalError) if 'outfile' cannot be written.
// isWritableFile returns a non-empty error description on failure, empty/null on success.
158 public static void checkForOutputFileWriteability( final File outfile ) {
159 final String error = ForesterUtil.isWritableFile( outfile );
160 if ( !ForesterUtil.isEmpty( error ) ) {
161 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Verifies, before any expensive computation starts, that every pairwise-comparison
// output file (one per unordered species pair) can be written; aborts fatally otherwise.
// NOTE(review): the switch 'case'/'break' lines are missing from this view.
165 public static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
166 final String[][] input_file_properties,
167 final String automated_pairwise_comparison_suffix,
168 final File outdir ) {
// All unordered pairs: inner index j always stays below i.
169 for( int i = 0; i < input_file_properties.length; ++i ) {
170 for( int j = 0; j < i; ++j ) {
// Column 1 of each properties row holds the species name — TODO confirm against caller.
171 final String species_i = input_file_properties[ i ][ 1 ];
172 final String species_j = input_file_properties[ j ][ 1 ];
173 String pairwise_similarities_output_file_str = surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i
174 + "_" + species_j + automated_pairwise_comparison_suffix;
175 switch ( domain_similarity_print_option ) {
// HTML print option: ensure the file name carries the .html extension.
177 if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
178 pairwise_similarities_output_file_str += ".html";
// Resolve against outdir when given; null outdir means the bare file name is used.
182 final String error = ForesterUtil
183 .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str : outdir
184 + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
185 if ( !ForesterUtil.isEmpty( error ) ) {
186 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Walks a gain/loss character-state matrix and appends, for every identifier in sorted
// order, each character (a binary domain combination) that was gained (get_gains == true)
// or lost (get_gains == false) to 'all_binary_domains_combination_gained'.
// The concrete BinaryDomainCombination subtype is chosen from 'dc_type'.
// NOTE(review): several 'else'/brace lines are missing from this view.
192 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
193 final BinaryDomainCombination.DomainCombinationType dc_type,
194 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
195 final boolean get_gains ) {
// Sort the identifiers so output order is deterministic regardless of matrix order.
196 final SortedSet<String> sorted_ids = new TreeSet<String>();
197 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
198 sorted_ids.add( matrix.getIdentifier( i ) );
200 for( final String id : sorted_ids ) {
201 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
// Keep only states matching the requested direction (GAIN vs LOSS).
202 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
203 || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
204 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
205 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
206 .createInstance( matrix.getCharacter( c ) ) );
208 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
209 all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
210 .createInstance( matrix.getCharacter( c ) ) );
// Fallback branch: undirected basic combination.
213 all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
214 .getCharacter( c ) ) );
// Builds a map from Pfam domain id to the list of GO ids mapped to it.
// A domain mapped to several GO terms accumulates all of them, in input order.
// NOTE(review): closing braces of the loop/if are missing from this view.
221 public static Map<String, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
// Presized with the mapping count (an upper bound on the number of distinct keys).
222 final Map<String, List<GoId>> domain_id_to_go_ids_map = new HashMap<String, List<GoId>>( pfam_to_go_mappings.size() );
223 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
// First sighting of this domain id: start an empty GO-id list.
224 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
225 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
227 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
229 return domain_id_to_go_ids_map;
// Parses a two-column, tab-separated file (domain id TAB secondary feature) into a
// sorted map from domain id to the set of its secondary features.
// NOTE(review): the 'throws' clause line and the 'return map;' line are missing from this view.
232 public static Map<String, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
234 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, '\t' );
235 final Map<String, Set<String>> map = new TreeMap<String, Set<String>>();
236 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
// Column 0: domain id; column 1: one secondary feature for that id.
237 final String domain_id = primary_table.getValue( 0, r );
238 if ( !map.containsKey( domain_id ) ) {
239 map.put( domain_id, new HashSet<String>() );
241 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
// Computes a neighbor-joining tree from the given distance matrix, names it after the
// output file, and writes it there.
// NOTE(review): the 'return phylogeny;' line is not visible in this view, although the
// signature declares a Phylogeny return.
246 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
// Fail fast if the target file is not writable.
247 checkForOutputFileWriteability( nj_tree_outfile );
248 final NeighborJoining nj = NeighborJoining.createInstance();
// Unchecked downcast: assumes callers always pass a symmetrical matrix — TODO confirm.
249 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
250 phylogeny.setName( nj_tree_outfile.getName() );
251 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
// Renders the run parameters as a single human-readable line for report headers.
// NOTE(review): the 'else {' lines (and the 'if ( ignore_dufs ) {' line) are missing from
// this view; the alternating append calls below are the two branches of each condition.
255 public static StringBuilder createParametersAsString( final boolean ignore_dufs,
256 final double e_value_max,
257 final int max_allowed_overlap,
258 final boolean no_engulfing_overlaps,
259 final File cutoff_scores_file,
260 final BinaryDomainCombination.DomainCombinationType dc_type ) {
261 final StringBuilder parameters_sb = new StringBuilder();
262 parameters_sb.append( "E-value: " + e_value_max );
263 if ( cutoff_scores_file != null ) {
264 parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
267 parameters_sb.append( ", Cutoff-scores-file: not-set" );
// Only report a max-overlap when it differs from the program default.
269 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
270 parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
273 parameters_sb.append( ", Max-overlap: not-set" );
275 if ( no_engulfing_overlaps ) {
276 parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
279 parameters_sb.append( ", Engulfing-overlaps: allowed" );
282 parameters_sb.append( ", Ignore-dufs: true" );
285 parameters_sb.append( ", Ignore-dufs: false" );
287 parameters_sb.append( ", DC type (if applicable): " + dc_type );
288 return parameters_sb;
291 public static void createSplitWriters( final File out_dir,
292 final String my_outfile,
293 final Map<Character, Writer> split_writers ) throws IOException {
294 split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
295 + "_domains_A.html" ) ) );
296 split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
297 + "_domains_B.html" ) ) );
298 split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
299 + "_domains_C.html" ) ) );
300 split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
301 + "_domains_D.html" ) ) );
302 split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
303 + "_domains_E.html" ) ) );
304 split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
305 + "_domains_F.html" ) ) );
306 split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
307 + "_domains_G.html" ) ) );
308 split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
309 + "_domains_H.html" ) ) );
310 split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
311 + "_domains_I.html" ) ) );
312 split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
313 + "_domains_J.html" ) ) );
314 split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
315 + "_domains_K.html" ) ) );
316 split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
317 + "_domains_L.html" ) ) );
318 split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
319 + "_domains_M.html" ) ) );
320 split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
321 + "_domains_N.html" ) ) );
322 split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
323 + "_domains_O.html" ) ) );
324 split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
325 + "_domains_P.html" ) ) );
326 split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
327 + "_domains_Q.html" ) ) );
328 split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
329 + "_domains_R.html" ) ) );
330 split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
331 + "_domains_S.html" ) ) );
332 split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
333 + "_domains_T.html" ) ) );
334 split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
335 + "_domains_U.html" ) ) );
336 split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
337 + "_domains_V.html" ) ) );
338 split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
339 + "_domains_W.html" ) ) );
340 split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
341 + "_domains_X.html" ) ) );
342 split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
343 + "_domains_Y.html" ) ) );
344 split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
345 + "_domains_Z.html" ) ) );
346 split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
347 + "_domains_0.html" ) ) );
// Builds a map from taxonomy code (e.g. "HUMAN") to numeric taxonomy id for all external
// nodes of the phylogeny, aborting fatally on missing or duplicate data.
// NOTE(review): the 'm.put( c, iid )' line and 'return m;' are missing from this view.
350 public static Map<String, Integer> createTaxCodeToIdMap( final Phylogeny phy ) {
351 final Map<String, Integer> m = new HashMap<String, Integer>();
352 for( final PhylogenyNodeIterator iter = phy.iteratorExternalForward(); iter.hasNext(); ) {
353 final PhylogenyNode n = iter.next();
354 if ( n.getNodeData().isHasTaxonomy() ) {
355 final Taxonomy t = n.getNodeData().getTaxonomy();
356 final String c = t.getTaxonomyCode();
// Only nodes with a non-empty taxonomy code participate in the mapping.
357 if ( !ForesterUtil.isEmpty( c ) ) {
// NOTE(review): this null check is dead — getTaxonomy() was already dereferenced at
// line 355 above; it probably should guard getIdentifier() instead. TODO confirm.
358 if ( n.getNodeData().getTaxonomy() == null ) {
359 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
361 final String id = n.getNodeData().getTaxonomy().getIdentifier().getValue();
362 if ( ForesterUtil.isEmpty( id ) ) {
363 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
// Both the code and the numeric id must be unique across the whole tree.
365 if ( m.containsKey( c ) ) {
366 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy code " + c + " is not unique" );
368 final int iid = Integer.valueOf( id );
369 if ( m.containsValue( iid ) ) {
370 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy id " + iid + " is not unique" );
// External node without taxonomy data at all: fatal.
376 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy for node " + n );
382 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
383 final Detailedness detailedness ) {
384 for( final DomainSimilarity domain_similarity : domain_similarities ) {
385 if ( domain_similarity instanceof PrintableDomainSimilarity ) {
386 final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
387 printable_domain_similarity.setDetailedness( detailedness );
// Writes, for each protein (optionally restricted to one species) that contains the
// query domains in the given N-to-C order, one separated record: species id, protein id,
// each distinct domain id with its total count, description, and accession.
// NOTE(review): the 'final Writer out' parameter line and several else-branch/brace
// lines are missing from this view.
392 public static void doit( final List<Protein> proteins,
393 final List<String> query_domain_ids_nc_order,
395 final String separator,
396 final String limit_to_species,
397 final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
398 for( final Protein protein : proteins ) {
// Empty species filter means "all species"; otherwise compare case-insensitively.
399 if ( ForesterUtil.isEmpty( limit_to_species )
400 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
// 'true' second argument: require the domains in N-to-C order — TODO confirm semantics.
401 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
402 out.write( protein.getSpecies().getSpeciesId() );
403 out.write( separator );
404 out.write( protein.getProteinId().getId() );
405 out.write( separator );
// Emit each distinct domain id only once, together with its total count.
407 final Set<String> visited_domain_ids = new HashSet<String>();
408 boolean first = true;
409 for( final Domain domain : protein.getProteinDomains() ) {
410 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
411 visited_domain_ids.add( domain.getDomainId() );
418 out.write( domain.getDomainId() );
420 out.write( "" + domain.getTotalCount() );
425 out.write( separator );
// Description and accession are written only when present and not the NONE marker.
426 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
427 .equals( SurfacingConstants.NONE ) ) ) {
428 out.write( protein.getDescription() );
430 out.write( separator );
431 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
432 .equals( SurfacingConstants.NONE ) ) ) {
433 out.write( protein.getAccession() );
435 out.write( SurfacingConstants.NL );
// Computes domains-per-protein statistics for one genome, updates the cross-genome
// accumulator and histogram, classifies each domain as always-single / never-single /
// sometimes-single-sometimes-not, and writes one tab-separated summary row.
// NOTE(review): the 'try {' line, several 'else {' lines and braces are missing from
// this view; the catch at the end belongs to the writing section.
442 public static void domainsPerProteinsStatistics( final String genome,
443 final List<Protein> protein_list,
444 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
445 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
446 final SortedSet<String> domains_which_are_always_single,
447 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
448 final SortedSet<String> domains_which_never_single,
449 final Writer writer ) {
450 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
451 for( final Protein protein : protein_list ) {
452 final int domains = protein.getNumberOfProteinDomains();
453 //System.out.println( domains );
454 stats.addValue( domains );
455 all_genomes_domains_per_potein_stats.addValue( domains );
// Histogram: number of proteins observed for each domain count.
456 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
457 all_genomes_domains_per_potein_histo.put( domains, 1 );
460 all_genomes_domains_per_potein_histo.put( domains,
461 1 + all_genomes_domains_per_potein_histo.get( domains ) );
// Single-domain protein: its domain is 'always single' unless already seen multi-domain.
463 if ( domains == 1 ) {
464 final String domain = protein.getProteinDomain( 0 ).getDomainId();
465 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
466 if ( domains_which_never_single.contains( domain ) ) {
// Seen multi-domain before, now single: reclassify as 'sometimes'.
467 domains_which_never_single.remove( domain );
468 domains_which_are_sometimes_single_sometimes_not.add( domain );
471 domains_which_are_always_single.add( domain );
// Multi-domain protein: mirror-image classification for each of its domains.
475 else if ( domains > 1 ) {
476 for( final Domain d : protein.getProteinDomains() ) {
477 final String domain = d.getDomainId();
478 // System.out.println( domain );
479 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
480 if ( domains_which_are_always_single.contains( domain ) ) {
481 domains_which_are_always_single.remove( domain );
482 domains_which_are_sometimes_single_sometimes_not.add( domain );
485 domains_which_never_single.add( domain );
// Summary row: genome, mean, sd (only when n >= 2), median, n, min, max — tab separated.
492 writer.write( genome );
493 writer.write( "\t" );
494 if ( stats.getN() >= 1 ) {
495 writer.write( stats.arithmeticMean() + "" );
496 writer.write( "\t" );
497 if ( stats.getN() >= 2 ) {
498 writer.write( stats.sampleStandardDeviation() + "" );
503 writer.write( "\t" );
504 writer.write( stats.median() + "" );
505 writer.write( "\t" );
506 writer.write( stats.getN() + "" );
507 writer.write( "\t" );
508 writer.write( stats.getMin() + "" );
509 writer.write( "\t" );
510 writer.write( stats.getMax() + "" );
// Empty-genome fallback: emit the empty columns so the table stays aligned.
513 writer.write( "\t" );
514 writer.write( "\t" );
515 writer.write( "\t" );
517 writer.write( "\t" );
518 writer.write( "\t" );
520 writer.write( "\n" );
522 catch ( final IOException e ) {
// Writes a domain-length analysis report to 'outfile': per-species mean-based statistics,
// outlier species per domain (Z >= 1.5), an overall histogram, per-species Z-scores, and
// species with |Z| >= 1.0 together with their statistical summaries.
// NOTE(review): the tail of the method (closing the writer, remaining braces) lies
// outside this view.
527 public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
528 final int number_of_genomes,
529 final DomainLengthsTable domain_lengths_table,
530 final File outfile ) throws IOException {
531 final DecimalFormat df = new DecimalFormat( "#.00" );
532 checkForOutputFileWriteability( outfile );
533 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
534 out.write( "MEAN BASED STATISTICS PER SPECIES" );
535 out.write( ForesterUtil.LINE_SEPARATOR );
536 out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
537 out.write( ForesterUtil.LINE_SEPARATOR );
538 out.write( ForesterUtil.LINE_SEPARATOR );
539 final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
540 out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
541 out.write( ForesterUtil.LINE_SEPARATOR );
// One line per domain that has at least one outlier species at |Z| >= 1.5.
542 for( final DomainLengths domain_lengths : domain_lengths_list ) {
543 final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
544 if ( species_list.size() > 0 ) {
545 out.write( domain_lengths.getDomainId() + "\t" );
546 for( final Species species : species_list ) {
547 out.write( species + "\t" );
549 out.write( ForesterUtil.LINE_SEPARATOR );
552 out.write( ForesterUtil.LINE_SEPARATOR );
553 out.write( ForesterUtil.LINE_SEPARATOR );
554 out.write( "OUTLIER SPECIES (Z 1.0)" );
555 out.write( ForesterUtil.LINE_SEPARATOR );
556 final DescriptiveStatistics stats_for_all_species = domain_lengths_table
557 .calculateMeanBasedStatisticsForAllSpecies();
558 out.write( stats_for_all_species.asSummary() );
559 out.write( ForesterUtil.LINE_SEPARATOR );
560 final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
561 out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
562 out.write( ForesterUtil.LINE_SEPARATOR );
// Z-score uses the sample standard deviation / mean of the all-species statistics.
563 final double population_sd = stats_for_all_species.sampleStandardDeviation();
564 final double population_mean = stats_for_all_species.arithmeticMean();
// First pass: Z-score of every species.
565 for( final Species species : domain_lengths_table.getSpecies() ) {
566 final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
567 final double z = ( x - population_mean ) / population_sd;
568 out.write( species + "\t" + z );
569 out.write( ForesterUtil.LINE_SEPARATOR );
571 out.write( ForesterUtil.LINE_SEPARATOR );
// Second pass: only species with |Z| >= 1.0, with a formatted Z and their summary.
572 for( final Species species : domain_lengths_table.getSpecies() ) {
573 final DescriptiveStatistics stats_for_species = domain_lengths_table
574 .calculateMeanBasedStatisticsForSpecies( species );
575 final double x = stats_for_species.arithmeticMean();
576 final double z = ( x - population_mean ) / population_sd;
577 if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
578 out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
579 out.write( ForesterUtil.LINE_SEPARATOR );
587 * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
591 * @param all_bin_domain_combinations_changed
592 * @param sum_of_all_domains_encountered
593 * @param all_bin_domain_combinations_encountered
594 * @param is_gains_analysis
595 * @param protein_length_stats_by_dc
596 * @throws IOException
// Writes how often each binary domain combination was gained (or lost) plus summary
// totals. In loss mode it also lists never-lost combinations, which REMOVES the changed
// ones from 'all_bin_domain_combinations_encountered' (see the warning above).
// NOTE(review): the declarations of the 'one' / 'above_one' / 'never_lost' counters and
// several 'if'/brace lines are missing from this view.
598 public static void executeFitchGainsAnalysis( final File output_file,
599 final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
600 final int sum_of_all_domains_encountered,
601 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
602 final boolean is_gains_analysis ) throws IOException {
603 checkForOutputFileWriteability( output_file );
604 final Writer out = ForesterUtil.createBufferedWriter( output_file );
// Map each changed combination to the number of times it changed.
605 final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
606 .listToSortedCountsMap( all_bin_domain_combinations_changed );
607 final SortedSet<String> all_domains_in_combination_changed_more_than_once = new TreeSet<String>();
608 final SortedSet<String> all_domains_in_combination_changed_only_once = new TreeSet<String>();
611 for( final Object bdc_object : bdc_to_counts.keySet() ) {
612 final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
613 final int count = bdc_to_counts.get( bdc_object );
// Sanity check: every listed combination must have changed at least once.
615 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "count < 1 " );
617 out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
// count > 1 branch: record both member domains as changed-more-than-once.
619 all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
620 all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
623 else if ( count == 1 ) {
624 all_domains_in_combination_changed_only_once.add( bdc.getId0() );
625 all_domains_in_combination_changed_only_once.add( bdc.getId1() );
629 final int all = all_bin_domain_combinations_encountered.size();
631 if ( !is_gains_analysis ) {
// Loss analysis: whatever never appears in the changed list was never lost.
632 all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
633 never_lost = all_bin_domain_combinations_encountered.size();
634 for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
635 out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
638 if ( is_gains_analysis ) {
// NOTE(review): 'apppearing' in two output strings below is a typo for 'appearing';
// left untouched here because these are runtime strings, not comments.
639 out.write( "Sum of all distinct domain combinations appearing once : " + one
640 + ForesterUtil.LINE_SEPARATOR );
641 out.write( "Sum of all distinct domain combinations appearing more than once : " + above_one
642 + ForesterUtil.LINE_SEPARATOR );
643 out.write( "Sum of all distinct domains in combinations apppearing only once : "
644 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
645 out.write( "Sum of all distinct domains in combinations apppearing more than once: "
646 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
649 out.write( "Sum of all distinct domain combinations never lost : " + never_lost
650 + ForesterUtil.LINE_SEPARATOR );
651 out.write( "Sum of all distinct domain combinations lost once : " + one
652 + ForesterUtil.LINE_SEPARATOR );
653 out.write( "Sum of all distinct domain combinations lost more than once : " + above_one
654 + ForesterUtil.LINE_SEPARATOR );
655 out.write( "Sum of all distinct domains in combinations lost only once : "
656 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
657 out.write( "Sum of all distinct domains in combinations lost more than once: "
658 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
660 out.write( "All binary combinations : " + all
661 + ForesterUtil.LINE_SEPARATOR );
662 out.write( "All domains : "
663 + sum_of_all_domains_encountered );
665 ForesterUtil.programMessage( surfacing.PRG_NAME,
666 "Wrote fitch domain combination dynamics counts analysis to \"" + output_file
672 * @param all_binary_domains_combination_lost_fitch
673 * @param use_last_in_fitch_parsimony
674 * @param consider_directedness_and_adjacency_for_bin_combinations
675 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
676 * which were gained under unweighted (Fitch) parsimony.
678 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
679 final boolean radomize_fitch_parsimony,
680 final String outfile_name,
681 final DomainParsimonyCalculator domain_parsimony,
682 final Phylogeny phylogeny,
683 final Map<String, List<GoId>> domain_id_to_go_ids_map,
684 final Map<GoId, GoTerm> go_id_to_term_map,
685 final GoNameSpace go_namespace_limit,
686 final String parameters_str,
687 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
688 final SortedSet<String> positive_filter,
689 final boolean output_binary_domain_combinations_for_graphs,
690 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
691 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
692 final BinaryDomainCombination.DomainCombinationType dc_type,
693 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
694 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
695 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain,
696 final Map<String, Integer> tax_code_to_id_map,
697 final boolean write_to_nexus,
698 final boolean use_last_in_fitch_parsimony ) {
699 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
700 final String date_time = ForesterUtil.getCurrentDateTime();
701 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
702 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
703 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
704 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
705 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
706 if ( write_to_nexus ) {
707 writeToNexus( outfile_name, domain_parsimony, phylogeny );
711 Phylogeny local_phylogeny_l = phylogeny.copy();
712 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
713 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
716 domain_parsimony.executeDolloParsimonyOnDomainPresence();
718 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
719 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
720 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
721 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
722 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
723 CharacterStateMatrix.GainLossStates.GAIN,
724 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
726 ForesterUtil.LINE_SEPARATOR,
728 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
729 CharacterStateMatrix.GainLossStates.LOSS,
730 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
732 ForesterUtil.LINE_SEPARATOR,
734 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
735 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
737 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
741 domain_parsimony.getGainLossMatrix(),
742 CharacterStateMatrix.GainLossStates.GAIN,
743 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
745 ForesterUtil.LINE_SEPARATOR,
746 "Dollo Parsimony | Gains | Domains",
748 domain_id_to_secondary_features_maps,
749 all_pfams_encountered,
750 all_pfams_gained_as_domains,
752 tax_code_to_id_map );
753 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
757 domain_parsimony.getGainLossMatrix(),
758 CharacterStateMatrix.GainLossStates.LOSS,
759 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
761 ForesterUtil.LINE_SEPARATOR,
762 "Dollo Parsimony | Losses | Domains",
764 domain_id_to_secondary_features_maps,
765 all_pfams_encountered,
766 all_pfams_lost_as_domains,
768 tax_code_to_id_map );
769 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
770 // go_id_to_term_map,
771 // go_namespace_limit,
773 // domain_parsimony.getGainLossMatrix(),
775 // outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
777 // ForesterUtil.LINE_SEPARATOR,
778 // "Dollo Parsimony | Present | Domains",
780 // domain_id_to_secondary_features_maps,
781 // all_pfams_encountered,
783 // "_dollo_present_d",
784 // tax_code_to_id_map );
785 preparePhylogeny( local_phylogeny_l,
788 "Dollo parsimony on domain presence/absence",
789 "dollo_on_domains_" + outfile_name,
791 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
792 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
794 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
795 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
797 catch ( final IOException e ) {
799 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
801 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
802 // FITCH DOMAIN COMBINATIONS
803 // -------------------------
804 local_phylogeny_l = phylogeny.copy();
805 String randomization = "no";
806 if ( radomize_fitch_parsimony ) {
807 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
808 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
811 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( use_last_in_fitch_parsimony );
813 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
814 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
815 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
816 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
818 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
819 CharacterStateMatrix.GainLossStates.GAIN,
820 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
822 ForesterUtil.LINE_SEPARATOR,
824 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
825 CharacterStateMatrix.GainLossStates.LOSS,
827 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
829 ForesterUtil.LINE_SEPARATOR,
831 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
832 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
833 if ( all_binary_domains_combination_gained_fitch != null ) {
834 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
836 all_binary_domains_combination_gained_fitch,
839 if ( all_binary_domains_combination_lost_fitch != null ) {
840 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
842 all_binary_domains_combination_lost_fitch,
845 if ( output_binary_domain_combinations_for_graphs ) {
847 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
848 .getGainLossMatrix(),
851 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
853 ForesterUtil.LINE_SEPARATOR,
854 BinaryDomainCombination.OutputFormat.DOT );
857 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
861 domain_parsimony.getGainLossMatrix(),
862 CharacterStateMatrix.GainLossStates.GAIN,
863 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
865 ForesterUtil.LINE_SEPARATOR,
866 "Fitch Parsimony | Gains | Domain Combinations",
869 all_pfams_encountered,
870 all_pfams_gained_as_dom_combinations,
872 tax_code_to_id_map );
873 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
877 domain_parsimony.getGainLossMatrix(),
878 CharacterStateMatrix.GainLossStates.LOSS,
879 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
881 ForesterUtil.LINE_SEPARATOR,
882 "Fitch Parsimony | Losses | Domain Combinations",
885 all_pfams_encountered,
886 all_pfams_lost_as_dom_combinations,
888 tax_code_to_id_map );
889 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
890 // go_id_to_term_map,
891 // go_namespace_limit,
893 // domain_parsimony.getGainLossMatrix(),
895 // outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
897 // ForesterUtil.LINE_SEPARATOR,
898 // "Fitch Parsimony | Present | Domain Combinations",
901 // all_pfams_encountered,
903 // "_fitch_present_dc",
904 // tax_code_to_id_map );
905 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
908 all_pfams_encountered );
909 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
910 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
911 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
912 all_pfams_gained_as_dom_combinations );
913 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
914 preparePhylogeny( local_phylogeny_l,
917 "Fitch parsimony on binary domain combination presence/absence randomization: "
919 "fitch_on_binary_domain_combinations_" + outfile_name,
921 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
922 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
923 calculateIndependentDomainCombinationGains( local_phylogeny_l,
925 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
927 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
929 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
931 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
932 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
933 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
934 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
935 protein_length_stats_by_dc,
936 domain_number_stats_by_dc,
937 domain_length_stats_by_domain );
// Runs Dollo parsimony on secondary-feature presence/absence and Fitch parsimony on
// secondary binary domain combinations, writing gain/loss matrices, gain/loss lists,
// decorated phylogenies, and independent-DC-gain reports for the given genomes.
// NOTE(review): this chunk elides some source lines (several argument lists and braces
// are incomplete here) — edit only with the full file at hand.
941 public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
942 final DomainParsimonyCalculator secondary_features_parsimony,
943 final Phylogeny phylogeny,
944 final String parameters_str,
945 final Map<Species, MappingResults> mapping_results_map,
946 final boolean use_last_in_fitch_parsimony ) {
947 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
948 final String date_time = ForesterUtil.getCurrentDateTime();
949 System.out.println();
// Write the presence/absence matrix in NEXUS format before running parsimony.
950 writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
951 secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
// Work on a copy so the caller's phylogeny is not decorated/mutated.
953 Phylogeny local_phylogeny_copy = phylogeny.copy();
954 secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
955 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
956 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
957 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
958 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
// Per-state (GAIN / LOSS / present) flat-file listings of the Dollo gain/loss matrix.
960 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
961 CharacterStateMatrix.GainLossStates.GAIN,
963 + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
965 ForesterUtil.LINE_SEPARATOR,
968 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
969 CharacterStateMatrix.GainLossStates.LOSS,
971 + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
973 ForesterUtil.LINE_SEPARATOR,
976 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
979 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
981 ForesterUtil.LINE_SEPARATOR,
// Decorate and persist the Dollo-annotated tree.
983 preparePhylogeny( local_phylogeny_copy,
984 secondary_features_parsimony,
986 "Dollo parsimony on secondary feature presence/absence",
987 "dollo_on_secondary_features_" + outfile_name,
989 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
990 + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
991 // FITCH DOMAIN COMBINATIONS
992 // -------------------------
993 local_phylogeny_copy = phylogeny.copy();
// No randomization is ever applied in the secondary-features Fitch run.
994 final String randomization = "no";
995 secondary_features_parsimony
996 .executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( use_last_in_fitch_parsimony );
997 preparePhylogeny( local_phylogeny_copy,
998 secondary_features_parsimony,
1000 "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
1002 "fitch_on_binary_domain_combinations_" + outfile_name,
1004 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
1005 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
// Trailing nulls: no protein-length / domain-number / domain-length statistics are
// collected for the mapped (secondary-feature) analysis.
1006 calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
1007 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
1008 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
1009 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
1010 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
1011 + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
1012 + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
// Runs the "plus-minus" copy-number difference analysis: compares domain copy numbers
// between high-copy base/target genome sets and a low-copy set, writing HTML/plain
// reports, GO-id lists, and per-protein output files derived from output_file.
// NOTE(review): source lines are elided in this chunk (e.g. the try { opener and some
// calculateCopyNumberDifferences arguments) — edit only with the full file.
1015 public static void executePlusMinusAnalysis( final File output_file,
1016 final List<String> plus_minus_analysis_high_copy_base,
1017 final List<String> plus_minus_analysis_high_copy_target,
1018 final List<String> plus_minus_analysis_low_copy,
1019 final List<GenomeWideCombinableDomains> gwcd_list,
1020 final SortedMap<Species, List<Protein>> protein_lists_per_species,
1021 final Map<String, List<GoId>> domain_id_to_go_ids_map,
1022 final Map<GoId, GoTerm> go_id_to_term_map,
1023 final List<Object> plus_minus_analysis_numbers ) {
// Collect the species ids of all genomes under analysis.
1024 final Set<String> all_spec = new HashSet<String>();
1025 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1026 all_spec.add( gwcd.getSpecies().getSpeciesId() );
// Output files are all named relative to output_file with fixed suffixes.
1028 final File html_out_dom = new File( output_file + surfacing.PLUS_MINUS_DOM_SUFFIX_HTML );
1029 final File plain_out_dom = new File( output_file + surfacing.PLUS_MINUS_DOM_SUFFIX );
1030 final File html_out_dc = new File( output_file + surfacing.PLUS_MINUS_DC_SUFFIX_HTML );
1031 final File all_domains_go_ids_out_dom = new File( output_file + surfacing.PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
1032 final File passing_domains_go_ids_out_dom = new File( output_file
1033 + surfacing.PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
1034 final File proteins_file_base = new File( output_file + "" );
// plus_minus_analysis_numbers is positional: [0] = min count difference (Integer),
// [1] = factor (Double).
1035 final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
1036 final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
1038 DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
1039 protein_lists_per_species,
1040 plus_minus_analysis_high_copy_base,
1041 plus_minus_analysis_high_copy_target,
1042 plus_minus_analysis_low_copy,
1048 domain_id_to_go_ids_map,
1050 all_domains_go_ids_out_dom,
1051 passing_domains_go_ids_out_dom,
1052 proteins_file_base );
// I/O failure is fatal for the whole program, matching the rest of this class.
1054 catch ( final IOException e ) {
1055 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1057 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
1058 + html_out_dom + "\"" );
1059 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
1060 + plain_out_dom + "\"" );
1061 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \"" + html_out_dc
1063 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based passing GO ids to \""
1064 + passing_domains_go_ids_out_dom + "\"" );
1065 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based all GO ids to \""
1066 + all_domains_go_ids_out_dom + "\"" );
// Writes, to a Writer, one line per protein that contains the query domains in the
// given N-to-C order: species, protein id, unique domain ids with counts, description,
// and accession, separated by 'separator'. Optionally restricted to one species.
// NOTE(review): the Writer parameter (presumably `final Writer out`) is on a source
// line elided from this chunk — `out` below refers to it.
1069 public static void extractProteinNames( final List<Protein> proteins,
1070 final List<String> query_domain_ids_nc_order,
1072 final String separator,
1073 final String limit_to_species ) throws IOException {
1074 for( final Protein protein : proteins ) {
// Empty limit_to_species means: do not filter by species.
1075 if ( ForesterUtil.isEmpty( limit_to_species )
1076 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
// Only proteins containing the query domains in N-to-C order (strict = true).
1077 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
1078 out.write( protein.getSpecies().getSpeciesId() );
1079 out.write( separator );
1080 out.write( protein.getProteinId().getId() );
1081 out.write( separator );
// Emit each domain id only once, with its total count on the protein.
1083 final Set<String> visited_domain_ids = new HashSet<String>();
1084 boolean first = true;
1085 for( final Domain domain : protein.getProteinDomains() ) {
1086 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
1087 visited_domain_ids.add( domain.getDomainId() );
1094 out.write( domain.getDomainId() );
1096 out.write( "" + domain.getTotalCount() );
1101 out.write( separator );
// Description/accession are skipped when absent or equal to the NONE sentinel.
1102 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1103 .equals( SurfacingConstants.NONE ) ) ) {
1104 out.write( protein.getDescription() );
1106 out.write( separator );
1107 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1108 .equals( SurfacingConstants.NONE ) ) ) {
1109 out.write( protein.getAccession() );
1111 out.write( SurfacingConstants.NL );
// Per-species variant: for every protein (across all species) that carries the given
// domain, writes species, protein id, domain id, the matching domain positions, and the
// full (E-value-filtered, position-sorted) domain architecture, then description and
// accession. domain_e_cutoff < 0 disables per-domain E-value filtering.
// NOTE(review): the Writer parameter and several lines (e.g. `int prev_to = ...`) are
// elided from this chunk — edit only with the full file.
1118 public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
1119 final String domain_id,
1121 final String separator,
1122 final String limit_to_species,
1123 final double domain_e_cutoff ) throws IOException {
1124 //System.out.println( "Per domain E-value: " + domain_e_cutoff );
1125 for( final Species species : protein_lists_per_species.keySet() ) {
1126 //System.out.println( species + ":" );
1127 for( final Protein protein : protein_lists_per_species.get( species ) ) {
1128 if ( ForesterUtil.isEmpty( limit_to_species )
1129 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
1130 final List<Domain> domains = protein.getProteinDomains( domain_id );
1131 if ( domains.size() > 0 ) {
1132 out.write( protein.getSpecies().getSpeciesId() );
1133 out.write( separator );
1134 out.write( protein.getProteinId().getId() );
1135 out.write( separator );
1136 out.write( domain_id.toString() );
1137 out.write( separator );
// Write from-to ranges of all matching domains passing the E-value cutoff.
1139 for( final Domain domain : domains ) {
1140 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
1142 out.write( domain.getFrom() + "-" + domain.getTo() );
// prev_to tracks the previous domain's end to compute inter-domain gaps.
1143 if ( prev_to >= 0 ) {
1144 final int l = domain.getFrom() - prev_to;
1145 // System.out.println( l );
1147 prev_to = domain.getTo();
1151 out.write( separator );
// Collect ALL domains of the protein (not just the queried one) passing the cutoff...
1152 final List<Domain> domain_list = new ArrayList<Domain>();
1153 for( final Domain domain : protein.getProteinDomains() ) {
1154 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
1155 domain_list.add( domain );
1158 final Domain domain_ary[] = new Domain[ domain_list.size() ];
1159 for( int i = 0; i < domain_list.size(); ++i ) {
1160 domain_ary[ i ] = domain_list.get( i );
// ...and sort them by position to print the protein's domain architecture.
1162 Arrays.sort( domain_ary, new DomainComparator( true ) );
1164 boolean first = true;
1165 for( final Domain domain : domain_ary ) {
1172 out.write( domain.getDomainId().toString() );
1173 out.write( ":" + domain.getFrom() + "-" + domain.getTo() );
1174 out.write( ":" + domain.getPerDomainEvalue() );
// Description/accession are skipped when absent or equal to the NONE sentinel.
1177 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
1178 .equals( SurfacingConstants.NONE ) ) ) {
1179 out.write( protein.getDescription() );
1181 out.write( separator );
1182 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
1183 .equals( SurfacingConstants.NONE ) ) ) {
1184 out.write( protein.getAccession() );
1186 out.write( SurfacingConstants.NL );
// Returns the sorted union of all domain ids found across all genomes in gwcd_list.
1194 public static SortedSet<String> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
1195 final SortedSet<String> all_domains_ids = new TreeSet<String>();
1196 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1197 final Set<String> all_domains = gwcd.getAllDomainIds();
1198 // for( final Domain domain : all_domains ) {
1199 all_domains_ids.addAll( all_domains );
1202 return all_domains_ids;
// Counts occurrences of each domain id across all proteins, returned as a sorted map
// of domain id -> count.
// NOTE(review): the else-branch (first occurrence -> put(id, 1), presumably) and the
// return statement are on lines elided from this chunk.
1205 public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
1206 final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
1207 for( final Protein protein_domain_collection : protein_domain_collections ) {
1208 for( final Object name : protein_domain_collection.getProteinDomains() ) {
1209 final BasicDomain protein_domain = ( BasicDomain ) name;
1210 final String id = protein_domain.getDomainId();
1211 if ( map.containsKey( id ) ) {
1212 map.put( id, map.get( id ) + 1 );
// Counts nodes in p that have neither a name nor a taxonomy scientific/common name;
// appends each such node's parent name to 'names' to help the caller report them.
// NOTE(review): the counter declaration/increment and the return statement are on
// lines elided from this chunk.
1222 public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
1223 final PhylogenyNodeIterator it = p.iteratorPostorder();
1225 while ( it.hasNext() ) {
1226 final PhylogenyNode n = it.next();
// A node "lacks a name" only if name, scientific name, AND common name are all empty.
1227 if ( ForesterUtil.isEmpty( n.getName() )
1228 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1229 .getScientificName() ) )
1230 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1231 .getCommonName() ) ) ) {
1232 if ( n.getParent() != null ) {
1233 names.append( " " );
1234 names.append( n.getParent().getName() );
// Diagnostic dump of the external descendants of the unnamed node.
// NOTE(review): raw type List and printing l.toString() once per element look
// accidental — confirm against the full file before changing.
1236 final List l = n.getAllExternalDescendants();
1237 for( final Object object : l ) {
1238 System.out.println( l.toString() );
// Writes msg plus a line separator to w; any IOException aborts the program.
// NOTE(review): the try { opener and the w.write( msg ) call are on lines elided
// from this chunk.
1246 public static void log( final String msg, final Writer w ) {
1249 w.write( ForesterUtil.LINE_SEPARATOR );
1251 catch ( final IOException e ) {
1252 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
// Reads one phylogeny from each input tree file, validating that each file is readable,
// contains exactly one phyloXML phylogeny, is non-empty, rooted, and has at least as
// many external nodes as genomes; then prepares each tree for parsimony analyses.
// Any violation is fatal. Returns the array of prepared trees.
// NOTE(review): some lines (try { openers, counter init `int i = 0;`, final return)
// are elided from this chunk.
1256 public static Phylogeny[] obtainAndPreProcessIntrees( final File[] intree_files,
1257 final int number_of_genomes,
1258 final String[][] input_file_properties ) {
1259 final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
1261 for( final File intree_file : intree_files ) {
1262 Phylogeny intree = null;
1263 final String error = ForesterUtil.isReadableFile( intree_file );
1264 if ( !ForesterUtil.isEmpty( error ) ) {
1265 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: "
// Parser is chosen from the file type; exactly one phylogeny per file is required.
1269 final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
1270 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
1271 if ( p_array.length < 1 ) {
1272 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
1273 + "] does not contain any phylogeny in phyloXML format" );
1275 else if ( p_array.length > 1 ) {
1276 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
1277 + "] contains more than one phylogeny in phyloXML format" );
1279 intree = p_array[ 0 ];
1281 catch ( final Exception e ) {
1282 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file
1285 if ( ( intree == null ) || intree.isEmpty() ) {
1286 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
1288 if ( !intree.isRooted() ) {
1289 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
1291 if ( intree.getNumberOfExternalNodes() < number_of_genomes ) {
1292 ForesterUtil.fatalError( surfacing.PRG_NAME,
1293 "number of external nodes [" + intree.getNumberOfExternalNodes()
1294 + "] of input tree [" + intree_file
1295 + "] is smaller than the number of genomes the be analyzed ["
1296 + number_of_genomes + "]" );
// Every node must carry some usable name; report parents of unnamed nodes.
1298 final StringBuilder parent_names = new StringBuilder();
1299 final int nodes_lacking_name = getNumberOfNodesLackingName( intree, parent_names );
1300 if ( nodes_lacking_name > 0 ) {
1301 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] has "
1302 + nodes_lacking_name + " node(s) lacking a name [parent names:" + parent_names + "]" );
1304 preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
// Non-binary trees are allowed but warned about (parsimony results may be affected).
1305 if ( !intree.isCompletelyBinary() ) {
1306 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "input tree [" + intree_file
1307 + "] is not completely binary" );
1309 intrees[ i++ ] = intree;
// Reads and returns the single phylogeny from intree_file, with the same validation
// as obtainAndPreProcessIntrees (readable, exactly one tree, non-empty, rooted) but
// without parsimony preprocessing. Any violation is fatal.
// NOTE(review): the assignment `intree = phys[ 0 ];` and the return statement are on
// lines elided from this chunk.
1314 public static Phylogeny obtainFirstIntree( final File intree_file ) {
1315 Phylogeny intree = null;
1316 final String error = ForesterUtil.isReadableFile( intree_file );
1317 if ( !ForesterUtil.isEmpty( error ) ) {
1318 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: " + error );
1321 final Phylogeny[] phys = ParserBasedPhylogenyFactory.getInstance()
1322 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
1323 if ( phys.length < 1 ) {
1324 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
1325 + "] does not contain any phylogeny in phyloXML format" );
1327 else if ( phys.length > 1 ) {
1328 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
1329 + "] contains more than one phylogeny in phyloXML format" );
1333 catch ( final Exception e ) {
1334 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file + "]: "
1337 if ( ( intree == null ) || intree.isEmpty() ) {
1338 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
1340 if ( !intree.isRooted() ) {
1341 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
// Returns a "#rrggbb" hex color for a taxonomy code, derived from the code's taxonomy
// group in the given tree. Results are memoized in _TAXCODE_HEXCOLORSTRING_MAP, so a
// code already in the map never needs the tree. Throws IllegalArgumentException when
// no color can be determined.
// NOTE(review): a null-check line (presumably `if ( c == null ) {`) before the throw
// is elided from this chunk.
1346 public static String obtainHexColorStringDependingOnTaxonomyGroup( final String tax_code, final Phylogeny phy )
1347 throws IllegalArgumentException {
1348 if ( !_TAXCODE_HEXCOLORSTRING_MAP.containsKey( tax_code ) ) {
1349 if ( ( phy != null ) && !phy.isEmpty() ) {
1350 // final List<PhylogenyNode> nodes = phy.getNodesViaTaxonomyCode( tax_code );
1352 // if ( ( nodes == null ) || nodes.isEmpty() ) {
1353 // throw new IllegalArgumentException( "code " + tax_code + " is not found" );
1355 // if ( nodes.size() != 1 ) {
1356 // throw new IllegalArgumentException( "code " + tax_code + " is not unique" );
1358 // PhylogenyNode n = nodes.get( 0 );
1359 // while ( n != null ) {
1360 // if ( n.getNodeData().isHasTaxonomy()
1361 // && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
1362 // c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( n.getNodeData().getTaxonomy()
1363 // .getScientificName(), tax_code );
1365 // if ( ( c == null ) && !ForesterUtil.isEmpty( n.getName() ) ) {
1366 // c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( n.getName(), tax_code );
1368 // if ( c != null ) {
1371 // n = n.getParent();
// Current implementation delegates group lookup to obtainTaxonomyGroup (also memoized).
1373 final String group = obtainTaxonomyGroup( tax_code, phy );
1374 Color c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( group );
1376 throw new IllegalArgumentException( "no color found for taxonomy code \"" + tax_code + "\"" );
1378 final String hex = String.format( "#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue() );
1379 _TAXCODE_HEXCOLORSTRING_MAP.put( tax_code, hex );
1382 throw new IllegalArgumentException( "unable to obtain color for code " + tax_code
1383 + " (tree is null or empty and code is not in map)" );
// Memoized result (either just computed or from an earlier call).
1386 return _TAXCODE_HEXCOLORSTRING_MAP.get( tax_code );
// Returns the normalized taxonomy-group name for a taxonomy code by locating the
// code's (unique) node in the species tree and walking up towards the root until a
// group can be derived from a scientific name or node name. Results are memoized in
// _TAXCODE_TAXGROUP_MAP. Throws IllegalArgumentException when the code is missing,
// not unique, or no group can be found.
1390 public static String obtainTaxonomyGroup( final String tax_code, final Phylogeny species_tree )
1391 throws IllegalArgumentException {
1392 if ( !_TAXCODE_TAXGROUP_MAP.containsKey( tax_code ) ) {
1393 if ( ( species_tree != null ) && !species_tree.isEmpty() ) {
1394 final List<PhylogenyNode> nodes = species_tree.getNodesViaTaxonomyCode( tax_code );
1396 if ( ( nodes == null ) || nodes.isEmpty() ) {
1397 throw new IllegalArgumentException( "code " + tax_code + " is not found" );
1399 if ( nodes.size() != 1 ) {
1400 throw new IllegalArgumentException( "code " + tax_code + " is not unique" );
// Walk from the matching node towards the root until a group name is obtained.
1402 PhylogenyNode n = nodes.get( 0 );
1403 String group = null;
1405 while ( n != null ) {
1406 if ( n.getNodeData().isHasTaxonomy()
1407 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
1408 group = ForesterUtil.obtainNormalizedTaxonomyGroup( n.getNodeData().getTaxonomy()
1409 .getScientificName() );
// Fall back to the plain node name when the scientific name yields nothing.
1412 if ( ForesterUtil.isEmpty( group ) && !ForesterUtil.isEmpty( n.getName() ) ) {
1413 group = ForesterUtil.obtainNormalizedTaxonomyGroup( n.getName() );
1416 if ( !ForesterUtil.isEmpty( group ) ) {
1422 if ( ForesterUtil.isEmpty( group ) ) {
1423 throw new IllegalArgumentException( "no group found for taxonomy code \"" + tax_code + "\"" );
1426 _TAXCODE_TAXGROUP_MAP.put( tax_code, group );
1429 throw new IllegalArgumentException( "unable to obtain group for code " + tax_code
1430 + " (tree is null or empty and code is not in map)" );
// Memoized result (either just computed or from an earlier call).
1433 return _TAXCODE_TAXGROUP_MAP.get( tax_code );
// Writes two reports: (1) da_counts_outfile — each domain architecture with count
// >= min_count, tab-separated; (2) unique_da_outfile — for each DA (presumably only
// unique ones; the guard is on an elided line), the genome(s) containing it.
// I/O failure is fatal.
// NOTE(review): the closing log messages say "Wrote distance matrices" for both
// files, which does not match what this method writes — looks like a copy/paste
// slip; confirm against the full file before changing the strings.
1439 public static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
1440 final SortedMap<String, Integer> domain_architecuture_counts,
1441 final int min_count,
1442 final File da_counts_outfile,
1443 final File unique_da_outfile ) {
// Fail early if either output file cannot be written.
1444 checkForOutputFileWriteability( da_counts_outfile );
1445 checkForOutputFileWriteability( unique_da_outfile );
1447 final BufferedWriter da_counts_out = new BufferedWriter( new FileWriter( da_counts_outfile ) );
1448 final BufferedWriter unique_da_out = new BufferedWriter( new FileWriter( unique_da_outfile ) );
1449 final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
1450 while ( it.hasNext() ) {
1451 final Map.Entry<String, Integer> e = it.next();
1452 final String da = e.getKey();
1453 final int count = e.getValue();
1454 if ( count >= min_count ) {
1455 da_counts_out.write( da );
1456 da_counts_out.write( "\t" );
1457 da_counts_out.write( String.valueOf( count ) );
1458 da_counts_out.write( ForesterUtil.LINE_SEPARATOR );
// For each genome, record which of the DAs it contains.
1461 final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
1462 while ( it2.hasNext() ) {
1463 final Map.Entry<String, Set<String>> e2 = it2.next();
1464 final String genome = e2.getKey();
1465 final Set<String> das = e2.getValue();
1466 if ( das.contains( da ) ) {
1467 unique_da_out.write( genome );
1468 unique_da_out.write( "\t" );
1469 unique_da_out.write( da );
1470 unique_da_out.write( ForesterUtil.LINE_SEPARATOR );
1475 unique_da_out.close();
1476 da_counts_out.close();
1478 catch ( final IOException e ) {
1479 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1481 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + da_counts_outfile + "\"" );
1482 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + unique_da_outfile + "\"" );
// Decorates phylogeny p with the parsimony results and stamps it with a description
// (method, date, cost, gains, losses, unchanged, parameters), a parsimony-cost
// confidence value, and fixed rooting (rooted, not rerootable). Mutates p in place.
// NOTE(review): one parameter line (original line 1490) is elided from this chunk.
1486 public static void preparePhylogeny( final Phylogeny p,
1487 final DomainParsimonyCalculator domain_parsimony,
1488 final String date_time,
1489 final String method,
1491 final String parameters_str ) {
1492 domain_parsimony.decoratePhylogenyWithDomains( p );
1493 final StringBuilder desc = new StringBuilder();
1494 desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
1495 desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
1496 desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
1497 desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
1498 desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
1499 desc.append( "[Parameters: " + parameters_str + "]" );
1501 p.setDescription( desc.toString() );
// Parsimony cost doubles as the tree's confidence value.
1502 p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
1503 p.setRerootable( false );
1504 p.setRooted( true );
// Prepares an input species tree for parsimony analyses: verifies each genome's node
// name (input_file_properties[i][1]) is unique in the tree, fills in missing node
// names from taxonomy (code > scientific name > common name, else fatal), deletes
// external nodes not matching any genome, and finally re-verifies every genome node
// is present and unique. Mutates intree in place.
1507 public static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
1508 final String[][] input_file_properties ) {
1509 final String[] genomes = new String[ input_file_properties.length ];
1510 for( int i = 0; i < input_file_properties.length; ++i ) {
1511 if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
1512 ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
1513 + "] is not unique in input tree " + intree.getName() );
1515 genomes[ i ] = input_file_properties[ i ][ 1 ];
// Name unnamed nodes from taxonomy data, preferring code > scientific > common name.
1518 final PhylogenyNodeIterator it = intree.iteratorPostorder();
1519 while ( it.hasNext() ) {
1520 final PhylogenyNode n = it.next();
1521 if ( ForesterUtil.isEmpty( n.getName() ) ) {
1522 if ( n.getNodeData().isHasTaxonomy()
1523 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
1524 n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
1526 else if ( n.getNodeData().isHasTaxonomy()
1527 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
1528 n.setName( n.getNodeData().getTaxonomy().getScientificName() );
1530 else if ( n.getNodeData().isHasTaxonomy()
1531 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
1532 n.setName( n.getNodeData().getTaxonomy().getCommonName() );
1536 .fatalError( surfacing.PRG_NAME,
1537 "node with no name, scientific name, common name, or taxonomy code present" );
// Prune external nodes that do not correspond to any analyzed genome.
1542 final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
1543 if ( igns.size() > 0 ) {
1544 System.out.println( "Not using the following " + igns.size() + " nodes:" );
1545 for( int i = 0; i < igns.size(); ++i ) {
1546 System.out.println( " " + i + ": " + igns.get( i ) );
1548 System.out.println( "--" );
// After pruning, every genome must still resolve to exactly one node.
1550 for( final String[] input_file_propertie : input_file_properties ) {
1552 intree.getNode( input_file_propertie[ 1 ] );
1554 catch ( final IllegalArgumentException e ) {
1555 ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
1556 + "] not present/not unique in input tree" );
// Computes the percentage of proteins with more than one domain from a
// domains-per-protein histogram (key = domain count, value = number of proteins) and
// reports it to the console and the log writer. Assumes the histogram contains key 1.
// NOTE(review): the accumulator declaration (presumably `int sum = 0;`) is on a line
// elided from this chunk; the doubled ": :" in the logged string looks like a typo —
// confirm against the full file before changing it.
1561 public static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
1562 final Writer log_writer ) {
1564 for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
1565 sum += entry.getValue();
// Single-domain proteins are those with histogram key 1.
1567 final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
1568 ForesterUtil.programMessage( surfacing.PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
1569 log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
// Loads a filter file (one entry per line, via ForesterUtil.file2set) into the given
// sorted set; a read failure is fatal. Prints the filter when VERBOSE is on.
1572 public static void processFilter( final File filter_file, final SortedSet<String> filter ) {
1573 SortedSet<String> filter_str = null;
1575 filter_str = ForesterUtil.file2set( filter_file );
1577 catch ( final IOException e ) {
1578 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1580 if ( filter_str != null ) {
1581 for( final String string : filter_str ) {
1582 filter.add( string );
1585 if ( surfacing.VERBOSE ) {
1586 System.out.println( "Filter:" );
1587 for( final String domainId : filter ) {
1588 System.out.println( domainId );
1593 public static String[][] processInputGenomesFile( final File input_genomes ) {
1594 String[][] input_file_properties = null;
1596 input_file_properties = ForesterUtil.file22dArray( input_genomes );
1598 catch ( final IOException e ) {
1599 ForesterUtil.fatalError( surfacing.PRG_NAME,
1600 "genomes files is to be in the following format \"<hmmpfam output file> <species>\": "
1601 + e.getLocalizedMessage() );
1603 final Set<String> specs = new HashSet<String>();
1604 final Set<String> paths = new HashSet<String>();
1605 for( int i = 0; i < input_file_properties.length; ++i ) {
1606 if ( !PhyloXmlUtil.TAXOMONY_CODE_PATTERN.matcher( input_file_properties[ i ][ 1 ] ).matches() ) {
1607 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for species code: "
1608 + input_file_properties[ i ][ 1 ] );
1610 if ( specs.contains( input_file_properties[ i ][ 1 ] ) ) {
1611 ForesterUtil.fatalError( surfacing.PRG_NAME, "species code " + input_file_properties[ i ][ 1 ]
1612 + " is not unique" );
1614 specs.add( input_file_properties[ i ][ 1 ] );
1615 if ( paths.contains( input_file_properties[ i ][ 0 ] ) ) {
1616 ForesterUtil.fatalError( surfacing.PRG_NAME, "path " + input_file_properties[ i ][ 0 ]
1617 + " is not unique" );
1619 paths.add( input_file_properties[ i ][ 0 ] );
1620 final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
1621 if ( !ForesterUtil.isEmpty( error ) ) {
1622 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
1625 return input_file_properties;
1628 public static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
1629 final List<String> high_copy_base,
1630 final List<String> high_copy_target,
1631 final List<String> low_copy,
1632 final List<Object> numbers ) {
1633 if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
1634 if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
1635 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for 'plus-minus' file: -"
1636 + surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>" );
1638 final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
1639 final String msg = ForesterUtil.isReadableFile( plus_minus_file );
1640 if ( !ForesterUtil.isEmpty( msg ) ) {
1641 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
1643 processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
1647 // First numbers is minimal difference, second is factor.
1648 public static void processPlusMinusFile( final File plus_minus_file,
1649 final List<String> high_copy_base,
1650 final List<String> high_copy_target,
1651 final List<String> low_copy,
1652 final List<Object> numbers ) {
1653 Set<String> species_set = null;
1654 int min_diff = surfacing.PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
1655 double factor = surfacing.PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
1657 species_set = ForesterUtil.file2set( plus_minus_file );
1659 catch ( final IOException e ) {
1660 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1662 if ( species_set != null ) {
1663 for( final String species : species_set ) {
1664 final String species_trimmed = species.substring( 1 );
1665 if ( species.startsWith( "+" ) ) {
1666 if ( low_copy.contains( species_trimmed ) ) {
1667 ForesterUtil.fatalError( surfacing.PRG_NAME,
1668 "species/genome names can not appear with both '+' and '-' suffix, as appears the case for: \""
1669 + species_trimmed + "\"" );
1671 high_copy_base.add( species_trimmed );
1673 else if ( species.startsWith( "*" ) ) {
1674 if ( low_copy.contains( species_trimmed ) ) {
1675 ForesterUtil.fatalError( surfacing.PRG_NAME,
1676 "species/genome names can not appear with both '*' and '-' suffix, as appears the case for: \""
1677 + species_trimmed + "\"" );
1679 high_copy_target.add( species_trimmed );
1681 else if ( species.startsWith( "-" ) ) {
1682 if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
1683 ForesterUtil.fatalError( surfacing.PRG_NAME,
1684 "species/genome names can not appear with both '+' or '*' and '-' suffix, as appears the case for: \""
1685 + species_trimmed + "\"" );
1687 low_copy.add( species_trimmed );
1689 else if ( species.startsWith( "$D" ) ) {
1691 min_diff = Integer.parseInt( species.substring( 3 ) );
1693 catch ( final NumberFormatException e ) {
1694 ForesterUtil.fatalError( surfacing.PRG_NAME,
1695 "could not parse integer value for minimal difference from: \""
1696 + species.substring( 3 ) + "\"" );
1699 else if ( species.startsWith( "$F" ) ) {
1701 factor = Double.parseDouble( species.substring( 3 ) );
1703 catch ( final NumberFormatException e ) {
1704 ForesterUtil.fatalError( surfacing.PRG_NAME, "could not parse double value for factor from: \""
1705 + species.substring( 3 ) + "\"" );
1708 else if ( species.startsWith( "#" ) ) {
1713 .fatalError( surfacing.PRG_NAME,
1714 "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' minimal Difference (default is 1), '$F=<double>' factor (default is 1.0), double), or '#' (ignore) suffix, encountered: \""
1717 numbers.add( new Integer( min_diff + "" ) );
1718 numbers.add( new Double( factor + "" ) );
1722 ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
1727 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
// Renders a protein's N-/C-terminal domain pairings as delimited text rows:
// species | protein id | n-term domain | c-term domain | n-term per-domain
// E-value | c-term per-domain E-value | n-term count | c-term count.
// Single-domain proteins produce one row with the pair columns empty; proteins
// without domains produce a row of separators only.
// NOTE(review): this listing elides some lines (closing braces, a dcs.add());
// all code tokens below are preserved exactly as visible.
static public StringBuffer proteinToDomainCombinations( final Protein protein,
                                                        final String protein_id,
                                                        final String separator ) {
    final StringBuffer sb = new StringBuffer();
    if ( protein.getSpecies() == null ) {
        throw new IllegalArgumentException( "species must not be null" );
    if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
        throw new IllegalArgumentException( "species id must not be empty" );
    final List<Domain> domains = protein.getProteinDomains();
    if ( domains.size() > 1 ) {
        // Occurrence count of each domain id within this protein.
        final Map<String, Integer> counts = new HashMap<String, Integer>();
        for( final Domain domain : domains ) {
            final String id = domain.getDomainId();
            if ( counts.containsKey( id ) ) {
                counts.put( id, counts.get( id ) + 1 );
                counts.put( id, 1 );
        // Already-emitted combinations, so each distinct pair is written once.
        final Set<String> dcs = new HashSet<String>();
        for( int i = 1; i < domains.size(); ++i ) {
            for( int j = 0; j < i; ++j ) {
                Domain domain_n = domains.get( i );
                Domain domain_c = domains.get( j );
                // Order the pair by sequence position: the domain starting
                // first is treated as N-terminal.
                if ( domain_n.getFrom() > domain_c.getFrom() ) {
                    domain_n = domains.get( j );
                    domain_c = domains.get( i );
                final String dc = domain_n.getDomainId() + domain_c.getDomainId();
                if ( !dcs.contains( dc ) ) {
                    sb.append( protein.getSpecies() );
                    sb.append( separator );
                    sb.append( protein_id );
                    sb.append( separator );
                    sb.append( domain_n.getDomainId() );
                    sb.append( separator );
                    sb.append( domain_c.getDomainId() );
                    sb.append( separator );
                    sb.append( domain_n.getPerDomainEvalue() );
                    sb.append( separator );
                    sb.append( domain_c.getPerDomainEvalue() );
                    sb.append( separator );
                    sb.append( counts.get( domain_n.getDomainId() ) );
                    sb.append( separator );
                    sb.append( counts.get( domain_c.getDomainId() ) );
                    sb.append( ForesterUtil.LINE_SEPARATOR );
    // Exactly one domain: emit it with empty pair columns.
    else if ( domains.size() == 1 ) {
        sb.append( protein.getSpecies() );
        sb.append( separator );
        sb.append( protein_id );
        sb.append( separator );
        sb.append( domains.get( 0 ).getDomainId() );
        sb.append( separator );
        sb.append( separator );
        sb.append( domains.get( 0 ).getPerDomainEvalue() );
        sb.append( separator );
        sb.append( separator );
        sb.append( separator );
        sb.append( ForesterUtil.LINE_SEPARATOR );
    // No domains at all: species and protein id followed by empty columns.
        sb.append( protein.getSpecies() );
        sb.append( separator );
        sb.append( protein_id );
        sb.append( separator );
        sb.append( separator );
        sb.append( separator );
        sb.append( separator );
        sb.append( separator );
        sb.append( separator );
        sb.append( ForesterUtil.LINE_SEPARATOR );
1815 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1816 final List<Domain> domains = new ArrayList<Domain>();
1817 for( final Domain d : protein.getProteinDomains() ) {
1820 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// Records the set of distinct domain-architecture strings for one genome in
// domain_architecutures and updates the across-genome occurrence counts of
// each distinct architecture.
// NOTE(review): the method's return statement is elided in this listing;
// presumably it returns the number of distinct architectures — confirm.
public static int storeDomainArchitectures( final String genome,
                                            final SortedMap<String, Set<String>> domain_architecutures,
                                            final List<Protein> protein_list,
                                            final Map<String, Integer> distinct_domain_architecuture_counts ) {
    // Distinct architecture strings seen for this genome.
    final Set<String> da = new HashSet<String>();
    domain_architecutures.put( genome, da );
    for( final Protein protein : protein_list ) {
        // Architecture string built with separators "~" and "=" and argument 3
        // — assumes these control joining/formatting in BasicProtein; confirm.
        final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~", 3, "=" );
        if ( !da.contains( da_str ) ) {
            if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
                // First time this architecture is seen across all genomes.
                distinct_domain_architecuture_counts.put( da_str, 1 );
                distinct_domain_architecuture_counts.put( da_str,
                                                          distinct_domain_architecuture_counts.get( da_str ) + 1 );
// For every internal node of the phylogeny, collects the domains gained
// (get_gains == true) or lost (get_gains == false) on the subtree rooted at
// that node and writes them, one per line, to a per-node file named
// <node name><suffix_for_filename> under a freshly created base directory.
public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
                                                        final boolean get_gains,
                                                        final String outdir,
                                                        final String suffix_for_filename ) throws IOException {
    CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
    // NOTE(review): the conditional guarding this LOSS assignment (presumably
    // "if ( !get_gains )") is elided in this listing — confirm.
    state = CharacterStateMatrix.GainLossStates.LOSS;
    // NOTE(review): remaining arguments of this call are elided in this view.
    final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
    for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
        final PhylogenyNode node = it.next();
        // Only internal nodes represent subtrees with gain/loss events.
        if ( !node.isExternal() ) {
            final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
            if ( domains.size() > 0 ) {
                final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
                        + node.getName() + suffix_for_filename );
                for( final String domain : domains ) {
                    writer.write( domain );
                    writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes all binary domain combinations of one genome to a DOT-format file for
// graph analysis, named <species code><suffix>, optionally under output_dir.
// NOTE(review): an index parameter (used below as 'i' into
// input_file_properties) is elided from the visible signature — confirm.
public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
                                                                      final File output_dir,
                                                                      final GenomeWideCombinableDomains gwcd,
                                                                      final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
    // Output file is named after the species code (column 1).
    File dc_outfile_dot = new File( input_file_properties[ i ][ 1 ]
            + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
    if ( output_dir != null ) {
        dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
    checkForOutputFileWriteability( dc_outfile_dot );
    final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
    final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
    for( final BinaryDomainCombination bdc : binary_combinations ) {
        // One DOT-language statement per combination.
        out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
        out_dot.write( SurfacingConstants.NL );
    catch ( final IOException e ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
    ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
            + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
            + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
// Writes, for each identifier of the gain/loss matrix (in sorted order), the
// characters whose state matches 'state' as a delimited list; a null state
// selects characters that are either GAIN or UNCHANGED_PRESENT. Optional
// per-character descriptions are appended when available.
public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
                                                        final CharacterStateMatrix.GainLossStates state,
                                                        final String filename,
                                                        final String indentifier_characters_separator,
                                                        final String character_separator,
                                                        final Map<String, String> descriptions ) {
    final File outfile = new File( filename );
    checkForOutputFileWriteability( outfile );
    // Emit identifiers in sorted order rather than matrix order.
    final SortedSet<String> sorted_ids = new TreeSet<String>();
    for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
        sorted_ids.add( matrix.getIdentifier( i ) );
    final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
    for( final String id : sorted_ids ) {
        out.write( indentifier_characters_separator );
        out.write( "#" + id );
        out.write( indentifier_characters_separator );
        for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
            // using null to indicate either UNCHANGED_PRESENT or GAIN.
            if ( ( matrix.getState( id, c ) == state )
                    || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
                            .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
                out.write( matrix.getCharacter( c ) );
                if ( ( descriptions != null ) && !descriptions.isEmpty()
                        && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
                    out.write( descriptions.get( matrix.getCharacter( c ) ) );
                out.write( character_separator );
    catch ( final IOException e ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
    ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain
// combinations: each selected matrix character is parsed into a
// BinaryDomainCombination and written in the requested graph-describing
// output format (e.g. DOT) instead of as plain text.
public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
                                                                                             final CharacterStateMatrix.GainLossStates state,
                                                                                             final String filename,
                                                                                             final String indentifier_characters_separator,
                                                                                             final String character_separator,
                                                                                             final BinaryDomainCombination.OutputFormat bc_output_format ) {
    final File outfile = new File( filename );
    checkForOutputFileWriteability( outfile );
    // Emit identifiers in sorted order rather than matrix order.
    final SortedSet<String> sorted_ids = new TreeSet<String>();
    for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
        sorted_ids.add( matrix.getIdentifier( i ) );
    final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
    for( final String id : sorted_ids ) {
        out.write( indentifier_characters_separator );
        out.write( "#" + id );
        out.write( indentifier_characters_separator );
        for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
            // using null to indicate either UNCHANGED_PRESENT or GAIN.
            if ( ( matrix.getState( id, c ) == state )
                    || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
                            .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
                BinaryDomainCombination bdc = null;
                // A malformed character string is fatal.
                bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
                catch ( final Exception e ) {
                    ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
                out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
                out.write( character_separator );
    catch ( final IOException e ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
    ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Writes a detailed HTML report of the gain/loss matrix: a linked index of all
// identifiers, then per-identifier tables of the matching characters (domains
// or domain combinations) with GO annotations, plus one plain-text file per
// tree node listing that node's gained/lost characters. Every encountered
// Pfam id is accumulated into all_pfams_encountered (and optionally into
// pfams_gained_or_lost).
// NOTE(review): this listing elides several lines (closing braces, 'continue'
// bodies, some call arguments); code tokens below are exactly as visible.
public static void writeBinaryStatesMatrixToList( final Map<String, List<GoId>> domain_id_to_go_ids_map,
                                                  final Map<GoId, GoTerm> go_id_to_term_map,
                                                  final GoNameSpace go_namespace_limit,
                                                  final boolean domain_combinations,
                                                  final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
                                                  final CharacterStateMatrix.GainLossStates state,
                                                  final String filename,
                                                  final String indentifier_characters_separator,
                                                  final String character_separator,
                                                  final String title_for_html,
                                                  final String prefix_for_html,
                                                  final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
                                                  final SortedSet<String> all_pfams_encountered,
                                                  final SortedSet<String> pfams_gained_or_lost,
                                                  final String suffix_for_per_node_events_file,
                                                  final Map<String, Integer> tax_code_to_id_map ) {
    // Argument sanity checks: GO output modes require the corresponding maps.
    if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
        throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
    else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
        throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
    else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
        throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
    final File outfile = new File( filename );
    checkForOutputFileWriteability( outfile );
    // Identifiers are emitted in sorted order.
    final SortedSet<String> sorted_ids = new TreeSet<String>();
    for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
        sorted_ids.add( matrix.getIdentifier( i ) );
    final Writer out = new BufferedWriter( new FileWriter( outfile ) );
    // NOTE(review): remaining arguments of this call are elided in this view.
    final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
                                                                                                        domain_combinations,
    Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
    File per_node_go_mapped_domain_gain_loss_outfile = null;
    int per_node_counter = 0;
    // HTML page header.
    out.write( "<html>" );
    out.write( SurfacingConstants.NL );
    writeHtmlHead( out, title_for_html );
    out.write( SurfacingConstants.NL );
    out.write( "<body>" );
    out.write( SurfacingConstants.NL );
    out.write( "<h1>" );
    out.write( SurfacingConstants.NL );
    out.write( title_for_html );
    out.write( SurfacingConstants.NL );
    out.write( "</h1>" );
    out.write( SurfacingConstants.NL );
    out.write( "<table>" );
    out.write( SurfacingConstants.NL );
    // Index table: one anchor link per identifier.
    for( final String id : sorted_ids ) {
        final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
        // NOTE(review): body of this match-check is elided — presumably skips
        // SwissProt-style taxonomy ids; confirm.
        if ( matcher.matches() ) {
        out.write( "<tr>" );
        out.write( "<td>" );
        out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
        out.write( "</td>" );
        out.write( "</tr>" );
        out.write( SurfacingConstants.NL );
    out.write( "</table>" );
    out.write( SurfacingConstants.NL );
    // One section per identifier with its characters and GO annotations.
    for( final String id : sorted_ids ) {
        final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
        // NOTE(review): body of this match-check is elided — see above.
        if ( matcher.matches() ) {
        out.write( SurfacingConstants.NL );
        out.write( "<h2>" );
        out.write( "<a name=\"" + id + "\">" + id + "</a>" );
        writeTaxonomyLinks( out, id, tax_code_to_id_map );
        out.write( "</h2>" );
        out.write( SurfacingConstants.NL );
        out.write( "<table>" );
        out.write( SurfacingConstants.NL );
        out.write( "<tr>" );
        out.write( "<td><b>" );
        out.write( "Pfam domain(s)" );
        out.write( "</b></td><td><b>" );
        out.write( "GO term acc" );
        out.write( "</b></td><td><b>" );
        out.write( "GO term" );
        out.write( "</b></td><td><b>" );
        out.write( "GO namespace" );
        out.write( "</b></td>" );
        out.write( "</tr>" );
        out.write( SurfacingConstants.NL );
        out.write( "</tr>" );
        out.write( SurfacingConstants.NL );
        per_node_counter = 0;
        // Open the per-node events file only when there are characters at all.
        if ( matrix.getNumberOfCharacters() > 0 ) {
            per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
            SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
            per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
                    .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
            per_node_go_mapped_domain_gain_loss_outfile = null;
            per_node_go_mapped_domain_gain_loss_outfile_writer = null;
        for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
            // using null to indicate either UNCHANGED_PRESENT or GAIN.
            if ( ( matrix.getState( id, c ) == state )
                    || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
                            .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
                final String character = matrix.getCharacter( c );
                String domain_0 = "";
                String domain_1 = "";
                // A character containing the separator is a binary domain
                // combination; split it into its two domain ids.
                if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
                    final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
                    if ( s.length != 2 ) {
                        throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
                                + character + "]" );
                    // Single-domain character.
                    domain_0 = character;
                // NOTE(review): several arguments of this call are elided.
                writeDomainData( domain_id_to_go_ids_map,
                                 character_separator,
                                 domain_id_to_secondary_features_maps,
                all_pfams_encountered.add( domain_0 );
                if ( pfams_gained_or_lost != null ) {
                    pfams_gained_or_lost.add( domain_0 );
                if ( !ForesterUtil.isEmpty( domain_1 ) ) {
                    all_pfams_encountered.add( domain_1 );
                    if ( pfams_gained_or_lost != null ) {
                        pfams_gained_or_lost.add( domain_1 );
                if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
                    writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
        if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
            per_node_go_mapped_domain_gain_loss_outfile_writer.close();
            // Delete the per-node file again if nothing was written to it.
            if ( per_node_counter < 1 ) {
                per_node_go_mapped_domain_gain_loss_outfile.delete();
            per_node_counter = 0;
        out.write( "</table>" );
        out.write( SurfacingConstants.NL );
        out.write( "<hr>" );
        out.write( SurfacingConstants.NL );
    } // for( final String id : sorted_ids ) {
    out.write( "</body>" );
    out.write( SurfacingConstants.NL );
    out.write( "</html>" );
    out.write( SurfacingConstants.NL );
    catch ( final IOException e ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
    ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes one genome's domain combination counts to a per-genome file and
// appends a tab-separated row of domain "promiscuity" statistics (mean, sd,
// median, min, max, n, most promiscuous domains) to the shared statistics
// writer.
// NOTE(review): an index parameter (used below as 'i' into
// input_file_properties) is elided from the visible signature — confirm.
public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
                                                      final File output_dir,
                                                      final Writer per_genome_domain_promiscuity_statistics_writer,
                                                      final GenomeWideCombinableDomains gwcd,
                                                      final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
    // Output file is named after the species code (column 1).
    File dc_outfile = new File( input_file_properties[ i ][ 1 ]
            + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
    if ( output_dir != null ) {
        dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
    checkForOutputFileWriteability( dc_outfile );
    final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
    out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
    catch ( final IOException e ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
    final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
    per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 1 ] + "\t" );
    per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
    // Sample standard deviation is undefined for fewer than two data points.
    if ( stats.getN() < 2 ) {
        per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
        per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
                .sampleStandardDeviation() ) + "\t" );
    per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
    per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
    per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
    per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
    final SortedSet<String> mpds = gwcd.getMostPromiscuosDomain();
    for( final String mpd : mpds ) {
        per_genome_domain_promiscuity_statistics_writer.write( mpd + " " );
    per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
    catch ( final IOException e ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
    // The progress message includes the third column only when present.
    if ( input_file_properties[ i ].length == 3 ) {
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
                + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
                + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
                + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
                + dc_outfile + "\"" );
// Writes the domain similarity analysis to one or more writers. In HTML mode
// each writer gets a page header, a linked index of domain ids, a taxonomy
// color legend, and the per-domain similarity tables; in simple tab-delimited
// mode only the raw rows are written. With split writers, each domain goes to
// the writer keyed by the lowercase first character of its id ('0' is the
// fallback bucket); a lone single_writer is wrapped into a split map under '_'.
// NOTE(review): this listing elides lines (switch case labels/breaks, closing
// braces, some arguments); code tokens below are exactly as visible.
public static void writeDomainSimilaritiesToFile( final StringBuilder html_desc,
                                                  final StringBuilder html_title,
                                                  final Writer simple_tab_writer,
                                                  final Writer single_writer,
                                                  Map<Character, Writer> split_writers,
                                                  final SortedSet<DomainSimilarity> similarities,
                                                  final boolean treat_as_binary,
                                                  final List<Species> species_order,
                                                  final PrintableDomainSimilarity.PRINT_OPTION print_option,
                                                  final DomainSimilarity.DomainSimilarityScoring scoring,
                                                  final boolean verbose,
                                                  final Map<String, Integer> tax_code_to_id_map,
                                                  final Phylogeny phy ) throws IOException {
    // Normalize: a single writer becomes a one-entry split-writer map.
    if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
        split_writers = new HashMap<Character, Writer>();
        split_writers.put( '_', single_writer );
    switch ( print_option ) {
        case SIMPLE_TAB_DELIMITED:
    // HTML header for every split writer.
    for( final Character key : split_writers.keySet() ) {
        final Writer w = split_writers.get( key );
        w.write( "<html>" );
        w.write( SurfacingConstants.NL );
        // Title includes the bucket letter for split output; plain otherwise.
        writeHtmlHead( w, "DC analysis (" + html_title + ") " + key.toString().toUpperCase() );
        writeHtmlHead( w, "DC analysis (" + html_title + ")" );
        w.write( SurfacingConstants.NL );
        w.write( "<body>" );
        w.write( SurfacingConstants.NL );
        w.write( html_desc.toString() );
        w.write( SurfacingConstants.NL );
        w.write( SurfacingConstants.NL );
        w.write( SurfacingConstants.NL );
        w.write( "<table>" );
        w.write( SurfacingConstants.NL );
        w.write( "<tr><td><b>Domains:</b></td></tr>" );
        w.write( SurfacingConstants.NL );
    // Index of links, one per domain similarity entry.
    for( final DomainSimilarity similarity : similarities ) {
        if ( ( species_order != null ) && !species_order.isEmpty() ) {
            ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
        if ( single_writer != null ) {
            single_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
                    + similarity.getDomainId() + "</a></b></td></tr>" );
            single_writer.write( SurfacingConstants.NL );
            // Route by lowercase first character of the domain id.
            Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
            if ( local_writer == null ) {
                // Fallback bucket for ids with no dedicated writer.
                local_writer = split_writers.get( '0' );
            local_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
                    + similarity.getDomainId() + "</a></b></td></tr>" );
            local_writer.write( SurfacingConstants.NL );
    // Taxonomy color legend, written to every split writer.
    for( final Writer w : split_writers.values() ) {
        w.write( "</table>" );
        w.write( SurfacingConstants.NL );
        w.write( SurfacingConstants.NL );
        w.write( "<table>" );
        w.write( SurfacingConstants.NL );
        w.write( "<tr><td><b>" );
        w.write( "Species group colors:" );
        w.write( "</b></td></tr>" );
        w.write( SurfacingConstants.NL );
        writeColorLabels( "Deuterostomia", TaxonomyColors.DEUTEROSTOMIA_COLOR, w );
        writeColorLabels( "Protostomia", TaxonomyColors.PROTOSTOMIA_COLOR, w );
        writeColorLabels( "Cnidaria", TaxonomyColors.CNIDARIA_COLOR, w );
        writeColorLabels( "Placozoa", TaxonomyColors.PLACOZOA_COLOR, w );
        writeColorLabels( "Ctenophora (comb jellies)", TaxonomyColors.CTENOPHORA_COLOR, w );
        writeColorLabels( "Porifera (sponges)", TaxonomyColors.PORIFERA_COLOR, w );
        writeColorLabels( "Choanoflagellida", TaxonomyColors.CHOANOFLAGELLIDA, w );
        writeColorLabels( "Ichthyosporea & Filasterea", TaxonomyColors.ICHTHYOSPOREA_AND_FILASTEREA, w );
        writeColorLabels( "Dikarya (Ascomycota & Basidiomycota, so-called \"higher fungi\")",
                          TaxonomyColors.DIKARYA_COLOR,
        writeColorLabels( "other Fungi", TaxonomyColors.OTHER_FUNGI_COLOR, w );
        writeColorLabels( "Nucleariidae and Fonticula group",
                          TaxonomyColors.NUCLEARIIDAE_AND_FONTICULA_GROUP_COLOR,
        writeColorLabels( "Amoebozoa", TaxonomyColors.AMOEBOZOA_COLOR, w );
        writeColorLabels( "Embryophyta (plants)", TaxonomyColors.EMBRYOPHYTA_COLOR, w );
        writeColorLabels( "Chlorophyta (green algae)", TaxonomyColors.CHLOROPHYTA_COLOR, w );
        writeColorLabels( "Rhodophyta (red algae)", TaxonomyColors.RHODOPHYTA_COLOR, w );
        writeColorLabels( "Glaucocystophyce (Glaucophyta)", TaxonomyColors.GLAUCOPHYTA_COLOR, w );
        writeColorLabels( "Hacrobia (Cryptophyta & Haptophyceae & Centroheliozoa)",
                          TaxonomyColors.HACROBIA_COLOR,
        writeColorLabels( "Stramenopiles (Chromophyta, heterokonts)", TaxonomyColors.STRAMENOPILES_COLOR, w );
        writeColorLabels( "Alveolata", TaxonomyColors.ALVEOLATA_COLOR, w );
        writeColorLabels( "Rhizaria", TaxonomyColors.RHIZARIA_COLOR, w );
        writeColorLabels( "Excavata", TaxonomyColors.EXCAVATA_COLOR, w );
        writeColorLabels( "Apusozoa", TaxonomyColors.APUSOZOA_COLOR, w );
        writeColorLabels( "Archaea", TaxonomyColors.ARCHAEA_COLOR, w );
        writeColorLabels( "Bacteria", TaxonomyColors.BACTERIA_COLOR, w );
        w.write( "</table>" );
        w.write( SurfacingConstants.NL );
        w.write( SurfacingConstants.NL );
        w.write( "<table>" );
        w.write( SurfacingConstants.NL );
    // Main data rows, routed to the appropriate writer(s).
    for( final DomainSimilarity similarity : similarities ) {
        if ( ( species_order != null ) && !species_order.isEmpty() ) {
            ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
        if ( simple_tab_writer != null ) {
            simple_tab_writer.write( similarity.toStringBuffer( PRINT_OPTION.SIMPLE_TAB_DELIMITED,
                    null ).toString() );
        if ( single_writer != null ) {
            single_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map, phy ).toString() );
            single_writer.write( SurfacingConstants.NL );
            Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
            if ( local_writer == null ) {
                local_writer = split_writers.get( '0' );
            local_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map, phy ).toString() );
            local_writer.write( SurfacingConstants.NL );
    // HTML footer for every split writer.
    switch ( print_option ) {
            for( final Writer w : split_writers.values() ) {
                w.write( SurfacingConstants.NL );
                w.write( "</table>" );
                w.write( SurfacingConstants.NL );
                w.write( "</font>" );
                w.write( SurfacingConstants.NL );
                w.write( "</body>" );
                w.write( SurfacingConstants.NL );
                w.write( "</html>" );
                w.write( SurfacingConstants.NL );
    for( final Writer w : split_writers.values() ) {
// Writes an HTML <head> section to 'w': a <title> element followed by an
// embedded <style> block defining link (a, a.pl, a.ps), table-cell, and
// heading styles used by the surfacing HTML reports.
// Note: the write of 'title' itself (between the <title> tags) is on a line
// not shown here — presumably w.write( title ) — TODO confirm.
2393 public static void writeHtmlHead( final Writer w, final String title ) throws IOException {
2394 w.write( SurfacingConstants.NL );
2395 w.write( "<head>" );
2396 w.write( "<title>" );
2398 w.write( "</title>" );
2399 w.write( SurfacingConstants.NL );
2400 w.write( "<style>" );
2401 w.write( SurfacingConstants.NL );
// Default link styles.
2402 w.write( "a:visited { color : #000066; text-decoration : none; }" );
2403 w.write( SurfacingConstants.NL );
2404 w.write( "a:link { color : #000066; text-decoration : none; }" );
2405 w.write( SurfacingConstants.NL );
// NOTE(review): "##000066" contains a doubled '#'; valid CSS hex notation is
// "#000066". Browsers will ignore this a:active rule — fix in a code change.
2406 w.write( "a:active { color : ##000066; text-decoration : none; }" );
2407 w.write( SurfacingConstants.NL );
2408 w.write( "a:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; }" );
2409 w.write( SurfacingConstants.NL );
// "pl" link class: small (7px) grey links.
2411 w.write( "a.pl:visited { color : #505050; text-decoration : none; font-size: 7px;}" );
2412 w.write( SurfacingConstants.NL );
2413 w.write( "a.pl:link { color : #505050; text-decoration : none; font-size: 7px;}" );
2414 w.write( SurfacingConstants.NL );
2415 w.write( "a.pl:active { color : #505050; text-decoration : none; font-size: 7px;}" );
2416 w.write( SurfacingConstants.NL );
2417 w.write( "a.pl:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; font-size: 7px;}" );
2418 w.write( SurfacingConstants.NL );
// "ps" link class: small (7px) lighter-grey links.
2420 w.write( "a.ps:visited { color : #707070; text-decoration : none; font-size: 7px;}" );
2421 w.write( SurfacingConstants.NL );
2422 w.write( "a.ps:link { color : #707070; text-decoration : none; font-size: 7px;}" );
2423 w.write( SurfacingConstants.NL );
2424 w.write( "a.ps:active { color : #707070; text-decoration : none; font-size: 7px;}" );
2425 w.write( SurfacingConstants.NL );
2426 w.write( "a.ps:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; font-size: 7px;}" );
2427 w.write( SurfacingConstants.NL );
// Table cell and heading styles.
2429 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
2430 w.write( SurfacingConstants.NL );
2431 w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
2432 w.write( SurfacingConstants.NL );
2433 w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
2434 w.write( SurfacingConstants.NL );
2435 w.write( "</style>" );
2436 w.write( SurfacingConstants.NL );
2437 w.write( "</head>" );
2438 w.write( SurfacingConstants.NL );
// Writes a character-state matrix to 'filename' in the requested format.
// An IOException is treated as fatal (program aborts via ForesterUtil.fatalError);
// on success a progress message naming the file is printed.
2441 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
2442 final String filename,
2443 final Format format ) {
2444 final File outfile = new File( filename );
// Abort early if the target file cannot be (over)written.
2445 checkForOutputFileWriteability( outfile );
2447 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
// Delegate the actual serialization to the matrix implementation.
2448 matrix.toWriter( out, format );
// NOTE(review): the flush/close of 'out' is not visible here — presumably it
// follows toWriter(); confirm the writer is closed on all paths.
2452 catch ( final IOException e ) {
2453 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2455 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Writes a list of distance matrices, one after another in PHYLIP format,
// into a single output file. IOException is fatal; a progress message is
// printed on success.
2458 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
2459 checkForOutputFileWriteability( matrix_outfile );
2461 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
2462 for( final DistanceMatrix distance_matrix : matrices ) {
// Each matrix is serialized in PHYLIP format, separated by a line break.
2463 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
2464 out.write( ForesterUtil.LINE_SEPARATOR );
2469 catch ( final IOException e ) {
2470 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2472 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
// Writes 'phylogeny' to 'filename' as phyloXML. Unlike the matrix writers,
// an IOException here is only a warning (not fatal); a success message is
// printed otherwise.
2475 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
2476 final PhylogenyWriter writer = new PhylogenyWriter();
2478 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
2480 catch ( final IOException e ) {
2481 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
2484 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
// Writes two NEXUS (binary) presence/absence matrices derived from the
// genome-wide combinable-domains list: one for individual domains (optionally
// restricted by the positive filter) and one for binary domain combinations.
// Any exception is fatal.
2487 public static void writePresentToNexus( final File output_file,
2488 final File positive_filter_file,
2489 final SortedSet<String> filter,
2490 final List<GenomeWideCombinableDomains> gwcd_list ) {
// Domain presence/absence; 'filter' is only applied when a positive filter
// file was given (null passed otherwise).
2492 writeMatrixToFile( DomainParsimonyCalculator.createMatrixOfDomainPresenceOrAbsence( gwcd_list,
2493 positive_filter_file == null ? null
2495 output_file + surfacing.DOMAINS_PRESENT_NEXUS,
2496 Format.NEXUS_BINARY );
// Binary domain combination presence/absence.
2497 writeMatrixToFile( DomainParsimonyCalculator.createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ),
2498 output_file + surfacing.BDC_PRESENT_NEXUS,
2499 Format.NEXUS_BINARY );
2501 catch ( final Exception e ) {
2502 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
// For every domain id encountered in any genome, writes one file (in
// 'output_dir', named <domain> + SEQ_EXTRACT_SUFFIX) listing the proteins
// containing that domain, extracted from the per-species protein lists.
// IOException is fatal; one progress message is printed per file written.
2506 public static void writeProteinListsForAllSpecies( final File output_dir,
2507 final SortedMap<Species, List<Protein>> protein_lists_per_species,
2508 final List<GenomeWideCombinableDomains> gwcd_list,
2509 final double domain_e_cutoff ) {
// Union of all domain ids across all genomes, sorted for stable output order.
2510 final SortedSet<String> all_domains = new TreeSet<String>();
2511 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
2512 all_domains.addAll( gwcd.getAllDomainIds() );
2514 for( final String domain : all_domains ) {
2515 final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + surfacing.SEQ_EXTRACT_SUFFIX );
2516 checkForOutputFileWriteability( out );
2518 final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
// Extract names of proteins containing 'domain'; further arguments
// (including the e-value cutoff) are on lines not shown here.
2519 extractProteinNames( protein_lists_per_species,
2521 proteins_file_writer,
2523 surfacing.LIMIT_SPEC_FOR_PROT_EX,
2525 proteins_file_writer.close();
2527 catch ( final IOException e ) {
2528 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2530 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );
// Writes a bracketed group of external taxonomy hyperlinks for 'species':
// an optional UniProt taxonomy link (when an id mapping exists), followed by
// EOL, Google Scholar, and Google web search links, separated by '|'.
// Only emitted when the species string is longer than one character and does
// not contain an underscore past position 0 (indexOf('_') < 1) — i.e.
// underscore-containing labels are skipped.
2534 public static void writeTaxonomyLinks( final Writer writer,
2535 final String species,
2536 final Map<String, Integer> tax_code_to_id_map ) throws IOException {
2537 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
2538 writer.write( " [" );
2539 if ( ( tax_code_to_id_map != null ) && tax_code_to_id_map.containsKey( species ) ) {
// Link via numeric UniProt taxonomy id rather than the raw code.
2540 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_TAXONOMY_ID_LINK
2541 + tax_code_to_id_map.get( species ) + "\" target=\"taxonomy_window\">uniprot</a>" );
2544 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
2545 + "\" target=\"taxonomy_window\">eol</a>" );
2546 writer.write( "|" );
2547 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_SCHOLAR_SEARCH + species
2548 + "\" target=\"taxonomy_window\">scholar</a>" );
2549 writer.write( "|" );
2550 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_WEB_SEARCH_LINK + species
2551 + "\" target=\"taxonomy_window\">google</a>" );
2553 writer.write( "]" );
// Increments the occurrence count for 's' in 'map'; the branch for a
// first occurrence (presumably map.put( s, 1 )) is on lines not shown
// here — TODO confirm.
2557 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
2558 if ( map.containsKey( s ) ) {
2559 map.put( s, map.get( s ) + 1 );
// Analyzes independent (repeated) gains of binary domain combinations (DCs)
// over a phylogeny whose nodes carry gained/lost binary characters:
//   1. counts, per DC, on how many nodes it was gained;
//   2. builds a histogram of gain counts plus, per count, lists of DCs
//      (plain, split-into-domains for GO mapping, and a unique variant);
//   3. collects protein-length / domain-count / domain-length statistics
//      separated into "gained once" vs. "gained multiple times" DCs;
//   4. for DCs gained more than once, tallies the taxonomic rank and the
//      ancestor species of the LCA of every pair of external nodes that
//      gained the DC;
//   5. writes all of the above to the various output files and prints
//      progress messages. IOException is reported as a warning.
// DC strings are of the form "domain0=domain1" (split on '=').
2566 private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
2567 final String outfilename_for_counts,
2568 final String outfilename_for_dc,
2569 final String outfilename_for_dc_for_go_mapping,
2570 final String outfilename_for_dc_for_go_mapping_unique,
2571 final String outfilename_for_rank_counts,
2572 final String outfilename_for_ancestor_species_counts,
2573 final String outfilename_for_protein_stats,
2574 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
2575 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
2576 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
2579 // if ( protein_length_stats_by_dc != null ) {
2580 // for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
2581 // System.out.print( entry.getKey().toString() );
2582 // System.out.print( ": " );
2583 // double[] a = entry.getValue().getDataAsDoubleArray();
2584 // for( int i = 0; i < a.length; i++ ) {
2585 // System.out.print( a[ i ] + " " );
2587 // System.out.println();
2590 // if ( domain_number_stats_by_dc != null ) {
2591 // for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
2592 // System.out.print( entry.getKey().toString() );
2593 // System.out.print( ": " );
2594 // double[] a = entry.getValue().getDataAsDoubleArray();
2595 // for( int i = 0; i < a.length; i++ ) {
2596 // System.out.print( a[ i ] + " " );
2598 // System.out.println();
2602 final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
2603 final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
2604 final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
2605 final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
// Step 1: count, for each DC, the number of nodes at which it was gained.
2606 final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
2607 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
2608 final PhylogenyNode n = it.next();
2609 final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
2610 for( final String dc : gained_dc ) {
2611 if ( dc_gain_counts.containsKey( dc ) ) {
2612 dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
2615 dc_gain_counts.put( dc, 1 );
// Step 2/3 bookkeeping: histogram of gain counts, per-count DC lists, and
// per-count descriptive statistics keyed by number of (re)appearances.
2619 final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
2620 final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
2621 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
2622 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
2623 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
2624 final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
2625 final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
2626 final Set<String> dcs = dc_gain_counts.keySet();
2627 final SortedSet<String> more_than_once = new TreeSet<String>();
// Aggregates split by "gained once" vs. "gained multiple times".
2628 DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
2629 DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
2630 DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
2631 final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
2632 long gained_multiple_times_domain_length_sum = 0;
2633 long gained_once_domain_length_sum = 0;
2634 long gained_multiple_times_domain_length_count = 0;
2635 long gained_once_domain_length_count = 0;
2636 for( final String dc : dcs ) {
2637 final int count = dc_gain_counts.get( dc );
2638 if ( histogram.containsKey( count ) ) {
// Count already seen: extend the existing per-count collections.
2639 histogram.put( count, histogram.get( count ) + 1 );
2640 domain_lists.get( count ).append( ", " + dc );
2641 domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
2642 domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
// First DC with this gain count: create the per-count collections.
2645 histogram.put( count, 1 );
2646 domain_lists.put( count, new StringBuilder( dc ) );
2647 final PriorityQueue<String> q = new PriorityQueue<String>();
2648 q.addAll( splitDomainCombination( dc ) );
2649 domain_lists_go.put( count, q );
2650 final SortedSet<String> set = new TreeSet<String>();
2651 set.addAll( splitDomainCombination( dc ) );
2652 domain_lists_go_unique.put( count, set );
// Per-count mean statistics (only when the corresponding input map was given).
2654 if ( protein_length_stats_by_dc != null ) {
2655 if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
2656 dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
2658 dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
2659 .arithmeticMean() );
2661 if ( domain_number_stats_by_dc != null ) {
2662 if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
2663 dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
2665 dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
2666 .arithmeticMean() );
2668 if ( domain_length_stats_by_domain != null ) {
2669 if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
2670 dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
// A DC "d0=d1" contributes the mean length of both of its domains.
2672 final String[] ds = dc.split( "=" );
2673 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
2674 .get( ds[ 0 ] ).arithmeticMean() );
2675 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
2676 .get( ds[ 1 ] ).arithmeticMean() );
// Branch for DCs gained more than once (membership remembered for the
// LCA analysis below); accumulates raw data points, not means.
2679 more_than_once.add( dc );
2680 if ( protein_length_stats_by_dc != null ) {
2681 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2682 for( final double element : s.getData() ) {
2683 gained_multiple_times_lengths_stats.addValue( element );
2686 if ( domain_number_stats_by_dc != null ) {
2687 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2688 for( final double element : s.getData() ) {
2689 gained_multiple_times_domain_count_stats.addValue( element );
2692 if ( domain_length_stats_by_domain != null ) {
2693 final String[] ds = dc.split( "=" );
2694 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2695 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2696 for( final double element : s0.getData() ) {
2697 gained_multiple_times_domain_length_sum += element;
2698 ++gained_multiple_times_domain_length_count;
2700 for( final double element : s1.getData() ) {
2701 gained_multiple_times_domain_length_sum += element;
2702 ++gained_multiple_times_domain_length_count;
// Branch for DCs gained exactly once: same accumulation into the
// "gained once" aggregates.
2707 if ( protein_length_stats_by_dc != null ) {
2708 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2709 for( final double element : s.getData() ) {
2710 gained_once_lengths_stats.addValue( element );
2713 if ( domain_number_stats_by_dc != null ) {
2714 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2715 for( final double element : s.getData() ) {
2716 gained_once_domain_count_stats.addValue( element );
2719 if ( domain_length_stats_by_domain != null ) {
2720 final String[] ds = dc.split( "=" );
2721 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2722 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2723 for( final double element : s0.getData() ) {
2724 gained_once_domain_length_sum += element;
2725 ++gained_once_domain_length_count;
2727 for( final double element : s1.getData() ) {
2728 gained_once_domain_length_sum += element;
2729 ++gained_once_domain_length_count;
// Emit histogram + per-count DC lists. The PriorityQueue is converted to an
// array and sorted explicitly (PQ iteration order is not sorted).
2734 final Set<Integer> histogram_keys = histogram.keySet();
2735 for( final Integer histogram_key : histogram_keys ) {
2736 final int count = histogram.get( histogram_key );
2737 final StringBuilder dc = domain_lists.get( histogram_key );
2738 out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
2739 out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
2740 out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
2741 final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
2742 Arrays.sort( sorted );
2743 for( final Object domain : sorted ) {
2744 out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
2746 out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
2747 for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
2748 out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
2753 out_dc_for_go_mapping.close();
2754 out_dc_for_go_mapping_unique.close();
// Step 4: for each DC gained more than once, find all external nodes that
// gained it and tally the rank / species of the LCA of every node pair.
2755 final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
2756 final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
2757 for( final String dc : more_than_once ) {
2758 final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
2759 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
2760 final PhylogenyNode n = it.next();
2761 if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
// All unordered pairs of gaining nodes.
2765 for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
2766 for( int j = i + 1; j < nodes.size(); ++j ) {
2767 final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
2768 String rank = "unknown";
2769 if ( lca.getNodeData().isHasTaxonomy()
2770 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
2771 rank = lca.getNodeData().getTaxonomy().getRank();
2773 addToCountMap( lca_rank_counts, rank );
// Species label preference: scientific name, then common name, then node name.
2775 if ( lca.getNodeData().isHasTaxonomy()
2776 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
2777 lca_species = lca.getNodeData().getTaxonomy().getScientificName();
2779 else if ( lca.getNodeData().isHasTaxonomy()
2780 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
2781 lca_species = lca.getNodeData().getTaxonomy().getCommonName();
2784 lca_species = lca.getName();
2786 addToCountMap( lca_ancestor_species_counts, lca_species );
2790 final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
2791 final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
2792 ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
2793 ForesterUtil.map2writer( out_for_ancestor_species_counts,
2794 lca_ancestor_species_counts,
2796 ForesterUtil.LINE_SEPARATOR );
2797 out_for_rank_counts.close();
2798 out_for_ancestor_species_counts.close();
// Step 5: optional protein/domain statistics report, only when an output
// file name was supplied and at least one statistics map is available.
2799 if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
2800 && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
2801 final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
2802 w.write( "Domain Lengths: " );
2804 if ( domain_length_stats_by_domain != null ) {
2805 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
2807 w.write( entry.getKey().toString() );
2808 w.write( "\t" + entry.getValue().arithmeticMean() );
2809 w.write( "\t" + entry.getValue().median() );
2816 w.write( "Protein Lengths: " );
2818 if ( protein_length_stats_by_dc != null ) {
2819 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
2821 w.write( entry.getKey().toString() );
2822 w.write( "\t" + entry.getValue().arithmeticMean() );
2823 w.write( "\t" + entry.getValue().median() );
2830 w.write( "Number of domains: " );
2832 if ( domain_number_stats_by_dc != null ) {
2833 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
2835 w.write( entry.getKey().toString() );
2836 w.write( "\t" + entry.getValue().arithmeticMean() );
2837 w.write( "\t" + entry.getValue().median() );
2844 w.write( "Gained once, domain lengths:" );
2846 w.write( "N: " + gained_once_domain_length_count );
2848 w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
2851 w.write( "Gained multiple times, domain lengths:" );
2853 w.write( "N: " + gained_multiple_times_domain_length_count );
2856 + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
2861 w.write( "Gained once, protein lengths:" );
2863 w.write( gained_once_lengths_stats.toString() );
// Large statistics objects are nulled after use (apparent memory hint).
2864 gained_once_lengths_stats = null;
2867 w.write( "Gained once, domain counts:" );
2869 w.write( gained_once_domain_count_stats.toString() );
2870 gained_once_domain_count_stats = null;
2873 w.write( "Gained multiple times, protein lengths:" );
2875 w.write( gained_multiple_times_lengths_stats.toString() );
2876 gained_multiple_times_lengths_stats = null;
2879 w.write( "Gained multiple times, domain counts:" );
2881 w.write( gained_multiple_times_domain_count_stats.toString() );
2886 catch ( final IOException e ) {
// I/O failure here is only a warning, unlike the fatal handling elsewhere.
2887 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2889 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
2890 + outfilename_for_counts + "]" );
2891 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
2892 + outfilename_for_dc + "]" );
2893 ForesterUtil.programMessage( surfacing.PRG_NAME,
2894 "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
2895 + outfilename_for_dc_for_go_mapping + "]" );
2896 ForesterUtil.programMessage( surfacing.PRG_NAME,
2897 "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
2898 + outfilename_for_dc_for_go_mapping_unique + "]" );
// Collects the union of binary characters (domains) gained — or lost, the
// branch selection presumably depends on 'get_gains' (condition lines not
// shown here; TODO confirm) — across all descendants of 'subtree_root',
// returned as a sorted set.
2901 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
2902 final boolean get_gains ) {
2903 final SortedSet<String> domains = new TreeSet<String>();
2904 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
2905 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
2907 domains.addAll( chars.getGainedCharacters() );
2910 domains.addAll( chars.getLostCharacters() );
// Builds (and creates on disk, level by level via mkdir) the nested output
// directory for per-node domain gain/loss files:
//   <parent of outfile>/<base_dir>/{DC|DOMAINS}/{GAINS|LOSSES|PRESENT}
// choosing DC vs. DOMAINS by 'domain_combinations' and the leaf directory
// by the gain/loss 'state'. Returns the deepest directory.
2916 private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
2917 final boolean domain_combinations,
2918 final CharacterStateMatrix.GainLossStates state,
2919 final String outfile ) {
2920 File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
2921 + ForesterUtil.FILE_SEPARATOR + base_dir );
2922 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
// NOTE(review): mkdir() return values are ignored throughout this method;
// a failed creation would only surface later as an I/O error.
2923 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
2925 if ( domain_combinations ) {
2926 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2927 + ForesterUtil.FILE_SEPARATOR + "DC" );
2930 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2931 + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
2933 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2934 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
2936 if ( state == GainLossStates.GAIN ) {
2937 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2938 + ForesterUtil.FILE_SEPARATOR + "GAINS" );
2940 else if ( state == GainLossStates.LOSS ) {
2941 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2942 + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
// Any other state falls through to the PRESENT directory.
2945 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2946 + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
2948 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2949 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
2951 return per_node_go_mapped_domain_gain_loss_files_base_dir;
// Flattens a genome's combinable-domains map into the sorted set of all
// binary domain combinations it contains.
2954 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
2955 final SortedMap<String, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
2956 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
2957 for( final String domain_id : cds.keySet() ) {
2958 final CombinableDomains cd = cds.get( domain_id );
// The set deduplicates combinations contributed by multiple domains.
2959 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
2961 return binary_combinations;
// Writes a small HTML summary of 'stats' to 'w': an optional ASCII histogram
// inside <tt><pre>, followed by a table of N, min, max, mean, and sample SD
// (SD shown as "n/a" when fewer than two data points).
2964 private static void printSomeStats( final DescriptiveStatistics stats, final AsciiHistogram histo, final Writer w )
2965 throws IOException {
2968 w.write( SurfacingConstants.NL );
2969 w.write( "<tt><pre>" );
2970 w.write( SurfacingConstants.NL );
2971 if ( histo != null ) {
// Render the histogram with fixed layout parameters (20 bins, '|', 40 wide).
2972 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
2973 w.write( SurfacingConstants.NL );
2975 w.write( "</pre></tt>" );
2976 w.write( SurfacingConstants.NL );
2977 w.write( "<table>" );
2978 w.write( SurfacingConstants.NL );
2979 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
2980 w.write( SurfacingConstants.NL );
2981 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
2982 w.write( SurfacingConstants.NL );
2983 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
2984 w.write( SurfacingConstants.NL );
2985 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
2986 w.write( SurfacingConstants.NL );
// Sample standard deviation is undefined for N < 2.
2987 if ( stats.getN() > 1 ) {
2988 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
2991 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
2993 w.write( SurfacingConstants.NL );
2994 w.write( "</table>" );
2995 w.write( SurfacingConstants.NL );
2997 w.write( SurfacingConstants.NL );
// Splits a stringified binary domain combination "domain0=domain1" on '='
// into a two-element list; prints an error message if the string does not
// contain exactly one '='.
3000 private static List<String> splitDomainCombination( final String dc ) {
3001 final String[] s = dc.split( "=" );
3002 if ( s.length != 2 ) {
3003 ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
// Presized to the expected two parts.
3007 final List<String> l = new ArrayList<String>( 2 );
// Writes three report files about the Pfam domains encountered:
//   1. all encountered Pfams (one per line);
//   2. only those with at least one GO-id mapping;
//   3. a summary file listing Pfams without mappings followed by
//      percentage statistics (with/without mappings, and per GO namespace:
//      biological process, molecular function, cellular component).
// The same statistics are also echoed as program messages. IOException is
// reported as a warning.
3013 private static void writeAllEncounteredPfamsToFile( final Map<String, List<GoId>> domain_id_to_go_ids_map,
3014 final Map<GoId, GoTerm> go_id_to_term_map,
3015 final String outfile_name,
3016 final SortedSet<String> all_pfams_encountered ) {
3017 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
3018 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
3019 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
3020 final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
// Counters for the summary statistics below.
3021 int biological_process_counter = 0;
3022 int cellular_component_counter = 0;
3023 int molecular_function_counter = 0;
3024 int pfams_with_mappings_counter = 0;
3025 int pfams_without_mappings_counter = 0;
3026 int pfams_without_mappings_to_bp_or_mf_counter = 0;
3027 int pfams_with_mappings_to_bp_or_mf_counter = 0;
3029 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
3030 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
3031 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
3032 summary_writer.write( "# Pfam to GO mapping summary" );
3033 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3034 summary_writer.write( "# Actual summary is at the end of this file." );
3035 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3036 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
3037 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3038 for( final String pfam : all_pfams_encountered ) {
3039 all_pfams_encountered_writer.write( pfam );
3040 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
3041 final String domain_id = new String( pfam );
3042 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
3043 ++pfams_with_mappings_counter;
3044 all_pfams_encountered_with_go_annotation_writer.write( pfam );
3045 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
3046 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
// Classify this Pfam's GO ids by namespace; flags record whether it maps
// to each namespace at least once.
3047 boolean maps_to_bp = false;
3048 boolean maps_to_cc = false;
3049 boolean maps_to_mf = false;
3050 for( final GoId go_id : go_ids ) {
3051 final GoTerm go_term = go_id_to_term_map.get( go_id );
3052 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
3055 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
3058 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
3063 ++biological_process_counter;
3066 ++cellular_component_counter;
3069 ++molecular_function_counter;
3071 if ( maps_to_bp || maps_to_mf ) {
3072 ++pfams_with_mappings_to_bp_or_mf_counter;
3075 ++pfams_without_mappings_to_bp_or_mf_counter;
// No GO mapping at all: counts toward both "without mappings" tallies and
// is listed in the summary file.
3079 ++pfams_without_mappings_to_bp_or_mf_counter;
3080 ++pfams_without_mappings_counter;
3081 summary_writer.write( pfam );
3082 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3085 all_pfams_encountered_writer.close();
3086 all_pfams_encountered_with_go_annotation_writer.close();
3087 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
3088 + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
3089 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
3090 + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
3092 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
3093 + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
3094 + encountered_pfams_summary_file + "\"" );
// Percentages below use integer division; NOTE(review): they would throw
// ArithmeticException if all_pfams_encountered were empty — confirm callers
// never pass an empty set.
3095 ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered : "
3096 + all_pfams_encountered.size() );
3097 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping : "
3098 + pfams_without_mappings_counter + " ["
3099 + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
3100 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
3101 + pfams_without_mappings_to_bp_or_mf_counter + " ["
3102 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
3103 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping : "
3104 + pfams_with_mappings_counter + " ["
3105 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
3106 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func. : "
3107 + pfams_with_mappings_to_bp_or_mf_counter + " ["
3108 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
3109 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to biological process: "
3110 + biological_process_counter + " ["
3111 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
3112 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to molecular function: "
3113 + molecular_function_counter + " ["
3114 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
3115 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to cellular component: "
3116 + cellular_component_counter + " ["
3117 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
// The same statistics, appended as comments to the summary file.
3118 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3119 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
3120 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3121 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter
3122 + " [" + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
3123 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3124 summary_writer.write( "# Pfams without mapping to proc. or func. : "
3125 + pfams_without_mappings_to_bp_or_mf_counter + " ["
3126 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
3127 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3128 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
3129 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
3130 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3131 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
3132 + pfams_with_mappings_to_bp_or_mf_counter + " ["
3133 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
3134 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3135 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
3136 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
3137 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3138 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
3139 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
3140 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3141 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
3142 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
3143 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
3144 summary_writer.close();
3146 catch ( final IOException e ) {
3147 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
3151 private final static void writeColorLabels( final String l, final Color c, final Writer w ) throws IOException {
3152 w.write( "<tr><td><b><span style=\"color:" );
3153 w.write( String.format( "#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue() ) );
3156 w.write( "</span></b></td></tr>" );
3157 w.write( SurfacingConstants.NL );
/**
 * Writes one HTML table row per GO annotation for either a single domain
 * ({@code domain_1} empty) or a binary domain combination ({@code domain_0}
 * plus {@code domain_1}), linking domain ids to Pfam and GO ids to AmiGO.
 *
 * NOTE(review): this chunk appears to have lost lines in extraction — the
 * {@code out} Writer used throughout the body is not among the visible
 * parameters, and several braces/else-branches are absent. Code tokens are
 * deliberately left untouched here; comments only. Confirm against VCS.
 *
 * @param domain_id_to_go_ids_map   maps a domain id to its GO ids
 * @param go_id_to_term_map         maps a GO id to its GO term (must contain every id encountered)
 * @param go_namespace_limit        if non-null, only annotations in this namespace are rendered
 * @param domain_0                  first (or only) domain id
 * @param domain_1                  second domain id of a combination, or empty
 * @param prefix_for_html           prefix passed through to the domain-id cells
 * @param character_separator_for_non_html_output separator for the non-HTML variant
 *        (not used in the visible portion — possibly used in lost lines)
 * @param domain_id_to_secondary_features_maps secondary-feature lookups for the id cells
 * @param all_go_ids                if non-null, collects every GO id actually rendered
 * @throws IllegalArgumentException if a GO id has no entry in {@code go_id_to_term_map}
 * @throws IOException              if writing fails
 */
private static void writeDomainData( final Map<String, List<GoId>> domain_id_to_go_ids_map,
                                     final Map<GoId, GoTerm> go_id_to_term_map,
                                     final GoNameSpace go_namespace_limit,
                                     // NOTE(review): a 'final Writer out,' parameter is presumably
                                     // missing here — 'out' is written to below.
                                     final String domain_0,
                                     final String domain_1,
                                     final String prefix_for_html,
                                     final String character_separator_for_non_html_output,
                                     final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
                                     final Set<GoId> all_go_ids ) throws IOException {
    boolean any_go_annotation_present = false;
    boolean first_has_no_go = false;
    int domain_count = 2; // To distinguish between domains and binary domain combinations.
    if ( ForesterUtil.isEmpty( domain_1 ) ) {
        // Single domain: iterate only once below
        // (the 'domain_count = 1;' assignment appears lost in extraction).
    // The following has a difficult to understand logic.
    for( int d = 0; d < domain_count; ++d ) {
        List<GoId> go_ids = null;
        boolean go_annotation_present = false;
        // Pass d == 0: look up the GO ids of the first domain.
        if ( domain_id_to_go_ids_map.containsKey( domain_0 ) ) {
            go_annotation_present = true;
            any_go_annotation_present = true;
            go_ids = domain_id_to_go_ids_map.get( domain_0 );
        // Remember the first domain had no GO mapping, so the second pass
        // still renders the domain-id cells ('first' below).
        first_has_no_go = true;
        // Pass d == 1: look up the GO ids of the second domain.
        if ( domain_id_to_go_ids_map.containsKey( domain_1 ) ) {
            go_annotation_present = true;
            any_go_annotation_present = true;
            go_ids = domain_id_to_go_ids_map.get( domain_1 );
        if ( go_annotation_present ) {
            // Only the first emitted row repeats the (linked) domain ids.
            boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
            for( final GoId go_id : go_ids ) {
                out.write( "<tr>" );
                // NOTE(review): intermediate arguments of this call
                // (domain_0, domain_1, prefix) appear lost in extraction.
                writeDomainIdsToHtml( out,
                                      domain_id_to_secondary_features_maps );
                out.write( "<td></td>" );
                if ( !go_id_to_term_map.containsKey( go_id ) ) {
                    throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
                final GoTerm go_term = go_id_to_term_map.get( go_id );
                // Render only annotations inside the requested namespace (or all, if no limit).
                if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
                    // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
                    final String go_id_str = go_id.getId();
                    out.write( "<td>" );
                    // Link the GO id to its AmiGO entry in a dedicated browser window.
                    out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
                            + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
                    out.write( "</td><td>" );
                    out.write( go_term.getName() );
                    if ( domain_count == 2 ) {
                        // Mark which member (0 or 1) of the combination this row belongs to.
                        out.write( " (" + d + ")" );
                    out.write( "</td><td>" );
                    // out.write( top );
                    // out.write( "</td><td>" );
                    out.write( go_term.getGoNameSpace().toShortString() );
                    out.write( "</td>" );
                    if ( all_go_ids != null ) {
                        // Collect every GO id actually rendered, for the caller's summary.
                        all_go_ids.add( go_id );
                // Annotation outside the namespace limit: emit empty cells so the
                // table stays rectangular (else-branch braces lost in extraction).
                out.write( "<td>" );
                out.write( "</td><td>" );
                out.write( "</td><td>" );
                out.write( "</td><td>" );
                out.write( "</td>" );
                out.write( "</tr>" );
                out.write( SurfacingConstants.NL );
    } // for( int d = 0; d < domain_count; ++d )
    if ( !any_go_annotation_present ) {
        // No GO annotation at all: still emit one row with the domain ids and empty cells.
        out.write( "<tr>" );
        writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
        out.write( "<td>" );
        out.write( "</td><td>" );
        out.write( "</td><td>" );
        out.write( "</td><td>" );
        out.write( "</td>" );
        out.write( "</tr>" );
        out.write( SurfacingConstants.NL );
3263 private static void writeDomainIdsToHtml( final Writer out,
3264 final String domain_0,
3265 final String domain_1,
3266 final String prefix_for_detailed_html,
3267 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps )
3268 throws IOException {
3269 out.write( "<td>" );
3270 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
3271 out.write( prefix_for_detailed_html );
3274 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
3275 out.write( "</td>" );
3278 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
3279 final String domain_0,
3280 final String domain_1 ) throws IOException {
3281 individual_files_writer.write( domain_0 );
3282 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
3283 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
3284 individual_files_writer.write( domain_1 );
3285 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
3289 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
3291 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
3292 for( final String pfam : pfams ) {
3293 writer.write( pfam );
3294 writer.write( ForesterUtil.LINE_SEPARATOR );
3297 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
3300 catch ( final IOException e ) {
3301 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
3305 private static void writeToNexus( final String outfile_name,
3306 final CharacterStateMatrix<BinaryStates> matrix,
3307 final Phylogeny phylogeny ) {
3308 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
3309 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
3312 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
3313 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
3314 phylogenies.add( phylogeny );
3316 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
3317 w.write( NexusConstants.NEXUS );
3318 w.write( ForesterUtil.LINE_SEPARATOR );
3319 my_matrix.writeNexusTaxaBlock( w );
3320 my_matrix.writeNexusBinaryChractersBlock( w );
3321 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
3324 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
3326 catch ( final IOException e ) {
3327 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
3331 private static void writeToNexus( final String outfile_name,
3332 final DomainParsimonyCalculator domain_parsimony,
3333 final Phylogeny phylogeny ) {
3334 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
3335 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
3337 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
3338 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
3342 final static class DomainComparator implements Comparator<Domain> {
3344 final private boolean _ascending;
3346 public DomainComparator( final boolean ascending ) {
3347 _ascending = ascending;
3351 public final int compare( final Domain d0, final Domain d1 ) {
3352 if ( d0.getFrom() < d1.getFrom() ) {
3353 return _ascending ? -1 : 1;
3355 else if ( d0.getFrom() > d1.getFrom() ) {
3356 return _ascending ? 1 : -1;