3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.surfacing;
29 import java.io.BufferedWriter;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.text.DecimalFormat;
35 import java.text.NumberFormat;
36 import java.util.ArrayList;
37 import java.util.Arrays;
38 import java.util.Collections;
39 import java.util.Comparator;
40 import java.util.HashMap;
41 import java.util.HashSet;
42 import java.util.Iterator;
43 import java.util.List;
45 import java.util.Map.Entry;
46 import java.util.PriorityQueue;
48 import java.util.SortedMap;
49 import java.util.SortedSet;
50 import java.util.TreeMap;
51 import java.util.TreeSet;
52 import java.util.regex.Matcher;
53 import java.util.regex.Pattern;
55 import org.forester.application.surfacing;
56 import org.forester.evoinference.distance.NeighborJoining;
57 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
58 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
61 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
62 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
63 import org.forester.evoinference.matrix.distance.DistanceMatrix;
64 import org.forester.go.GoId;
65 import org.forester.go.GoNameSpace;
66 import org.forester.go.GoTerm;
67 import org.forester.go.PfamToGoMapping;
68 import org.forester.io.parsers.nexus.NexusConstants;
69 import org.forester.io.writers.PhylogenyWriter;
70 import org.forester.phylogeny.Phylogeny;
71 import org.forester.phylogeny.PhylogenyMethods;
72 import org.forester.phylogeny.PhylogenyNode;
73 import org.forester.phylogeny.PhylogenyNode.NH_CONVERSION_SUPPORT_VALUE_STYLE;
74 import org.forester.phylogeny.data.BinaryCharacters;
75 import org.forester.phylogeny.data.Confidence;
76 import org.forester.phylogeny.data.Taxonomy;
77 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
78 import org.forester.protein.BasicDomain;
79 import org.forester.protein.BasicProtein;
80 import org.forester.protein.BinaryDomainCombination;
81 import org.forester.protein.Domain;
82 import org.forester.protein.Protein;
83 import org.forester.species.Species;
84 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
85 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
86 import org.forester.util.AsciiHistogram;
87 import org.forester.util.BasicDescriptiveStatistics;
88 import org.forester.util.BasicTable;
89 import org.forester.util.BasicTableParser;
90 import org.forester.util.DescriptiveStatistics;
91 import org.forester.util.ForesterUtil;
93 public final class SurfacingUtil {
// Shared formatter: renders decimal values with exactly three fraction digits (e.g. "0.000").
95 private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
// Comparator ordering Domains by ascending per-sequence E-value, i.e. most
// confident (lowest E-value) first; equal E-values fall back to the domains'
// natural ordering via compareTo.
// NOTE(review): several interior lines of this comparator are absent from this
// listing (embedded numbering jumps 99->101, 102->106, 107->111) -- the missing
// lines presumably contain the second parameter declaration, the return -1/+1
// statements and closing braces. Verify against the full file.
96 private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {
99 public int compare( final Domain d1,
101 if ( d1.getPerSequenceEvalue() < d2
102 .getPerSequenceEvalue() ) {
106 .getPerSequenceEvalue() > d2
107 .getPerSequenceEvalue() ) {
111 return d1.compareTo( d2 );
// Matches SwissProt-style taxonomy codes: 3 to 5 uppercase letters or digits, whole string.
115 public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
// Static-utility class: private constructor prevents instantiation.
117 private SurfacingUtil() {
118 // Hidden constructor.
// Collects every binary domain combination found in the given genome into the
// supplied sorted set (mutated in place). Iterates over all combinable-domain
// ids and expands each CombinableDomains entry into its binary combinations.
// NOTE(review): closing braces are not visible in this listing (numbering jumps 125->129).
121 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
122 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
123 final SortedMap<String, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
124 for( final String domain_id : all_cd.keySet() ) {
125 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
// Copies all domain ids of the given genome into the supplied sorted set
// (mutated in place).
// NOTE(review): closing braces are not visible in this listing (numbering jumps 133->137).
129 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
130 final SortedSet<String> domain_ids ) {
131 final SortedSet<String> domains = genome.getAllDomainIds();
132 for( final String domain : domains ) {
133 domain_ids.add( domain );
// Writes an HTML head section to the given writer: a <title> element followed
// by an inline CSS <style> block defining link colors and td/h1/h2 fonts used
// by surfacing's HTML reports. Does not flush or close the writer.
// NOTE(review): some lines are absent from this listing (numbering jumps
// 138->140, 140->142, 163->166) -- presumably the opening "<head>" write and the
// write of the title argument. Verify against the full file.
137 public static void addHtmlHead( final Writer w, final String title ) throws IOException {
138 w.write( SurfacingConstants.NL );
140 w.write( "<title>" );
142 w.write( "</title>" );
143 w.write( SurfacingConstants.NL );
144 w.write( "<style>" );
145 w.write( SurfacingConstants.NL );
146 w.write( "a:visited { color : #6633FF; text-decoration : none; }" );
147 w.write( SurfacingConstants.NL );
148 w.write( "a:link { color : #6633FF; text-decoration : none; }" );
149 w.write( SurfacingConstants.NL );
150 w.write( "a:active { color : #99FF00; text-decoration : none; }" );
151 w.write( SurfacingConstants.NL );
152 w.write( "a:hover { color : #FFFFFF; background-color : #99FF00; text-decoration : none; }" );
153 w.write( SurfacingConstants.NL );
154 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
155 w.write( SurfacingConstants.NL );
156 w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
157 w.write( SurfacingConstants.NL );
158 w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
159 w.write( SurfacingConstants.NL );
160 w.write( "</style>" );
161 w.write( SurfacingConstants.NL );
162 w.write( "</head>" );
163 w.write( SurfacingConstants.NL );
// Builds descriptive statistics over the mean similarity score of each
// DomainSimilarity in the given set.
// NOTE(review): the closing brace and "return stats;" are not visible in this
// listing (numbering jumps 169->174).
166 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
167 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
168 for( final DomainSimilarity similarity : similarities ) {
169 stats.addValue( similarity.getMeanSimilarityScore() );
// Counts how many positions in [domain.getFrom(), domain.getTo()] (inclusive)
// are already marked true in covered_positions; positions beyond the list's
// size are treated as uncovered.
// NOTE(review): the "++overlap_count;" line inside the if is absent from this
// listing (numbering jumps 177->181). Verify against the full file.
174 public static int calculateOverlap( final Domain domain, final List<Boolean> covered_positions ) {
175 int overlap_count = 0;
176 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
177 if ( ( i < covered_positions.size() ) && ( covered_positions.get( i ) == true ) ) {
181 return overlap_count;
// Aborts the program (ForesterUtil.fatalError) if the given output file is not
// writable; ForesterUtil.isWritableFile returns a non-empty error string on failure.
184 public static void checkForOutputFileWriteability( final File outfile ) {
185 final String error = ForesterUtil.isWritableFile( outfile );
186 if ( !ForesterUtil.isEmpty( error ) ) {
187 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Scans a gain/loss character-state matrix and appends to the given list every
// binary domain combination that was gained (get_gains == true) or lost
// (get_gains == false). Identifiers are visited in sorted order; the concrete
// BinaryDomainCombination subtype is chosen by dc_type (DIRECTED_ADJACTANT,
// DIRECTED, or the basic default).
// NOTE(review): despite the method name, no file is written here -- results go
// into all_binary_domains_combination_gained (which also receives losses).
191 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
192 final BinaryDomainCombination.DomainCombinationType dc_type,
193 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
194 final boolean get_gains ) {
195 final SortedSet<String> sorted_ids = new TreeSet<String>();
196 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
197 sorted_ids.add( matrix.getIdentifier( i ) );
199 for( final String id : sorted_ids ) {
200 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
201 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
202 || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
203 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
204 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
205 .createInstance( matrix.getCharacter( c ) ) );
207 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
208 all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
209 .createInstance( matrix.getCharacter( c ) ) );
212 all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
213 .getCharacter( c ) ) );
// Builds a multimap from Pfam domain id to the list of GO ids mapped to it.
// A domain id mapped to several GO terms accumulates all of them in order.
220 public static Map<String, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
221 final Map<String, List<GoId>> domain_id_to_go_ids_map = new HashMap<String, List<GoId>>( pfam_to_go_mappings.size() );
222 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
223 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
224 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
226 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
228 return domain_id_to_go_ids_map;
// Parses a tab-separated mapping file (column 0: domain id, column 1: secondary
// feature) into a sorted multimap of domain id -> set of secondary features.
// NOTE(review): the throws clause and the "return map;" line are absent from
// this listing (numbering jumps 231->233 and 240->245).
231 public static Map<String, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
233 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, '\t' );
234 final Map<String, Set<String>> map = new TreeMap<String, Set<String>>();
235 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
236 final String domain_id = primary_table.getValue( 0, r );
237 if ( !map.containsKey( domain_id ) ) {
238 map.put( domain_id, new HashSet<String>() );
240 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
// Computes a neighbor-joining tree from the given distance matrix, names it
// after the output file, and writes it to nj_tree_outfile. Aborts first if the
// outfile is not writable. The matrix argument is cast to
// BasicSymmetricalDistanceMatrix -- callers must pass that concrete type.
// NOTE(review): the "return phylogeny;" line is absent from this listing
// (numbering jumps 250->254).
245 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
246 checkForOutputFileWriteability( nj_tree_outfile );
247 final NeighborJoining nj = NeighborJoining.createInstance();
248 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
249 phylogeny.setName( nj_tree_outfile.getName() );
250 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
// Builds a map from taxonomy code to numeric taxonomy id by walking the
// phylogeny's external nodes. Fatally aborts on: missing taxonomy, empty id,
// duplicate code, or duplicate id.
// NOTE(review): the line actually inserting into the map (presumably
// m.put( c, iid )) and the final return are absent from this listing (numbering
// jumps 274->280 and 280->286). Also note the null check at 262 follows a
// getTaxonomy() dereference at 259 and so appears unreachable -- verify against
// the full file before relying on it.
254 public static Map<String, Integer> createTaxCodeToIdMap( final Phylogeny phy ) {
255 final Map<String, Integer> m = new HashMap<String, Integer>();
256 for( final PhylogenyNodeIterator iter = phy.iteratorExternalForward(); iter.hasNext(); ) {
257 final PhylogenyNode n = iter.next();
258 if ( n.getNodeData().isHasTaxonomy() ) {
259 final Taxonomy t = n.getNodeData().getTaxonomy();
260 final String c = t.getTaxonomyCode();
261 if ( !ForesterUtil.isEmpty( c ) ) {
262 if ( n.getNodeData().getTaxonomy() == null ) {
263 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
265 final String id = n.getNodeData().getTaxonomy().getIdentifier().getValue();
266 if ( ForesterUtil.isEmpty( id ) ) {
267 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
269 if ( m.containsKey( c ) ) {
270 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy code " + c + " is not unique" );
272 final int iid = Integer.valueOf( id );
273 if ( m.containsValue( iid ) ) {
274 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy id " + iid + " is not unique" );
280 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy for node " + n );
// Applies the given detailedness setting to every PrintableDomainSimilarity in
// the set; other DomainSimilarity implementations are left untouched.
286 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
287 final Detailedness detailedness ) {
288 for( final DomainSimilarity domain_similarity : domain_similarities ) {
289 if ( domain_similarity instanceof PrintableDomainSimilarity ) {
290 final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
291 printable_domain_similarity.setDetailedness( detailedness );
// For each protein that (optionally) matches limit_to_species and contains the
// query domains in N-to-C order, writes a separated record: species id,
// protein id, the protein's distinct domain ids with total counts, then
// description and accession (each suppressed when empty or NONE), ending with
// a newline. The writer variable "out" and the average_protein_lengths_by_dc
// parameter's role are not fully visible here.
// NOTE(review): this listing is missing several lines (numbering jumps 297->299,
// 309->311, 315->322, 322->324, 324->329, 332->334, 337->339) -- among them the
// "out" Writer parameter declaration and the separator handling tied to the
// "first" flag. Verify against the full file.
296 public static void doit( final List<Protein> proteins,
297 final List<String> query_domain_ids_nc_order,
299 final String separator,
300 final String limit_to_species,
301 final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
302 for( final Protein protein : proteins ) {
303 if ( ForesterUtil.isEmpty( limit_to_species )
304 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
305 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
306 out.write( protein.getSpecies().getSpeciesId() );
307 out.write( separator );
308 out.write( protein.getProteinId().getId() );
309 out.write( separator );
311 final Set<String> visited_domain_ids = new HashSet<String>();
312 boolean first = true;
313 for( final Domain domain : protein.getProteinDomains() ) {
314 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
315 visited_domain_ids.add( domain.getDomainId() );
322 out.write( domain.getDomainId() );
324 out.write( "" + domain.getTotalCount() );
329 out.write( separator );
330 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
331 .equals( SurfacingConstants.NONE ) ) ) {
332 out.write( protein.getDescription() );
334 out.write( separator );
335 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
336 .equals( SurfacingConstants.NONE ) ) ) {
337 out.write( protein.getAccession() );
339 out.write( SurfacingConstants.NL );
// Computes per-genome domains-per-protein statistics and folds them into the
// cross-genome accumulators: a DescriptiveStatistics instance, a histogram
// (domain count -> number of proteins), and three disjoint classification sets
// tracking whether each domain id appears always-single, sometimes-single, or
// never-single across proteins. Finally writes one tab-separated summary row
// (genome, mean, sd, median, N, min, max) to the writer; for N < 1 / N < 2 the
// corresponding columns are left empty (tabs only, in the else branches).
// NOTE(review): several closing braces / else keywords and the try statement
// paired with the IOException catch at 426 are not visible in this listing
// (numbering gaps, e.g. 361->364, 372->375, 414->417, 424->426); the catch
// handler's body is also absent. Verify against the full file.
346 public static void domainsPerProteinsStatistics( final String genome,
347 final List<Protein> protein_list,
348 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
349 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
350 final SortedSet<String> domains_which_are_always_single,
351 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
352 final SortedSet<String> domains_which_never_single,
353 final Writer writer ) {
354 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
355 for( final Protein protein : protein_list ) {
356 final int domains = protein.getNumberOfProteinDomains();
357 //System.out.println( domains );
358 stats.addValue( domains );
359 all_genomes_domains_per_potein_stats.addValue( domains );
360 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
361 all_genomes_domains_per_potein_histo.put( domains, 1 );
364 all_genomes_domains_per_potein_histo.put( domains,
365 1 + all_genomes_domains_per_potein_histo.get( domains ) );
367 if ( domains == 1 ) {
368 final String domain = protein.getProteinDomain( 0 ).getDomainId();
369 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
370 if ( domains_which_never_single.contains( domain ) ) {
371 domains_which_never_single.remove( domain );
372 domains_which_are_sometimes_single_sometimes_not.add( domain );
375 domains_which_are_always_single.add( domain );
379 else if ( domains > 1 ) {
380 for( final Domain d : protein.getProteinDomains() ) {
381 final String domain = d.getDomainId();
382 // System.out.println( domain );
383 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
384 if ( domains_which_are_always_single.contains( domain ) ) {
385 domains_which_are_always_single.remove( domain );
386 domains_which_are_sometimes_single_sometimes_not.add( domain );
389 domains_which_never_single.add( domain );
396 writer.write( genome );
397 writer.write( "\t" );
398 if ( stats.getN() >= 1 ) {
399 writer.write( stats.arithmeticMean() + "" );
400 writer.write( "\t" );
401 if ( stats.getN() >= 2 ) {
402 writer.write( stats.sampleStandardDeviation() + "" );
407 writer.write( "\t" );
408 writer.write( stats.median() + "" );
409 writer.write( "\t" );
410 writer.write( stats.getN() + "" );
411 writer.write( "\t" );
412 writer.write( stats.getMin() + "" );
413 writer.write( "\t" );
414 writer.write( stats.getMax() + "" );
417 writer.write( "\t" );
418 writer.write( "\t" );
419 writer.write( "\t" );
421 writer.write( "\t" );
422 writer.write( "\t" );
424 writer.write( "\n" );
426 catch ( final IOException e ) {
// Writes a domain-length analysis report to outfile:
//   1. mean-based statistics per species (table),
//   2. per-domain outlier species at Z >= 1.5,
//   3. overall mean-based statistics with an ASCII histogram,
//   4. per-species Z-scores against the all-species mean/sd, and
//   5. species whose |Z| >= 1.0 with a stats summary (Z formatted "#.00").
// Aborts first if outfile is not writable. The input_file_properties and
// number_of_genomes parameters are not used in the lines visible here.
// NOTE(review): trailing lines (e.g. out.close() and the closing brace) are not
// visible in this listing (numbering jumps 483->492); the writer is otherwise
// not closed in the visible code. Verify against the full file.
431 public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
432 final int number_of_genomes,
433 final DomainLengthsTable domain_lengths_table,
434 final File outfile ) throws IOException {
435 final DecimalFormat df = new DecimalFormat( "#.00" );
436 checkForOutputFileWriteability( outfile );
437 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
438 out.write( "MEAN BASED STATISTICS PER SPECIES" );
439 out.write( ForesterUtil.LINE_SEPARATOR );
440 out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
441 out.write( ForesterUtil.LINE_SEPARATOR );
442 out.write( ForesterUtil.LINE_SEPARATOR );
443 final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
444 out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
445 out.write( ForesterUtil.LINE_SEPARATOR );
446 for( final DomainLengths domain_lengths : domain_lengths_list ) {
447 final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
448 if ( species_list.size() > 0 ) {
449 out.write( domain_lengths.getDomainId() + "\t" );
450 for( final Species species : species_list ) {
451 out.write( species + "\t" );
453 out.write( ForesterUtil.LINE_SEPARATOR );
456 out.write( ForesterUtil.LINE_SEPARATOR );
457 out.write( ForesterUtil.LINE_SEPARATOR );
458 out.write( "OUTLIER SPECIES (Z 1.0)" );
459 out.write( ForesterUtil.LINE_SEPARATOR );
460 final DescriptiveStatistics stats_for_all_species = domain_lengths_table
461 .calculateMeanBasedStatisticsForAllSpecies();
462 out.write( stats_for_all_species.asSummary() );
463 out.write( ForesterUtil.LINE_SEPARATOR );
464 final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
465 out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
466 out.write( ForesterUtil.LINE_SEPARATOR );
467 final double population_sd = stats_for_all_species.sampleStandardDeviation();
468 final double population_mean = stats_for_all_species.arithmeticMean();
469 for( final Species species : domain_lengths_table.getSpecies() ) {
470 final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
471 final double z = ( x - population_mean ) / population_sd;
472 out.write( species + "\t" + z );
473 out.write( ForesterUtil.LINE_SEPARATOR );
475 out.write( ForesterUtil.LINE_SEPARATOR );
476 for( final Species species : domain_lengths_table.getSpecies() ) {
477 final DescriptiveStatistics stats_for_species = domain_lengths_table
478 .calculateMeanBasedStatisticsForSpecies( species );
479 final double x = stats_for_species.arithmeticMean();
480 final double z = ( x - population_mean ) / population_sd;
481 if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
482 out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
483 out.write( ForesterUtil.LINE_SEPARATOR );
// Runs the full parsimony pipeline on domain presence/absence and on binary
// domain combinations, writing many output files derived from outfile_name:
//   - optional Nexus export of the matrices (write_to_nexus);
//   - Dollo parsimony on domain presence: gain/loss matrices, gain/loss/present
//     lists, HTML gain/loss reports, annotated phylogeny, and per-subtree
//     gain/loss listings;
//   - Fitch parsimony on binary domain combinations (optionally randomized via
//     random_number_seed_for_fitch_parsimony): analogous matrix/list/HTML
//     outputs, optional DOT graphs, collection of gained/lost combinations into
//     the caller-supplied lists, pfam summary files, annotated phylogeny, and
//     independent-domain-combination-gain statistics.
// All pfam ids encountered/gained/lost are accumulated in local sorted sets and
// written out at the end of the Fitch section.
// NOTE(review): this listing omits many argument lines of the multi-line calls
// (embedded numbering gaps throughout, e.g. 527->531, 544->546, 605->608,
// 734->737) as well as the try block opening matched by the catch at 617 and
// part of the leading javadoc. The code below is reproduced verbatim from the
// listing; consult the full file before editing.
492 * @param all_binary_domains_combination_lost_fitch
493 * @param use_last_in_fitch_parsimony
494 * @param consider_directedness_and_adjacency_for_bin_combinations
495 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
496 * which were gained under unweighted (Fitch) parsimony.
498 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
499 final boolean radomize_fitch_parsimony,
500 final String outfile_name,
501 final DomainParsimonyCalculator domain_parsimony,
502 final Phylogeny phylogeny,
503 final Map<String, List<GoId>> domain_id_to_go_ids_map,
504 final Map<GoId, GoTerm> go_id_to_term_map,
505 final GoNameSpace go_namespace_limit,
506 final String parameters_str,
507 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
508 final SortedSet<String> positive_filter,
509 final boolean output_binary_domain_combinations_for_graphs,
510 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
511 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
512 final BinaryDomainCombination.DomainCombinationType dc_type,
513 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
514 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
515 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain,
516 final Map<String, Integer> tax_code_to_id_map,
517 final boolean write_to_nexus,
518 final boolean use_last_in_fitch_parsimony ) {
519 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
520 final String date_time = ForesterUtil.getCurrentDateTime();
521 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
522 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
523 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
524 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
525 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
526 if ( write_to_nexus ) {
527 writeToNexus( outfile_name, domain_parsimony, phylogeny );
531 Phylogeny local_phylogeny_l = phylogeny.copy();
532 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
533 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
536 domain_parsimony.executeDolloParsimonyOnDomainPresence();
538 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
539 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
540 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
541 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
542 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
543 CharacterStateMatrix.GainLossStates.GAIN,
544 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
546 ForesterUtil.LINE_SEPARATOR,
548 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
549 CharacterStateMatrix.GainLossStates.LOSS,
550 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
552 ForesterUtil.LINE_SEPARATOR,
554 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
555 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
557 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
561 domain_parsimony.getGainLossMatrix(),
562 CharacterStateMatrix.GainLossStates.GAIN,
563 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
565 ForesterUtil.LINE_SEPARATOR,
566 "Dollo Parsimony | Gains | Domains",
568 domain_id_to_secondary_features_maps,
569 all_pfams_encountered,
570 all_pfams_gained_as_domains,
572 tax_code_to_id_map );
573 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
577 domain_parsimony.getGainLossMatrix(),
578 CharacterStateMatrix.GainLossStates.LOSS,
579 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
581 ForesterUtil.LINE_SEPARATOR,
582 "Dollo Parsimony | Losses | Domains",
584 domain_id_to_secondary_features_maps,
585 all_pfams_encountered,
586 all_pfams_lost_as_domains,
588 tax_code_to_id_map );
589 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
590 // go_id_to_term_map,
591 // go_namespace_limit,
593 // domain_parsimony.getGainLossMatrix(),
595 // outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
597 // ForesterUtil.LINE_SEPARATOR,
598 // "Dollo Parsimony | Present | Domains",
600 // domain_id_to_secondary_features_maps,
601 // all_pfams_encountered,
603 // "_dollo_present_d",
604 // tax_code_to_id_map );
605 preparePhylogeny( local_phylogeny_l,
608 "Dollo parsimony on domain presence/absence",
609 "dollo_on_domains_" + outfile_name,
611 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
612 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
614 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
615 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
617 catch ( final IOException e ) {
619 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
621 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
622 // FITCH DOMAIN COMBINATIONS
623 // -------------------------
624 local_phylogeny_l = phylogeny.copy();
625 String randomization = "no";
626 if ( radomize_fitch_parsimony ) {
627 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
628 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
631 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( use_last_in_fitch_parsimony );
633 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
634 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
635 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
636 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
638 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
639 CharacterStateMatrix.GainLossStates.GAIN,
640 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
642 ForesterUtil.LINE_SEPARATOR,
644 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
645 CharacterStateMatrix.GainLossStates.LOSS,
647 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
649 ForesterUtil.LINE_SEPARATOR,
651 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
652 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
653 if ( all_binary_domains_combination_gained_fitch != null ) {
654 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
656 all_binary_domains_combination_gained_fitch,
659 if ( all_binary_domains_combination_lost_fitch != null ) {
660 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
662 all_binary_domains_combination_lost_fitch,
665 if ( output_binary_domain_combinations_for_graphs ) {
667 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
668 .getGainLossMatrix(),
671 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
673 ForesterUtil.LINE_SEPARATOR,
674 BinaryDomainCombination.OutputFormat.DOT );
677 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
681 domain_parsimony.getGainLossMatrix(),
682 CharacterStateMatrix.GainLossStates.GAIN,
683 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
685 ForesterUtil.LINE_SEPARATOR,
686 "Fitch Parsimony | Gains | Domain Combinations",
689 all_pfams_encountered,
690 all_pfams_gained_as_dom_combinations,
692 tax_code_to_id_map );
693 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
697 domain_parsimony.getGainLossMatrix(),
698 CharacterStateMatrix.GainLossStates.LOSS,
699 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
701 ForesterUtil.LINE_SEPARATOR,
702 "Fitch Parsimony | Losses | Domain Combinations",
705 all_pfams_encountered,
706 all_pfams_lost_as_dom_combinations,
708 tax_code_to_id_map );
709 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
710 // go_id_to_term_map,
711 // go_namespace_limit,
713 // domain_parsimony.getGainLossMatrix(),
715 // outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
717 // ForesterUtil.LINE_SEPARATOR,
718 // "Fitch Parsimony | Present | Domain Combinations",
721 // all_pfams_encountered,
723 // "_fitch_present_dc",
724 // tax_code_to_id_map );
725 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
728 all_pfams_encountered );
729 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
730 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
731 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
732 all_pfams_gained_as_dom_combinations );
733 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
734 preparePhylogeny( local_phylogeny_l,
737 "Fitch parsimony on binary domain combination presence/absence randomization: "
739 "fitch_on_binary_domain_combinations_" + outfile_name,
741 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
742 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
743 calculateIndependentDomainCombinationGains( local_phylogeny_l,
745 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
747 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
749 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
751 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
752 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
753 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
754 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
755 protein_length_stats_by_dc,
756 domain_number_stats_by_dc,
757 domain_length_stats_by_domain );
// Parsimony analysis restricted to secondary features mapped onto domains:
//   - writes a Nexus export of the secondary-feature presence/absence matrix;
//   - runs Dollo parsimony (using mapping_results_map), writing gain/loss
//     matrices plus gain/loss/present list files and the annotated phylogeny;
//   - runs Fitch parsimony on the mapped binary domain combinations (never
//     randomized here -- "randomization" is fixed to "no"), writing the
//     annotated phylogeny and independent-DC-gain outputs with "_MAPPED_" names
//     and null protein/domain statistics maps.
// NOTE(review): many argument lines of the multi-line calls are absent from
// this listing (embedded numbering gaps, e.g. 785->788, 801->803, 807->809,
// 822->824). Code below is reproduced verbatim; consult the full file.
761 public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
762 final DomainParsimonyCalculator secondary_features_parsimony,
763 final Phylogeny phylogeny,
764 final String parameters_str,
765 final Map<Species, MappingResults> mapping_results_map,
766 final boolean use_last_in_fitch_parsimony ) {
767 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
768 final String date_time = ForesterUtil.getCurrentDateTime();
769 System.out.println();
770 writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
771 secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
773 Phylogeny local_phylogeny_copy = phylogeny.copy();
774 secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
775 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
776 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
777 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
778 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
780 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
781 CharacterStateMatrix.GainLossStates.GAIN,
783 + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
785 ForesterUtil.LINE_SEPARATOR,
788 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
789 CharacterStateMatrix.GainLossStates.LOSS,
791 + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
793 ForesterUtil.LINE_SEPARATOR,
796 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
799 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
801 ForesterUtil.LINE_SEPARATOR,
803 preparePhylogeny( local_phylogeny_copy,
804 secondary_features_parsimony,
806 "Dollo parsimony on secondary feature presence/absence",
807 "dollo_on_secondary_features_" + outfile_name,
809 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
810 + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
811 // FITCH DOMAIN COMBINATIONS
812 // -------------------------
813 local_phylogeny_copy = phylogeny.copy();
814 final String randomization = "no";
815 secondary_features_parsimony
816 .executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( use_last_in_fitch_parsimony );
817 preparePhylogeny( local_phylogeny_copy,
818 secondary_features_parsimony,
820 "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
822 "fitch_on_binary_domain_combinations_" + outfile_name,
824 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
825 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
826 calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
827 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
828 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
829 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
830 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
831 + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
832 + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
// Writes one separator-delimited record per protein that contains the query
// domains in the given N-to-C order, optionally restricted to one species:
// species id | protein id | distinct domain ids (with total counts) | description | accession.
// NOTE(review): several source lines are elided in this view (e.g. the Writer
// parameter `out` used below is not visible in the signature) — confirm against
// the full file before relying on the exact record layout.
835 public static void extractProteinNames( final List<Protein> proteins,
836 final List<String> query_domain_ids_nc_order,
838 final String separator,
839 final String limit_to_species ) throws IOException {
840 for( final Protein protein : proteins ) {
// Empty limit_to_species means "all species"; otherwise match case-insensitively.
841 if ( ForesterUtil.isEmpty( limit_to_species )
842 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
843 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
844 out.write( protein.getSpecies().getSpeciesId() );
845 out.write( separator );
846 out.write( protein.getProteinId().getId() );
847 out.write( separator );
849 final Set<String> visited_domain_ids = new HashSet<String>();
850 boolean first = true;
851 for( final Domain domain : protein.getProteinDomains() ) {
// Each distinct domain id is emitted only once; getTotalCount() carries multiplicity.
852 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
853 visited_domain_ids.add( domain.getDomainId() );
860 out.write( domain.getDomainId() );
862 out.write( "" + domain.getTotalCount() );
867 out.write( separator );
// Description and accession are suppressed when empty or the "NONE" sentinel.
868 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
869 .equals( SurfacingConstants.NONE ) ) ) {
870 out.write( protein.getDescription() );
872 out.write( separator );
873 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
874 .equals( SurfacingConstants.NONE ) ) ) {
875 out.write( protein.getAccession() );
877 out.write( SurfacingConstants.NL );
// Per-species variant: for every protein carrying the given single domain id
// (subject to a per-domain E-value cutoff; negative cutoff disables filtering),
// writes species id | protein id | domain id | per-domain ranges | all passing
// domains sorted by position | description | accession.
// NOTE(review): lines are elided in this view (the Writer `out` parameter and
// the `prev_to` declaration are not visible) — confirm against the full file.
884 public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
885 final String domain_id,
887 final String separator,
888 final String limit_to_species,
889 final double domain_e_cutoff ) throws IOException {
890 System.out.println( "Per domain E-value: " + domain_e_cutoff );
891 for( final Species species : protein_lists_per_species.keySet() ) {
892 System.out.println( species + ":" );
893 for( final Protein protein : protein_lists_per_species.get( species ) ) {
894 if ( ForesterUtil.isEmpty( limit_to_species )
895 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
896 final List<Domain> domains = protein.getProteinDomains( domain_id );
897 if ( domains.size() > 0 ) {
898 out.write( protein.getSpecies().getSpeciesId() );
899 out.write( separator );
900 out.write( protein.getProteinId().getId() );
901 out.write( separator );
902 out.write( domain_id.toString() );
903 out.write( separator );
905 for( final Domain domain : domains ) {
906 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
908 out.write( domain.getFrom() + "-" + domain.getTo() );
// Prints the gap length between consecutive matched domains on stdout.
909 if ( prev_to >= 0 ) {
910 final int l = domain.getFrom() - prev_to;
911 System.out.println( l );
913 prev_to = domain.getTo();
917 out.write( separator );
// Second pass: collect ALL of the protein's domains passing the cutoff,
// sort by position, and emit id:range:E-value triples.
918 final List<Domain> domain_list = new ArrayList<Domain>();
919 for( final Domain domain : protein.getProteinDomains() ) {
920 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
921 domain_list.add( domain );
924 final Domain domain_ary[] = new Domain[ domain_list.size() ];
925 for( int i = 0; i < domain_list.size(); ++i ) {
926 domain_ary[ i ] = domain_list.get( i );
928 Arrays.sort( domain_ary, new DomainComparator( true ) );
930 boolean first = true;
931 for( final Domain domain : domain_ary ) {
938 out.write( domain.getDomainId().toString() );
939 out.write( ":" + domain.getFrom() + "-" + domain.getTo() );
940 out.write( ":" + domain.getPerDomainEvalue() );
// Description and accession are suppressed when empty or the "NONE" sentinel.
943 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
944 .equals( SurfacingConstants.NONE ) ) ) {
945 out.write( protein.getDescription() );
947 out.write( separator );
948 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
949 .equals( SurfacingConstants.NONE ) ) ) {
950 out.write( protein.getAccession() );
952 out.write( SurfacingConstants.NL );
// Returns the union of all domain ids found across the given genome-wide
// combinable-domains objects, as a sorted (TreeSet) set.
960 public static SortedSet<String> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
961 final SortedSet<String> all_domains_ids = new TreeSet<String>();
962 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
963 final Set<String> all_domains = gwcd.getAllDomainIds();
964 // for( final Domain domain : all_domains ) {
965 all_domains_ids.addAll( all_domains );
968 return all_domains_ids;
// Tallies how many times each domain id occurs across all proteins' domains,
// keyed and sorted by domain id.
// NOTE(review): the else-branch (first occurrence -> put(id, 1)) and the return
// statement are elided in this view — confirm against the full file.
971 public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
972 final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
973 for( final Protein protein_domain_collection : protein_domain_collections ) {
974 for( final Object name : protein_domain_collection.getProteinDomains() ) {
975 final BasicDomain protein_domain = ( BasicDomain ) name;
976 final String id = protein_domain.getDomainId();
977 if ( map.containsKey( id ) ) {
978 map.put( id, map.get( id ) + 1 );
// Counts phylogeny nodes that have neither a node name nor a taxonomy
// scientific/common name; appends context (parent name, external descendants)
// to `names` / stdout for diagnostics.
// NOTE(review): the counter increment and return are elided in this view.
988 public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
989 final PhylogenyNodeIterator it = p.iteratorPostorder();
991 while ( it.hasNext() ) {
992 final PhylogenyNode n = it.next();
// "Lacking a name" means: empty node name AND no usable taxonomy
// scientific name AND no usable taxonomy common name.
993 if ( ForesterUtil.isEmpty( n.getName() )
994 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
995 .getScientificName() ) )
996 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
997 .getCommonName() ) ) ) {
998 if ( n.getParent() != null ) {
1000 names.append( n.getParent().getName() );
1002 final List l = n.getAllExternalDescendants();
// NOTE(review): prints the whole list once per element — possibly intended
// to print `object`; confirm before changing.
1003 for( final Object object : l ) {
1004 System.out.println( l.toString() );
1013 * Returns true is Domain domain falls in an uninterrupted stretch of
1014 * covered positions.
1017 * @param covered_positions
// Checks whether every position in [domain.getFrom(), domain.getTo()] is
// already marked covered; a position beyond the list's end counts as uncovered.
// NOTE(review): the return statements are elided in this view.
1020 public static boolean isEngulfed( final Domain domain, final List<Boolean> covered_positions ) {
1021 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1022 if ( ( i >= covered_positions.size() ) || ( covered_positions.get( i ) != true ) ) {
// Writes two report files: (1) domain-architecture counts at or above
// min_count, tab-separated; (2) for each DA passing the threshold, the genomes
// whose DA set contains it ("unique DA" listing).
// NOTE(review): the opening `try` and some closing braces are elided in this view.
1029 public static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
1030 final SortedMap<String, Integer> domain_architecuture_counts,
1031 final int min_count,
1032 final File da_counts_outfile,
1033 final File unique_da_outfile ) {
1034 checkForOutputFileWriteability( da_counts_outfile );
1035 checkForOutputFileWriteability( unique_da_outfile );
1037 final BufferedWriter da_counts_out = new BufferedWriter( new FileWriter( da_counts_outfile ) );
1038 final BufferedWriter unique_da_out = new BufferedWriter( new FileWriter( unique_da_outfile ) );
1039 final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
1040 while ( it.hasNext() ) {
1041 final Map.Entry<String, Integer> e = it.next();
1042 final String da = e.getKey();
1043 final int count = e.getValue();
// Only architectures seen at least min_count times are reported.
1044 if ( count >= min_count ) {
1045 da_counts_out.write( da );
1046 da_counts_out.write( "\t" );
1047 da_counts_out.write( String.valueOf( count ) );
1048 da_counts_out.write( ForesterUtil.LINE_SEPARATOR );
// Inner scan: list every genome whose DA set contains this architecture.
// O(|counts| * |genomes|) — acceptable for the expected data sizes.
1051 final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
1052 while ( it2.hasNext() ) {
1053 final Map.Entry<String, Set<String>> e2 = it2.next();
1054 final String genome = e2.getKey();
1055 final Set<String> das = e2.getValue();
1056 if ( das.contains( da ) ) {
1057 unique_da_out.write( genome );
1058 unique_da_out.write( "\t" );
1059 unique_da_out.write( da );
1060 unique_da_out.write( ForesterUtil.LINE_SEPARATOR );
1065 unique_da_out.close();
1066 da_counts_out.close();
1068 catch ( final IOException e ) {
1069 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// NOTE(review): message text says "distance matrices" but these files contain
// DA counts / unique DAs — looks like a copy-paste from another writer; confirm.
1071 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + da_counts_outfile + "\"" );
1072 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + unique_da_outfile + "\"" );
// Decorates a phylogeny with the parsimony results and stamps it with a
// description summarizing method, date, cost, and gain/loss/unchanged totals;
// marks the tree rooted and non-rerootable so downstream display is stable.
// NOTE(review): one parameter line appears elided between `method` and
// `parameters_str` in this view — confirm the full signature in the file.
1076 public static void preparePhylogeny( final Phylogeny p,
1077 final DomainParsimonyCalculator domain_parsimony,
1078 final String date_time,
1079 final String method,
1081 final String parameters_str ) {
1082 domain_parsimony.decoratePhylogenyWithDomains( p );
1083 final StringBuilder desc = new StringBuilder();
1084 desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
1085 desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
1086 desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
1087 desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
1088 desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
1089 desc.append( "[Parameters: " + parameters_str + "]" );
1091 p.setDescription( desc.toString() );
// Parsimony cost is recorded as the tree's confidence value, typed "parsimony".
1092 p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
1093 p.setRerootable( false );
1094 p.setRooted( true );
1098 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
// Serializes a protein's pairwise domain combinations to the columnar format
// documented above (plus per-domain counts); proteins with exactly one domain
// or none get a padded row so column alignment is preserved.
1102 static public StringBuffer proteinToDomainCombinations( final Protein protein,
1103 final String protein_id,
1104 final String separator ) {
1105 final StringBuffer sb = new StringBuffer();
1106 if ( protein.getSpecies() == null ) {
1107 throw new IllegalArgumentException( "species must not be null" );
1109 if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
1110 throw new IllegalArgumentException( "species id must not be empty" );
1112 final List<Domain> domains = protein.getProteinDomains();
1113 if ( domains.size() > 1 ) {
// First pass: multiplicity of each domain id within this protein.
1114 final Map<String, Integer> counts = new HashMap<String, Integer>();
1115 for( final Domain domain : domains ) {
1116 final String id = domain.getDomainId();
1117 if ( counts.containsKey( id ) ) {
1118 counts.put( id, counts.get( id ) + 1 );
1121 counts.put( id, 1 );
// Second pass: every unordered pair, oriented so domain_n is the more
// N-terminal (smaller getFrom()); duplicates suppressed via the dcs set.
1124 final Set<String> dcs = new HashSet<String>();
1125 for( int i = 1; i < domains.size(); ++i ) {
1126 for( int j = 0; j < i; ++j ) {
1127 Domain domain_n = domains.get( i );
1128 Domain domain_c = domains.get( j );
1129 if ( domain_n.getFrom() > domain_c.getFrom() ) {
1130 domain_n = domains.get( j );
1131 domain_c = domains.get( i );
// Dedup key is the plain concatenation of the two domain ids.
1133 final String dc = domain_n.getDomainId() + domain_c.getDomainId();
1134 if ( !dcs.contains( dc ) ) {
1136 sb.append( protein.getSpecies() );
1137 sb.append( separator );
1138 sb.append( protein_id );
1139 sb.append( separator );
1140 sb.append( domain_n.getDomainId() );
1141 sb.append( separator );
1142 sb.append( domain_c.getDomainId() );
1143 sb.append( separator );
1144 sb.append( domain_n.getPerDomainEvalue() );
1145 sb.append( separator );
1146 sb.append( domain_c.getPerDomainEvalue() );
1147 sb.append( separator );
1148 sb.append( counts.get( domain_n.getDomainId() ) );
1149 sb.append( separator );
1150 sb.append( counts.get( domain_c.getDomainId() ) );
1151 sb.append( ForesterUtil.LINE_SEPARATOR );
// Single-domain protein: emit its one domain with empty c-terminal columns.
1156 else if ( domains.size() == 1 ) {
1157 sb.append( protein.getSpecies() );
1158 sb.append( separator );
1159 sb.append( protein_id );
1160 sb.append( separator );
1161 sb.append( domains.get( 0 ).getDomainId() );
1162 sb.append( separator );
1163 sb.append( separator );
1164 sb.append( domains.get( 0 ).getPerDomainEvalue() );
1165 sb.append( separator );
1166 sb.append( separator );
1168 sb.append( separator );
1169 sb.append( ForesterUtil.LINE_SEPARATOR );
// Domain-less protein: all-empty columns keep the row count consistent.
1172 sb.append( protein.getSpecies() );
1173 sb.append( separator );
1174 sb.append( protein_id );
1175 sb.append( separator );
1176 sb.append( separator );
1177 sb.append( separator );
1178 sb.append( separator );
1179 sb.append( separator );
1180 sb.append( separator );
1181 sb.append( ForesterUtil.LINE_SEPARATOR );
1188 * Example regarding engulfment: ------------0.1 ----------0.2 --0.3 =>
1189 * domain with 0.3 is ignored
1191 * -----------0.1 ----------0.2 --0.3 => domain with 0.3 is ignored
1194 * ------------0.1 ----------0.3 --0.2 => domains with 0.3 and 0.2 are _not_
1197 * @param max_allowed_overlap
1198 * maximal allowed overlap (inclusive) to be still considered not
1199 * overlapping (zero or negative value to allow any overlap)
1200 * @param remove_engulfed_domains
1201 * to remove domains which are completely engulfed by coverage of
1202 * domains with better support
// Builds a pruned copy of the protein, admitting domains in ascending-confidence
// order and rejecting any that overlap prior accepted coverage beyond the
// allowed amount, or (optionally) that are fully engulfed by that coverage.
1206 public static Protein removeOverlappingDomains( final int max_allowed_overlap,
1207 final boolean remove_engulfed_domains,
1208 final Protein protein ) {
1209 final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(), protein.getSpecies()
1210 .getSpeciesId(), protein.getLength() );
// Best-supported (lowest confidence value) domains are considered first.
1211 final List<Domain> sorted = SurfacingUtil.sortDomainsWithAscendingConfidenceValues( protein );
// covered_positions[i] == true iff residue position i is claimed by an
// already-accepted domain; the list grows lazily as domains are accepted.
1212 final List<Boolean> covered_positions = new ArrayList<Boolean>();
1213 for( final Domain domain : sorted ) {
1214 if ( ( ( max_allowed_overlap < 0 ) || ( SurfacingUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
1215 && ( !remove_engulfed_domains || !isEngulfed( domain, covered_positions ) ) ) {
// Pad with `false` up to the domain's start, then mark its span covered.
1216 final int covered_positions_size = covered_positions.size();
1217 for( int i = covered_positions_size; i < domain.getFrom(); ++i ) {
1218 covered_positions.add( false );
1220 final int new_covered_positions_size = covered_positions.size();
1221 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1222 if ( i < new_covered_positions_size ) {
1223 covered_positions.set( i, true );
1226 covered_positions.add( true );
1229 pruned_protein.addProteinDomain( domain );
1232 return pruned_protein;
// Returns the protein's domains as a new list sorted by ascending confidence
// value (best-supported first per ASCENDING_CONFIDENCE_VALUE_ORDER).
// NOTE(review): the domains.add(d) line and the return are elided in this view.
1235 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1236 final List<Domain> domains = new ArrayList<Domain>();
1237 for( final Domain d : protein.getProteinDomains() ) {
1240 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// Records the genome's set of distinct domain-architecture strings (built from
// each protein with "~" domain separator, 3-repeat collapse, "=" marker) and
// maintains global per-architecture occurrence counts across genomes.
// NOTE(review): the da.add(da_str) call and the return value are elided here.
1244 public static int storeDomainArchitectures( final String genome,
1245 final SortedMap<String, Set<String>> domain_architecutures,
1246 final List<Protein> protein_list,
1247 final Map<String, Integer> distinct_domain_architecuture_counts ) {
1248 final Set<String> da = new HashSet<String>();
1249 domain_architecutures.put( genome, da );
1250 for( final Protein protein : protein_list ) {
1251 final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~", 3, "=" );
// Count each architecture at most once per genome.
1252 if ( !da.contains( da_str ) ) {
1253 if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
1254 distinct_domain_architecuture_counts.put( da_str, 1 );
1257 distinct_domain_architecuture_counts.put( da_str,
1258 distinct_domain_architecuture_counts.get( da_str ) + 1 );
// For every internal node of the phylogeny, writes the set of domains gained
// (or lost, when get_gains is false) anywhere in that node's subtree to a
// per-node file under a freshly created base directory.
1266 public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
1267 final boolean get_gains,
1268 final String outdir,
1269 final String suffix_for_filename ) throws IOException {
1270 CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
1272 state = CharacterStateMatrix.GainLossStates.LOSS;
1274 final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
1278 for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
1279 final PhylogenyNode node = it.next();
// Only internal nodes define subtrees of interest; leaves are skipped.
1280 if ( !node.isExternal() ) {
1281 final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
// Empty sets produce no file at all.
1282 if ( domains.size() > 0 ) {
1283 final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
1284 + node.getName() + suffix_for_filename );
1285 for( final String domain : domains ) {
1286 writer.write( domain );
1287 writer.write( ForesterUtil.LINE_SEPARATOR );
// NOTE(review): writer.close() is not visible in this view — presumably on an
// elided line; verify the writer is closed in the full file.
// Writes all binary domain combinations of one genome as DOT graph statements
// to <input-name><suffix>, placed under output_dir when given; fatal-errors the
// program on I/O failure.
// NOTE(review): the loop index parameter `i` used below is on an elided
// signature line in this view — confirm in the full file.
1295 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1296 final File output_dir,
1297 final GenomeWideCombinableDomains gwcd,
1299 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1300 File dc_outfile_dot = new File( input_file_properties[ i ][ 1 ]
1301 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1302 if ( output_dir != null ) {
1303 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
1305 checkForOutputFileWriteability( dc_outfile_dot );
1306 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1308 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
1309 for( final BinaryDomainCombination bdc : binary_combinations ) {
// One DOT-language edge description per binary domain combination.
1310 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1312 out_dot.write( SurfacingConstants.NL );
1316 catch ( final IOException e ) {
1317 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1319 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
1320 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1321 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
// Writes, per identifier (sorted), the characters whose gain/loss state matches
// `state`; a null state selects characters that are GAIN or UNCHANGED_PRESENT
// (i.e. "present"). Optional per-character descriptions are appended when known.
1324 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1325 final CharacterStateMatrix.GainLossStates state,
1326 final String filename,
1327 final String indentifier_characters_separator,
1328 final String character_separator,
1329 final Map<String, String> descriptions ) {
1330 final File outfile = new File( filename );
1331 checkForOutputFileWriteability( outfile );
// Identifiers are emitted in sorted order regardless of matrix order.
1332 final SortedSet<String> sorted_ids = new TreeSet<String>();
1333 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1334 sorted_ids.add( matrix.getIdentifier( i ) );
1337 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1338 for( final String id : sorted_ids ) {
1339 out.write( indentifier_characters_separator );
1340 out.write( "#" + id );
1341 out.write( indentifier_characters_separator );
1342 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1344 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1345 if ( ( matrix.getState( id, c ) == state )
1346 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1347 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1348 out.write( matrix.getCharacter( c ) );
1349 if ( ( descriptions != null ) && !descriptions.isEmpty()
1350 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
1352 out.write( descriptions.get( matrix.getCharacter( c ) ) );
1354 out.write( character_separator );
1361 catch ( final IOException e ) {
1362 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1364 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain
// combinations: matching characters are parsed into BinaryDomainCombination
// objects and emitted in the requested graph-describing output format.
1367 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1368 final CharacterStateMatrix.GainLossStates state,
1369 final String filename,
1370 final String indentifier_characters_separator,
1371 final String character_separator,
1372 final BinaryDomainCombination.OutputFormat bc_output_format ) {
1373 final File outfile = new File( filename );
1374 checkForOutputFileWriteability( outfile );
1375 final SortedSet<String> sorted_ids = new TreeSet<String>();
1376 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1377 sorted_ids.add( matrix.getIdentifier( i ) );
1380 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1381 for( final String id : sorted_ids ) {
1382 out.write( indentifier_characters_separator );
1383 out.write( "#" + id );
1384 out.write( indentifier_characters_separator );
1385 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1387 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1388 if ( ( matrix.getState( id, c ) == state )
1389 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1390 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
// Malformed combination strings abort the program rather than being skipped.
1391 BinaryDomainCombination bdc = null;
1393 bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
1395 catch ( final Exception e ) {
1396 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1398 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
1399 out.write( character_separator );
1406 catch ( final IOException e ) {
1407 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1409 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Renders the gain/loss matrix as a detailed HTML report: an index table of
// identifiers, then per identifier a table of matching characters with their
// GO mappings, plus optional per-node gain/loss files. Also accumulates every
// encountered Pfam id into the provided sets.
// NOTE(review): many lines are elided in this view (try/finally structure,
// parts of the writeDomainData call) — verify against the full file.
1412 public static void writeBinaryStatesMatrixToList( final Map<String, List<GoId>> domain_id_to_go_ids_map,
1413 final Map<GoId, GoTerm> go_id_to_term_map,
1414 final GoNameSpace go_namespace_limit,
1415 final boolean domain_combinations,
1416 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1417 final CharacterStateMatrix.GainLossStates state,
1418 final String filename,
1419 final String indentifier_characters_separator,
1420 final String character_separator,
1421 final String title_for_html,
1422 final String prefix_for_html,
1423 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
1424 final SortedSet<String> all_pfams_encountered,
1425 final SortedSet<String> pfams_gained_or_lost,
1426 final String suffix_for_per_node_events_file,
1427 final Map<String, Integer> tax_code_to_id_map ) {
// Fail fast on inconsistent GO-mapping inputs before touching the filesystem.
1428 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1429 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
1431 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
1432 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
1434 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1435 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
1437 final File outfile = new File( filename );
1438 checkForOutputFileWriteability( outfile );
1439 final SortedSet<String> sorted_ids = new TreeSet<String>();
1440 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1441 sorted_ids.add( matrix.getIdentifier( i ) );
1444 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
1445 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
1446 domain_combinations,
1449 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1450 File per_node_go_mapped_domain_gain_loss_outfile = null;
1451 int per_node_counter = 0;
// HTML prologue and page title.
1452 out.write( "<html>" );
1453 out.write( SurfacingConstants.NL );
1454 addHtmlHead( out, title_for_html );
1455 out.write( SurfacingConstants.NL );
1456 out.write( "<body>" );
1457 out.write( SurfacingConstants.NL );
1458 out.write( "<h1>" );
1459 out.write( SurfacingConstants.NL );
1460 out.write( title_for_html );
1461 out.write( SurfacingConstants.NL );
1462 out.write( "</h1>" );
1463 out.write( SurfacingConstants.NL );
// Index table: anchor links to each identifier's section below.
1464 out.write( "<table>" );
1465 out.write( SurfacingConstants.NL );
1466 for( final String id : sorted_ids ) {
1467 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1468 if ( matcher.matches() ) {
1471 out.write( "<tr>" );
1472 out.write( "<td>" );
1473 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
1474 out.write( "</td>" );
1475 out.write( "</tr>" );
1476 out.write( SurfacingConstants.NL );
1478 out.write( "</table>" );
1479 out.write( SurfacingConstants.NL );
// Main loop: one section per identifier with its character/GO table.
1480 for( final String id : sorted_ids ) {
1481 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1482 if ( matcher.matches() ) {
1485 out.write( SurfacingConstants.NL );
1486 out.write( "<h2>" );
1487 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
1488 writeTaxonomyLinks( out, id, tax_code_to_id_map );
1489 out.write( "</h2>" );
1490 out.write( SurfacingConstants.NL );
1491 out.write( "<table>" );
1492 out.write( SurfacingConstants.NL );
1493 out.write( "<tr>" );
1494 out.write( "<td><b>" );
1495 out.write( "Pfam domain(s)" );
1496 out.write( "</b></td><td><b>" );
1497 out.write( "GO term acc" );
1498 out.write( "</b></td><td><b>" );
1499 out.write( "GO term" );
1500 out.write( "</b></td><td><b>" );
1501 out.write( "GO namespace" );
1502 out.write( "</b></td>" );
1503 out.write( "</tr>" );
1504 out.write( SurfacingConstants.NL );
1505 out.write( "</tr>" );
1506 out.write( SurfacingConstants.NL );
1507 per_node_counter = 0;
// A per-node gain/loss file is only opened when there are characters at all.
1508 if ( matrix.getNumberOfCharacters() > 0 ) {
1509 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
1510 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
1511 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
1512 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
1513 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
1516 per_node_go_mapped_domain_gain_loss_outfile = null;
1517 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1519 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1521 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1522 if ( ( matrix.getState( id, c ) == state )
1523 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
1524 .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
1525 final String character = matrix.getCharacter( c );
1526 String domain_0 = "";
1527 String domain_1 = "";
// A character containing the combination separator is a binary domain
// combination and is split into its two member domains.
1528 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
1529 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
1530 if ( s.length != 2 ) {
1531 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
1532 + character + "]" );
1538 domain_0 = character;
1540 writeDomainData( domain_id_to_go_ids_map,
1547 character_separator,
1548 domain_id_to_secondary_features_maps,
// Record all Pfams seen, and separately those gained or lost.
1550 all_pfams_encountered.add( domain_0 );
1551 if ( pfams_gained_or_lost != null ) {
1552 pfams_gained_or_lost.add( domain_0 );
1554 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
1555 all_pfams_encountered.add( domain_1 );
1556 if ( pfams_gained_or_lost != null ) {
1557 pfams_gained_or_lost.add( domain_1 );
1560 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1561 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
// Empty per-node files are deleted after closing.
1568 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1569 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
1570 if ( per_node_counter < 1 ) {
1571 per_node_go_mapped_domain_gain_loss_outfile.delete();
1573 per_node_counter = 0;
1575 out.write( "</table>" );
1576 out.write( SurfacingConstants.NL );
1577 out.write( "<hr>" );
1578 out.write( SurfacingConstants.NL );
1579 } // for( final String id : sorted_ids ) {
1580 out.write( "</body>" );
1581 out.write( SurfacingConstants.NL );
1582 out.write( "</html>" );
1583 out.write( SurfacingConstants.NL );
1587 catch ( final IOException e ) {
1588 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1590 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes one genome's domain-combination counts to its own file and appends a
// tab-separated promiscuity-statistics row (mean, SD, median, min, max, N,
// most promiscuous domains) to the shared per-genome statistics writer.
// NOTE(review): the loop index parameter `i` used below is on an elided
// signature line in this view — confirm in the full file.
1593 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
1594 final File output_dir,
1595 final Writer per_genome_domain_promiscuity_statistics_writer,
1596 final GenomeWideCombinableDomains gwcd,
1598 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1599 File dc_outfile = new File( input_file_properties[ i ][ 1 ]
1600 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1601 if ( output_dir != null ) {
1602 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
1604 checkForOutputFileWriteability( dc_outfile );
1606 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
1607 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
1610 catch ( final IOException e ) {
1611 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1613 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
1615 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 1 ] + "\t" );
1616 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
// Sample standard deviation is undefined for N < 2, hence the "n/a" column.
1617 if ( stats.getN() < 2 ) {
1618 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
1621 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
1622 .sampleStandardDeviation() ) + "\t" );
1624 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
1625 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
1626 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
1627 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
1628 final SortedSet<String> mpds = gwcd.getMostPromiscuosDomain();
1629 for( final String mpd : mpds ) {
1630 per_genome_domain_promiscuity_statistics_writer.write( mpd + " " );
1632 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
1634 catch ( final IOException e ) {
1635 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Progress message includes the third property column only when present.
1637 if ( input_file_properties[ i ].length == 3 ) {
1638 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
1639 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1640 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
1643 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
1644 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
1645 + dc_outfile + "\"" );
// Writes domain similarities either through a single writer or through a map of
// per-first-character "split" writers, formatted per print_option (simple
// tab-delimited vs. HTML).  When print_some_stats is true, descriptive
// statistics over the per-domain mean similarity scores are collected and an
// ASCII histogram is rendered into the HTML output.
// NOTE(review): this excerpt drops source lines (closing braces, some switch
// case labels, the return statement); comments describe only what is visible.
// Presumably the collected 'stats' object is returned — TODO confirm.
1649 public static DescriptiveStatistics writeDomainSimilaritiesToFile( final StringBuilder html_desc,
1650 final StringBuilder html_title,
1651 final Writer single_writer,
1652 Map<Character, Writer> split_writers,
1653 final SortedSet<DomainSimilarity> similarities,
1654 final boolean treat_as_binary,
1655 final List<Species> species_order,
1656 final PrintableDomainSimilarity.PRINT_OPTION print_option,
1657 final DomainSimilarity.DomainSimilarityScoring scoring,
1658 final boolean verbose,
1659 final Map<String, Integer> tax_code_to_id_map,
1660 final boolean print_some_stats )
1661 throws IOException {
1662 DescriptiveStatistics stats = null;
1663 AsciiHistogram histo = null;
// Optionally accumulate the mean similarity score of every domain pair and
// build a histogram over them (only when the scores actually vary).
1664 if ( print_some_stats ) {
1665 stats = new BasicDescriptiveStatistics();
1666 final String histogram_title = "score mean distribution:";
1667 for( final DomainSimilarity similarity : similarities ) {
1668 stats.addValue( similarity.getMeanSimilarityScore() );
// A histogram is only meaningful when min < max (non-degenerate range).
1671 if ( stats.getMin() < stats.getMax() ) {
1672 histo = new AsciiHistogram( stats, histogram_title );
// NOTE(review): the exception from histogram construction appears to be
// handled here; the handler body is not visible in this excerpt.
1675 catch ( final Exception e ) {
// If only a single writer was supplied, wrap it as the sole "split" writer
// under the key '_' so the rest of the method can treat both cases uniformly.
1679 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
1680 split_writers = new HashMap<Character, Writer>();
1681 split_writers.put( '_', single_writer );
// Emit the per-format preamble.  For HTML output (case not visible in this
// excerpt, presumably HTML — TODO confirm) every split writer gets a full
// <html><body> header, the caller-supplied description, optional stats, and
// the opening of the domain-index table.
1683 switch ( print_option ) {
1684 case SIMPLE_TAB_DELIMITED:
1687 for( final Character key : split_writers.keySet() ) {
1688 final Writer w = split_writers.get( key );
1689 w.write( "<html>" );
1690 w.write( SurfacingConstants.NL );
// Title differs depending on whether this is a split file ('_' means the
// single-writer case) — the guarding condition is not visible here.
1692 addHtmlHead( w, "DC analysis (" + html_title + ") " + key.toString().toUpperCase() );
1695 addHtmlHead( w, "DC analysis (" + html_title + ")" );
1697 w.write( SurfacingConstants.NL );
1698 w.write( "<body>" );
1699 w.write( SurfacingConstants.NL );
1700 w.write( html_desc.toString() );
1701 w.write( SurfacingConstants.NL );
1702 if ( print_some_stats ) {
1703 printSomeStats( stats, histo, w );
1706 w.write( SurfacingConstants.NL );
1708 w.write( SurfacingConstants.NL );
1709 w.write( "<table>" );
1710 w.write( SurfacingConstants.NL );
1711 w.write( "<tr><td><b>Domains:</b></td></tr>" );
1712 w.write( SurfacingConstants.NL );
// First pass over the similarities: emit an index of anchor links, one row per
// domain id, routed to the single writer or to the split writer selected by
// the domain id's first (lower-cased) character ('0' is the fallback bucket).
1717 for( final DomainSimilarity similarity : similarities ) {
1718 if ( ( species_order != null ) && !species_order.isEmpty() ) {
1719 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
1721 if ( single_writer != null ) {
1722 single_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
1723 + similarity.getDomainId() + "</a></b></td></tr>" );
1724 single_writer.write( SurfacingConstants.NL );
1727 Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
1729 if ( local_writer == null ) {
1730 local_writer = split_writers.get( '0' );
1732 local_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
1733 + similarity.getDomainId() + "</a></b></td></tr>" );
1734 local_writer.write( SurfacingConstants.NL );
// Close the index table and open the main data table on every writer.
1737 for( final Writer w : split_writers.values() ) {
1738 w.write( "</table>" );
1739 w.write( SurfacingConstants.NL );
1741 w.write( SurfacingConstants.NL );
1742 w.write( "<table>" );
1743 w.write( SurfacingConstants.NL );
// Second pass: emit the actual similarity rows (same writer-routing logic as
// the index pass above).
1746 for( final DomainSimilarity similarity : similarities ) {
1747 if ( ( species_order != null ) && !species_order.isEmpty() ) {
1748 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
1750 if ( single_writer != null ) {
1751 single_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map ).toString() );
1752 single_writer.write( SurfacingConstants.NL );
1755 Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
1757 if ( local_writer == null ) {
1758 local_writer = split_writers.get( '0' );
1760 local_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map ).toString() );
1761 local_writer.write( SurfacingConstants.NL );
// Per-format epilogue: close table/body/html for the HTML case.
1764 switch ( print_option ) {
1766 for( final Writer w : split_writers.values() ) {
1767 w.write( SurfacingConstants.NL );
1768 w.write( "</table>" );
1769 w.write( SurfacingConstants.NL );
1770 w.write( "</font>" );
1771 w.write( SurfacingConstants.NL );
1772 w.write( "</body>" );
1773 w.write( SurfacingConstants.NL );
1774 w.write( "</html>" );
1775 w.write( SurfacingConstants.NL );
// Final loop over the writers — its body (presumably flush/close) is not
// visible in this excerpt.
1779 for( final Writer w : split_writers.values() ) {
// Emits an HTML fragment summarizing the score statistics: the ASCII histogram
// (wrapped in <tt><pre> so its alignment survives) followed by a small table
// of N / Min / Max / Mean / SD.
// @param stats statistics to render (must be non-null; N may be 1)
// @param histo optional pre-built histogram; skipped when null
// @param w     destination writer
// @throws IOException on any write failure
1785 private static void printSomeStats( final DescriptiveStatistics stats, final AsciiHistogram histo, final Writer w )
1786 throws IOException {
1789 w.write( SurfacingConstants.NL );
1790 w.write( "<tt><pre>" );
1791 w.write( SurfacingConstants.NL );
1792 if ( histo != null ) {
// 20 rows, '|' as bar character, 40 columns wide, label width 5.
1793 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
1794 w.write( SurfacingConstants.NL );
1796 w.write( "</pre></tt>" );
1797 w.write( SurfacingConstants.NL );
1798 w.write( "<table>" );
1799 w.write( SurfacingConstants.NL );
1800 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
1801 w.write( SurfacingConstants.NL );
1802 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
1803 w.write( SurfacingConstants.NL );
1804 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
1805 w.write( SurfacingConstants.NL );
1806 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
1807 w.write( SurfacingConstants.NL );
// Sample standard deviation is undefined for N <= 1, hence the "n/a" branch.
1808 if ( stats.getN() > 1 ) {
1809 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
1812 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
1814 w.write( SurfacingConstants.NL );
1815 w.write( "</table>" );
1816 w.write( SurfacingConstants.NL );
1818 w.write( SurfacingConstants.NL );
// Writes a character-state matrix to the named file in the given format.
// An unwritable output path or an IOException is fatal (program terminates via
// ForesterUtil.fatalError); on success a confirmation message is printed.
// NOTE(review): the 'try' line and the close of the writer are not visible in
// this excerpt — confirm the BufferedWriter is closed (it presumably is,
// either explicitly or inside matrix.toWriter).
1821 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
1822 final String filename,
1823 final Format format ) {
1824 final File outfile = new File( filename );
1825 checkForOutputFileWriteability( outfile );
1827 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1828 matrix.toWriter( out, format );
1832 catch ( final IOException e ) {
1833 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1835 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Writes a list of distance matrices, concatenated in PHYLIP format, to a
// single output file.  IOException is fatal; success is reported via a
// program message.
// NOTE(review): the 'try' line and the writer close are not visible in this
// excerpt — confirm 'out' is closed after the loop.
1838 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
1839 checkForOutputFileWriteability( matrix_outfile );
1841 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
1842 for( final DistanceMatrix distance_matrix : matrices ) {
1843 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
1844 out.write( ForesterUtil.LINE_SEPARATOR );
1849 catch ( final IOException e ) {
1850 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1852 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
// Writes a phylogeny to the named file as phyloXML.  Unlike the matrix
// writers above, a failure here is only a warning, not fatal — the program
// continues after printing the warning.
1855 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
1856 final PhylogenyWriter writer = new PhylogenyWriter();
1858 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
1860 catch ( final IOException e ) {
1861 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
1864 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
// Writes a bracketed set of taxonomy hyperlinks ("[uniprot|eol|scholar|google]")
// for a species code.  Links are emitted only when the code looks like a plain
// taxon code: longer than one character and with no '_' at index >= 1
// (indexOf('_') < 1 also admits a leading underscore — presumably intentional,
// TODO confirm).  The UniProt link requires a known taxonomy id mapping.
1867 public static void writeTaxonomyLinks( final Writer writer,
1868 final String species,
1869 final Map<String, Integer> tax_code_to_id_map ) throws IOException {
1870 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
1871 writer.write( " [" );
// UniProt taxonomy link only when an id mapping exists for this code.
1872 if ( ( tax_code_to_id_map != null ) && tax_code_to_id_map.containsKey( species ) ) {
1873 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_TAXONOMY_ID_LINK
1874 + tax_code_to_id_map.get( species ) + "\" target=\"taxonomy_window\">uniprot</a>" );
// Fallback / additional links keyed directly on the species code.
1877 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
1878 + "\" target=\"taxonomy_window\">eol</a>" );
1879 writer.write( "|" );
1880 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_SCHOLAR_SEARCH + species
1881 + "\" target=\"taxonomy_window\">scholar</a>" );
1882 writer.write( "|" );
1883 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_WEB_SEARCH_LINK + species
1884 + "\" target=\"taxonomy_window\">google</a>" );
1886 writer.write( "]" );
// Increments the occurrence count for key 's' in the given count map.
// NOTE(review): the else-branch that inserts a first-time key (presumably
// map.put( s, 1 )) is not visible in this excerpt — confirm it exists.
1890 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
1891 if ( map.containsKey( s ) ) {
1892 map.put( s, map.get( s ) + 1 );
// Analyzes how often each binary domain combination (DC, encoded as "A=B") was
// gained independently on the given (Fitch-parsimony-annotated) phylogeny, and
// writes several result files:
//   - outfilename_for_counts:          histogram of (#independent gains -> #DCs)
//   - outfilename_for_dc:              DC lists per gain count
//   - outfilename_for_dc_for_go_mapping[_unique]: split single domains per gain
//     count, for GO mapping (sorted / de-duplicated variants)
//   - outfilename_for_rank_counts / _ancestor_species_counts: taxonomy of the
//     LCAs of nodes that gained a multi-gained DC
//   - outfilename_for_protein_stats:   optional length/count statistics
// The three optional stats maps feed the protein-stats output; each may be null.
// IOException while writing is reported as a warning, not fatal.
// NOTE(review): this excerpt drops many source lines (try lines, else branches,
// closing braces, several writes); comments describe only the visible logic.
1899 private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
1900 final String outfilename_for_counts,
1901 final String outfilename_for_dc,
1902 final String outfilename_for_dc_for_go_mapping,
1903 final String outfilename_for_dc_for_go_mapping_unique,
1904 final String outfilename_for_rank_counts,
1905 final String outfilename_for_ancestor_species_counts,
1906 final String outfilename_for_protein_stats,
1907 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
1908 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
1909 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
// Dead debug code below (commented out in the original) — dumps the raw data
// arrays of the incoming stats maps.  Candidate for deletion.
1912 // if ( protein_length_stats_by_dc != null ) {
1913 // for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
1914 // System.out.print( entry.getKey().toString() );
1915 // System.out.print( ": " );
1916 // double[] a = entry.getValue().getDataAsDoubleArray();
1917 // for( int i = 0; i < a.length; i++ ) {
1918 // System.out.print( a[ i ] + " " );
1920 // System.out.println();
1923 // if ( domain_number_stats_by_dc != null ) {
1924 // for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
1925 // System.out.print( entry.getKey().toString() );
1926 // System.out.print( ": " );
1927 // double[] a = entry.getValue().getDataAsDoubleArray();
1928 // for( int i = 0; i < a.length; i++ ) {
1929 // System.out.print( a[ i ] + " " );
1931 // System.out.println();
1935 final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
1936 final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
1937 final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
1938 final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
// Pass 1: count, over all nodes of the phylogeny, how many times each DC was
// gained (a DC gained at k distinct nodes => k independent gains).
1939 final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
1940 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
1941 final PhylogenyNode n = it.next();
1942 final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
1943 for( final String dc : gained_dc ) {
1944 if ( dc_gain_counts.containsKey( dc ) ) {
1945 dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
1948 dc_gain_counts.put( dc, 1 );
// Pass 2: invert the gain counts into a histogram (gain count -> #DCs) plus
// per-gain-count DC lists and single-domain lists for GO mapping, and
// accumulate the optional length/count statistics.
1952 final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
1953 final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
1954 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
1955 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
1956 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
1957 final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
1958 final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
1959 final Set<String> dcs = dc_gain_counts.keySet();
1960 final SortedSet<String> more_than_once = new TreeSet<String>();
1961 DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
1962 DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
1963 DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
1964 final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
1965 long gained_multiple_times_domain_length_sum = 0;
1966 long gained_once_domain_length_sum = 0;
1967 long gained_multiple_times_domain_length_count = 0;
1968 long gained_once_domain_length_count = 0;
1969 for( final String dc : dcs ) {
1970 final int count = dc_gain_counts.get( dc );
// Known gain count: append to the existing per-count collections.
1971 if ( histogram.containsKey( count ) ) {
1972 histogram.put( count, histogram.get( count ) + 1 );
1973 domain_lists.get( count ).append( ", " + dc );
1974 domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
1975 domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
// First DC with this gain count: initialize the per-count collections.
1978 histogram.put( count, 1 );
1979 domain_lists.put( count, new StringBuilder( dc ) );
1980 final PriorityQueue<String> q = new PriorityQueue<String>();
1981 q.addAll( splitDomainCombination( dc ) );
1982 domain_lists_go.put( count, q );
1983 final SortedSet<String> set = new TreeSet<String>();
1984 set.addAll( splitDomainCombination( dc ) );
1985 domain_lists_go_unique.put( count, set );
// Per-gain-count means of protein length / domain number / domain length.
1987 if ( protein_length_stats_by_dc != null ) {
1988 if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
1989 dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
1991 dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
1992 .arithmeticMean() );
1994 if ( domain_number_stats_by_dc != null ) {
1995 if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
1996 dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
1998 dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
1999 .arithmeticMean() );
2001 if ( domain_length_stats_by_domain != null ) {
2002 if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
2003 dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
// A DC "A=B" contributes the mean lengths of both of its member domains.
2005 final String[] ds = dc.split( "=" );
2006 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
2007 .get( ds[ 0 ] ).arithmeticMean() );
2008 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
2009 .get( ds[ 1 ] ).arithmeticMean() );
// DCs gained more than once are tracked separately; their raw data points
// feed the "gained multiple times" aggregate statistics.
// NOTE(review): the guarding condition (presumably count > 1) is not visible
// in this excerpt.
2012 more_than_once.add( dc );
2013 if ( protein_length_stats_by_dc != null ) {
2014 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2015 for( final double element : s.getData() ) {
2016 gained_multiple_times_lengths_stats.addValue( element );
2019 if ( domain_number_stats_by_dc != null ) {
2020 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2021 for( final double element : s.getData() ) {
2022 gained_multiple_times_domain_count_stats.addValue( element );
2025 if ( domain_length_stats_by_domain != null ) {
2026 final String[] ds = dc.split( "=" );
2027 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2028 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2029 for( final double element : s0.getData() ) {
2030 gained_multiple_times_domain_length_sum += element;
2031 ++gained_multiple_times_domain_length_count;
2033 for( final double element : s1.getData() ) {
2034 gained_multiple_times_domain_length_sum += element;
2035 ++gained_multiple_times_domain_length_count;
// Mirror of the block above for DCs gained exactly once.
2040 if ( protein_length_stats_by_dc != null ) {
2041 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2042 for( final double element : s.getData() ) {
2043 gained_once_lengths_stats.addValue( element );
2046 if ( domain_number_stats_by_dc != null ) {
2047 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2048 for( final double element : s.getData() ) {
2049 gained_once_domain_count_stats.addValue( element );
2052 if ( domain_length_stats_by_domain != null ) {
2053 final String[] ds = dc.split( "=" );
2054 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2055 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2056 for( final double element : s0.getData() ) {
2057 gained_once_domain_length_sum += element;
2058 ++gained_once_domain_length_count;
2060 for( final double element : s1.getData() ) {
2061 gained_once_domain_length_sum += element;
2062 ++gained_once_domain_length_count;
// Write the histogram, the DC lists, and the (sorted) GO-mapping domain lists.
2067 final Set<Integer> histogram_keys = histogram.keySet();
2068 for( final Integer histogram_key : histogram_keys ) {
2069 final int count = histogram.get( histogram_key );
2070 final StringBuilder dc = domain_lists.get( histogram_key );
2071 out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
2072 out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
2073 out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
// PriorityQueue iteration order is not sorted, hence the toArray + sort here.
2074 final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
2075 Arrays.sort( sorted );
2076 for( final Object domain : sorted ) {
2077 out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
2079 out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
2080 for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
2081 out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
2086 out_dc_for_go_mapping.close();
2087 out_dc_for_go_mapping_unique.close();
// For every DC gained multiple times, find all external nodes that gained it
// and record the taxonomy rank and species name of each pairwise LCA.
// NOTE(review): this pairwise LCA loop is O(k^2) per DC — acceptable for small
// k, worth keeping in mind for large phylogenies.
2088 final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
2089 final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
2090 for( final String dc : more_than_once ) {
2091 final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
2092 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
2093 final PhylogenyNode n = it.next();
2094 if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
2098 for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
2099 for( int j = i + 1; j < nodes.size(); ++j ) {
2100 final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
2101 String rank = "unknown";
2102 if ( lca.getNodeData().isHasTaxonomy()
2103 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
2104 rank = lca.getNodeData().getTaxonomy().getRank();
2106 addToCountMap( lca_rank_counts, rank );
// Species label preference: scientific name, then common name, then node name.
2108 if ( lca.getNodeData().isHasTaxonomy()
2109 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
2110 lca_species = lca.getNodeData().getTaxonomy().getScientificName();
2112 else if ( lca.getNodeData().isHasTaxonomy()
2113 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
2114 lca_species = lca.getNodeData().getTaxonomy().getCommonName();
2117 lca_species = lca.getName();
2119 addToCountMap( lca_ancestor_species_counts, lca_species );
2123 final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
2124 final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
2125 ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
2126 ForesterUtil.map2writer( out_for_ancestor_species_counts,
2127 lca_ancestor_species_counts,
2129 ForesterUtil.LINE_SEPARATOR );
2130 out_for_rank_counts.close();
2131 out_for_ancestor_species_counts.close();
// Optional protein-stats output: per-gain-count mean/median tables plus the
// gained-once vs. gained-multiple-times aggregates computed above.
2132 if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
2133 && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
2134 final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
2135 w.write( "Domain Lengths: " );
2137 if ( domain_length_stats_by_domain != null ) {
2138 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
2140 w.write( entry.getKey().toString() );
2141 w.write( "\t" + entry.getValue().arithmeticMean() );
2142 w.write( "\t" + entry.getValue().median() );
2149 w.write( "Protein Lengths: " );
2151 if ( protein_length_stats_by_dc != null ) {
2152 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
2154 w.write( entry.getKey().toString() );
2155 w.write( "\t" + entry.getValue().arithmeticMean() );
2156 w.write( "\t" + entry.getValue().median() );
2163 w.write( "Number of domains: " );
2165 if ( domain_number_stats_by_dc != null ) {
2166 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
2168 w.write( entry.getKey().toString() );
2169 w.write( "\t" + entry.getValue().arithmeticMean() );
2170 w.write( "\t" + entry.getValue().median() );
// NOTE(review): integer division guarded only by usage — if the *_count vars
// are 0 these averages are NaN-free but divide-by-zero on longs would throw;
// presumably the counts are nonzero when this branch runs — confirm.
2177 w.write( "Gained once, domain lengths:" );
2179 w.write( "N: " + gained_once_domain_length_count );
2181 w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
2184 w.write( "Gained multiple times, domain lengths:" );
2186 w.write( "N: " + gained_multiple_times_domain_length_count );
2189 + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
// The large stats objects are nulled after use, presumably to release memory.
2194 w.write( "Gained once, protein lengths:" );
2196 w.write( gained_once_lengths_stats.toString() );
2197 gained_once_lengths_stats = null;
2200 w.write( "Gained once, domain counts:" );
2202 w.write( gained_once_domain_count_stats.toString() );
2203 gained_once_domain_count_stats = null;
2206 w.write( "Gained multiple times, protein lengths:" );
2208 w.write( gained_multiple_times_lengths_stats.toString() );
2209 gained_multiple_times_lengths_stats = null;
2212 w.write( "Gained multiple times, domain counts:" );
2214 w.write( gained_multiple_times_domain_count_stats.toString() );
2219 catch ( final IOException e ) {
2220 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2222 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
2223 + outfilename_for_counts + "]" );
2224 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
2225 + outfilename_for_dc + "]" );
2226 ForesterUtil.programMessage( surfacing.PRG_NAME,
2227 "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
2228 + outfilename_for_dc_for_go_mapping + "]" );
2229 ForesterUtil.programMessage( surfacing.PRG_NAME,
2230 "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
2231 + outfilename_for_dc_for_go_mapping_unique + "]" );
// Collects the union of all binary characters (domains / domain combinations)
// gained — or, when get_gains is false, lost — by any descendant of the given
// subtree root.
// NOTE(review): the else keyword and the return statement are not visible in
// this excerpt; presumably the method returns 'domains'.
2234 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
2235 final boolean get_gains ) {
2236 final SortedSet<String> domains = new TreeSet<String>();
2237 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
2238 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
2240 domains.addAll( chars.getGainedCharacters() );
2243 domains.addAll( chars.getLostCharacters() );
// Builds (creating directories as needed) the nested output directory for
// per-node domain gain/loss files:
//   <parent of outfile>/<base_dir>/(DC|DOMAINS)/(GAINS|LOSSES|PRESENT)
// and returns the deepest directory.
// NOTE(review): File.mkdir() return values are ignored throughout — a failed
// mkdir would surface later as a write error rather than here.
2249 private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
2250 final boolean domain_combinations,
2251 final CharacterStateMatrix.GainLossStates state,
2252 final String outfile ) {
2253 File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
2254 + ForesterUtil.FILE_SEPARATOR + base_dir );
2255 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2256 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
// Second level: domain combinations vs. single domains.
2258 if ( domain_combinations ) {
2259 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2260 + ForesterUtil.FILE_SEPARATOR + "DC" );
2263 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2264 + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
2266 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2267 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
// Third level: gain/loss state (anything other than GAIN or LOSS falls through
// to the PRESENT directory).
2269 if ( state == GainLossStates.GAIN ) {
2270 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2271 + ForesterUtil.FILE_SEPARATOR + "GAINS" );
2273 else if ( state == GainLossStates.LOSS ) {
2274 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2275 + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
2278 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2279 + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
2281 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2282 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
2284 return per_node_go_mapped_domain_gain_loss_files_base_dir;
// Flattens a genome's combinable-domains map into the sorted set of all binary
// domain combinations it contains (duplicates collapse via the TreeSet).
2287 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
2288 final SortedMap<String, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
2289 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
2290 for( final String domain_id : cds.keySet() ) {
2291 final CombinableDomains cd = cds.get( domain_id );
2292 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
2294 return binary_combinations;
// Splits a stringified binary domain combination of the form "A=B" into a
// two-element list of domain ids; reports an error when the string does not
// contain exactly one '='.
// NOTE(review): the lines adding the two ids to 'l' and the return statement
// are not visible in this excerpt.
2297 private static List<String> splitDomainCombination( final String dc ) {
2298 final String[] s = dc.split( "=" );
2299 if ( s.length != 2 ) {
2300 ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
2304 final List<String> l = new ArrayList<String>( 2 );
// Writes three files derived from the set of all Pfam domains encountered:
//   1) <outfile_name><ALL_PFAMS_ENCOUNTERED_SUFFIX>: every Pfam, one per line
//   2) ...WITH_GO_ANNOTATION_SUFFIX: only Pfams that map to at least one GO id
//   3) ...SUMMARY_SUFFIX: Pfams without any GO mapping, followed by counters
//      (totals and percentages per GO namespace)
// The same counters are also echoed to the console via programMessage.
// NOTE(review): this excerpt drops lines (try line, some flag assignments,
// else branches, closing braces); comments describe only what is visible.
// Percentages use integer division — sub-1% values print as 0%.
2310 private static void writeAllEncounteredPfamsToFile( final Map<String, List<GoId>> domain_id_to_go_ids_map,
2311 final Map<GoId, GoTerm> go_id_to_term_map,
2312 final String outfile_name,
2313 final SortedSet<String> all_pfams_encountered ) {
2314 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
2315 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
2316 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
2317 final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
// Counters for the summary: per-namespace GO hits and with/without-mapping
// Pfam tallies ("bp" = biological process, "mf" = molecular function).
2318 int biological_process_counter = 0;
2319 int cellular_component_counter = 0;
2320 int molecular_function_counter = 0;
2321 int pfams_with_mappings_counter = 0;
2322 int pfams_without_mappings_counter = 0;
2323 int pfams_without_mappings_to_bp_or_mf_counter = 0;
2324 int pfams_with_mappings_to_bp_or_mf_counter = 0;
2326 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
2327 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
2328 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
2329 summary_writer.write( "# Pfam to GO mapping summary" );
2330 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2331 summary_writer.write( "# Actual summary is at the end of this file." );
2332 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2333 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
2334 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
// Main loop: classify each encountered Pfam by its GO mappings.
2335 for( final String pfam : all_pfams_encountered ) {
2336 all_pfams_encountered_writer.write( pfam );
2337 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
2338 final String domain_id = new String( pfam );
2339 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2340 ++pfams_with_mappings_counter;
2341 all_pfams_encountered_with_go_annotation_writer.write( pfam );
2342 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
2343 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
// Track which GO namespaces this Pfam maps into (flag-setting lines are not
// visible in this excerpt, but the flags are consumed below).
2344 boolean maps_to_bp = false;
2345 boolean maps_to_cc = false;
2346 boolean maps_to_mf = false;
2347 for( final GoId go_id : go_ids ) {
2348 final GoTerm go_term = go_id_to_term_map.get( go_id );
2349 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
2352 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
2355 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
// Per-namespace counters are incremented once per Pfam (guarded by the flags;
// the guards themselves are not visible in this excerpt).
2360 ++biological_process_counter;
2363 ++cellular_component_counter;
2366 ++molecular_function_counter;
2368 if ( maps_to_bp || maps_to_mf ) {
2369 ++pfams_with_mappings_to_bp_or_mf_counter;
2372 ++pfams_without_mappings_to_bp_or_mf_counter;
// Pfams with no GO mapping at all are listed in the summary file directly.
2376 ++pfams_without_mappings_to_bp_or_mf_counter;
2377 ++pfams_without_mappings_counter;
2378 summary_writer.write( pfam );
2379 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2382 all_pfams_encountered_writer.close();
2383 all_pfams_encountered_with_go_annotation_writer.close();
// Console report of the same counters written to the summary file below.
2384 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
2385 + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
2386 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
2387 + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
2389 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
2390 + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
2391 + encountered_pfams_summary_file + "\"" );
2392 ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered : "
2393 + all_pfams_encountered.size() );
2394 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping : "
2395 + pfams_without_mappings_counter + " ["
2396 + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2397 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
2398 + pfams_without_mappings_to_bp_or_mf_counter + " ["
2399 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2400 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping : "
2401 + pfams_with_mappings_counter + " ["
2402 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2403 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func. : "
2404 + pfams_with_mappings_to_bp_or_mf_counter + " ["
2405 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2406 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to biological process: "
2407 + biological_process_counter + " ["
2408 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
2409 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to molecular function: "
2410 + molecular_function_counter + " ["
2411 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
2412 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to cellular component: "
2413 + cellular_component_counter + " ["
2414 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
// Trailing summary block mirrored into the summary file.
2415 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2416 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
2417 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2418 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter
2419 + " [" + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2420 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2421 summary_writer.write( "# Pfams without mapping to proc. or func. : "
2422 + pfams_without_mappings_to_bp_or_mf_counter + " ["
2423 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2424 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2425 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
2426 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2427 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2428 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
2429 + pfams_with_mappings_to_bp_or_mf_counter + " ["
2430 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2431 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2432 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
2433 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
2434 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2435 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
2436 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
2437 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2438 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
2439 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
2440 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2441 summary_writer.close();
// I/O failure anywhere above is only a warning (best-effort reporting).
2443 catch ( final IOException e ) {
2444 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes HTML table rows describing one domain (or one binary domain combination)
// together with its Gene Ontology annotations.
//
// NOTE(review): this listing is elided — some original statements between the
// numbered lines are not shown (e.g. the else-branches and several closing braces).
// Comments below describe only what the visible code establishes.
//
// Parameters (visible ones):
//   domain_id_to_go_ids_map - maps a Pfam domain id to its list of GO ids
//   go_id_to_term_map       - maps a GO id to its GoTerm; a missing entry is a
//                             hard error (IllegalArgumentException below)
//   go_namespace_limit      - if non-null, only GO terms in this namespace are
//                             written (see the namespace check on line 2504)
//   domain_0 / domain_1     - the domain id(s); an empty domain_1 indicates a
//                             single domain rather than a binary combination
//   all_go_ids              - if non-null, collects every GO id written
2448     private static void writeDomainData( final Map<String, List<GoId>> domain_id_to_go_ids_map,
2449                                          final Map<GoId, GoTerm> go_id_to_term_map,
2450                                          final GoNameSpace go_namespace_limit,
2452                                          final String domain_0,
2453                                          final String domain_1,
2454                                          final String prefix_for_html,
2455                                          final String character_separator_for_non_html_output,
2456                                          final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
2457                                          final Set<GoId> all_go_ids ) throws IOException {
2458         boolean any_go_annotation_present = false;
2459         boolean first_has_no_go = false;
2460         int domain_count = 2; // To distinguish between domains and binary domain combinations.
             // An empty domain_1 means a single domain; the elided line here
             // presumably sets domain_count to 1 — TODO confirm against the full source.
2461         if ( ForesterUtil.isEmpty( domain_1 ) ) {
2464         // The following has a difficult to understand logic.
             // d == 0 handles domain_0, d == 1 handles domain_1 (when present).
2465         for( int d = 0; d < domain_count; ++d ) {
2466             List<GoId> go_ids = null;
2467             boolean go_annotation_present = false;
                 // Elided condition: this branch apparently runs for d == 0 — TODO confirm.
2469                 if ( domain_id_to_go_ids_map.containsKey( domain_0 ) ) {
2470                     go_annotation_present = true;
2471                     any_go_annotation_present = true;
2472                     go_ids = domain_id_to_go_ids_map.get( domain_0 );
                     // Elided else-branch: domain_0 has no GO mapping.
2475                     first_has_no_go = true;
                 // Elided condition: this branch apparently runs for d == 1 — TODO confirm.
2479                 if ( domain_id_to_go_ids_map.containsKey( domain_1 ) ) {
2480                     go_annotation_present = true;
2481                     any_go_annotation_present = true;
2482                     go_ids = domain_id_to_go_ids_map.get( domain_1 );
2485             if ( go_annotation_present ) {
                     // "first" = this is the first row for this domain (combination):
                     // either the d==0 pass, or the d==1 pass when domain_0 had no GO ids.
2486                 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
2487                 for( final GoId go_id : go_ids ) {
2488                     out.write( "<tr>" );
                         // Only the first row repeats the domain-id cell(s); the elided
                         // lines pass domain_0/domain_1 and prefix_for_html here — TODO confirm.
2491                         writeDomainIdsToHtml( out,
2495                                               domain_id_to_secondary_features_maps );
                         // Subsequent rows get an empty leading cell instead.
2498                         out.write( "<td></td>" );
                     // A GO id without a term entry is a data-integrity error, not skippable.
2500                     if ( !go_id_to_term_map.containsKey( go_id ) ) {
2501                         throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
2503                     final GoTerm go_term = go_id_to_term_map.get( go_id );
                     // Namespace filter: null limit means "write all namespaces".
2504                     if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
2505                         // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
2506                         final String go_id_str = go_id.getId();
2507                         out.write( "<td>" );
                         // Link each GO id to its AmiGO page, opened in a dedicated window.
2508                         out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
2509                                 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
2510                         out.write( "</td><td>" );
2511                         out.write( go_term.getName() );
                         // For binary combinations, mark which member (0 or 1) the term belongs to.
2512                         if ( domain_count == 2 ) {
2513                             out.write( " (" + d + ")" );
2515                         out.write( "</td><td>" );
2516                         // out.write( top );
2517                         // out.write( "</td><td>" );
2519                         out.write( go_term.getGoNameSpace().toShortString() );
2521                         out.write( "</td>" );
2522                         if ( all_go_ids != null ) {
2523                             all_go_ids.add( go_id );
                     // Elided else-branch: term filtered out by namespace — emit empty cells
                     // so the table stays rectangular — TODO confirm.
2527                         out.write( "<td>" );
2528                         out.write( "</td><td>" );
2529                         out.write( "</td><td>" );
2530                         out.write( "</td><td>" );
2531                         out.write( "</td>" );
2533                     out.write( "</tr>" );
2534                     out.write( SurfacingConstants.NL );
2537         } // for( int d = 0; d < domain_count; ++d )
             // No GO annotation at all for this domain (combination): still emit one
             // row with the domain id(s) and empty annotation cells.
2538         if ( !any_go_annotation_present ) {
2539             out.write( "<tr>" );
2540             writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
2541             out.write( "<td>" );
2542             out.write( "</td><td>" );
2543             out.write( "</td><td>" );
2544             out.write( "</td><td>" );
2545             out.write( "</td>" );
2546             out.write( "</tr>" );
2547             out.write( SurfacingConstants.NL );
// Writes the leading HTML cell for a domain row: an optional prefix followed by
// the domain id hyperlinked to its Pfam family page.
//
// NOTE(review): lines 2560-2561 and the method's closing brace are elided from
// this listing; handling of domain_1 (second member of a binary combination)
// is presumably in the elided part — TODO confirm against the full source.
2551     private static void writeDomainIdsToHtml( final Writer out,
2552                                               final String domain_0,
2553                                               final String domain_1,
2554                                               final String prefix_for_detailed_html,
2555                                               final Map<String, Set<String>>[] domain_id_to_secondary_features_maps )
2556             throws IOException {
2557         out.write( "<td>" );
             // Prefix (e.g. a label for the detailed page) is only written when non-empty.
2558         if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
2559             out.write( prefix_for_detailed_html );
             // Link domain_0 to its Pfam family entry.
2562         out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
2563         out.write( "</td>" );
// Writes one domain id per line to a per-tree-node file: always domain_0, and
// domain_1 as well when it is non-empty (i.e. for binary domain combinations).
// NOTE(review): the closing braces after line 2573 are elided from this listing.
2566     private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
2567                                                                  final String domain_0,
2568                                                                  final String domain_1 ) throws IOException {
2569         individual_files_writer.write( domain_0 );
2570         individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
             // domain_1 is empty for single domains; only combinations write a second line.
2571         if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2572             individual_files_writer.write( domain_1 );
2573             individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes each Pfam id in the sorted set to the named file, one per line, then
// reports a progress message.  I/O failures are reported as a warning rather
// than propagated (best-effort output).
//
// NOTE(review): the enclosing `try {` (line 2578), the writer close, and
// several closing braces are elided from this listing — the writer is
// presumably closed in the elided part; TODO confirm against the full source.
2577     private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
2579             final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
2580             for( final String pfam : pfams ) {
2581                 writer.write( pfam );
2582                 writer.write( ForesterUtil.LINE_SEPARATOR );
2585             ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
             // Best-effort: failure to write the Pfam list is a warning, not fatal.
2588         catch ( final IOException e ) {
2589             ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Serializes a binary character-state matrix plus one phylogeny to a NEXUS
// file: #NEXUS header, TAXA block, binary CHARACTERS block, TREES block.
// Only BasicCharacterStateMatrix instances are supported (its writeNexus*
// methods are required).  I/O failure is fatal (program aborts).
//
// NOTE(review): the `try {`, the writer close/flush, and several closing
// braces are elided from this listing — TODO confirm against the full source.
2593     private static void writeToNexus( final String outfile_name,
2594                                       final CharacterStateMatrix<BinaryStates> matrix,
2595                                       final Phylogeny phylogeny ) {
             // Guard: the NEXUS block writers below exist only on BasicCharacterStateMatrix.
2596         if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
2597             throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
2600         final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
2601         final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
2602         phylogenies.add( phylogeny );
2604             final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
2605             w.write( NexusConstants.NEXUS );
2606             w.write( ForesterUtil.LINE_SEPARATOR );
2607             my_matrix.writeNexusTaxaBlock( w );
2608             my_matrix.writeNexusBinaryChractersBlock( w );
             // Trees are written without NH-conversion support values.
2609             PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
2612             ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
             // Unlike writePfamsToFile, a failed NEXUS write aborts the program.
2614         catch ( final IOException e ) {
2615             ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Convenience overload: derives two presence/absence matrices from the
// parsimony calculator and writes each to its own NEXUS file — one for
// individual domains, one for binary domain combinations — via the
// matrix-based writeToNexus above.
// NOTE(review): the phylogeny argument lines (2624, 2627) and closing braces
// are elided from this listing.
2619     private static void writeToNexus( final String outfile_name,
2620                                       final DomainParsimonyCalculator domain_parsimony,
2621                                       final Phylogeny phylogeny ) {
2622         writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
2623                       domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
2625         writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
2626                       domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
2630 final static class DomainComparator implements Comparator<Domain> {
2632 final private boolean _ascending;
2634 public DomainComparator( final boolean ascending ) {
2635 _ascending = ascending;
2639 public final int compare( final Domain d0, final Domain d1 ) {
2640 if ( d0.getFrom() < d1.getFrom() ) {
2641 return _ascending ? -1 : 1;
2643 else if ( d0.getFrom() > d1.getFrom() ) {
2644 return _ascending ? 1 : -1;