3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.surfacing;
29 import java.io.BufferedWriter;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.text.DecimalFormat;
35 import java.text.NumberFormat;
36 import java.util.ArrayList;
37 import java.util.Arrays;
38 import java.util.Collections;
39 import java.util.Comparator;
40 import java.util.HashMap;
41 import java.util.HashSet;
42 import java.util.Iterator;
43 import java.util.List;
45 import java.util.Map.Entry;
46 import java.util.PriorityQueue;
48 import java.util.SortedMap;
49 import java.util.SortedSet;
50 import java.util.TreeMap;
51 import java.util.TreeSet;
52 import java.util.regex.Matcher;
53 import java.util.regex.Pattern;
55 import org.forester.application.surfacing;
56 import org.forester.evoinference.distance.NeighborJoining;
57 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
58 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
61 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
62 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
63 import org.forester.evoinference.matrix.distance.DistanceMatrix;
64 import org.forester.go.GoId;
65 import org.forester.go.GoNameSpace;
66 import org.forester.go.GoTerm;
67 import org.forester.go.PfamToGoMapping;
68 import org.forester.io.parsers.nexus.NexusConstants;
69 import org.forester.io.writers.PhylogenyWriter;
70 import org.forester.phylogeny.Phylogeny;
71 import org.forester.phylogeny.PhylogenyMethods;
72 import org.forester.phylogeny.PhylogenyNode;
73 import org.forester.phylogeny.PhylogenyNode.NH_CONVERSION_SUPPORT_VALUE_STYLE;
74 import org.forester.phylogeny.data.BinaryCharacters;
75 import org.forester.phylogeny.data.Confidence;
76 import org.forester.phylogeny.data.Taxonomy;
77 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
78 import org.forester.protein.BasicDomain;
79 import org.forester.protein.BasicProtein;
80 import org.forester.protein.BinaryDomainCombination;
81 import org.forester.protein.Domain;
82 import org.forester.protein.Protein;
83 import org.forester.species.Species;
84 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
85 import org.forester.surfacing.DomainSimilarityCalculator.GoAnnotationOutput;
86 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
87 import org.forester.util.AsciiHistogram;
88 import org.forester.util.BasicDescriptiveStatistics;
89 import org.forester.util.BasicTable;
90 import org.forester.util.BasicTableParser;
91 import org.forester.util.DescriptiveStatistics;
92 import org.forester.util.ForesterUtil;
94 public final class SurfacingUtil {
// Formats numeric output (e.g. similarity scores) to exactly three decimal places.
96 private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
// Orders domains by ascending per-sequence E-value, i.e. most confident hits first.
// NOTE(review): the comparator body is partially elided in this view of the source;
// ties appear to fall back to Domain.compareTo -- confirm against the full file.
97 private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {
100 public int compare( final Domain d1,
102 if ( d1.getPerSequenceEvalue() < d2
103 .getPerSequenceEvalue() ) {
107 .getPerSequenceEvalue() > d2
108 .getPerSequenceEvalue() ) {
112 return d1.compareTo( d2 );
// Matches SwissProt-style taxonomy codes: 3 to 5 uppercase letters/digits, whole string.
116 public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
// Passed to Fitch parsimony execution below; semantics defined by the parsimony calculator.
117 private static final boolean USE_LAST = true;
// Utility class: private constructor prevents instantiation.
119 private SurfacingUtil() {
120 // Hidden constructor.
// Adds every binary domain combination present in the given genome to the supplied
// sorted set (duplicates are collapsed by the set).
123 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
124 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
125 final SortedMap<String, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
126 for( final String domain_id : all_cd.keySet() ) {
127 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
// Copies all domain ids of the given genome into the supplied sorted set.
131 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
132 final SortedSet<String> domain_ids ) {
133 final SortedSet<String> domains = genome.getAllDomainIds();
134 for( final String domain : domains ) {
135 domain_ids.add( domain );
// Writes an HTML head section -- a title element plus an inline stylesheet for
// links, table cells and headings -- to the given writer.
// NOTE(review): some writes (presumably "<head>" and the title text between the
// <title> tags) are elided in this view of the source.
139 public static void addHtmlHead( final Writer w, final String title ) throws IOException {
140 w.write( SurfacingConstants.NL );
142 w.write( "<title>" );
144 w.write( "</title>" );
145 w.write( SurfacingConstants.NL );
146 w.write( "<style>" );
147 w.write( SurfacingConstants.NL );
148 w.write( "a:visited { color : #6633FF; text-decoration : none; }" );
149 w.write( SurfacingConstants.NL );
150 w.write( "a:link { color : #6633FF; text-decoration : none; }" );
151 w.write( SurfacingConstants.NL );
152 w.write( "a:active { color : #99FF00; text-decoration : none; }" );
153 w.write( SurfacingConstants.NL );
154 w.write( "a:hover { color : #FFFFFF; background-color : #99FF00; text-decoration : none; }" );
155 w.write( SurfacingConstants.NL );
156 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
157 w.write( SurfacingConstants.NL );
158 w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
159 w.write( SurfacingConstants.NL );
160 w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
161 w.write( SurfacingConstants.NL );
162 w.write( "</style>" );
163 w.write( SurfacingConstants.NL );
164 w.write( "</head>" );
165 w.write( SurfacingConstants.NL );
// Aggregates the mean similarity score of every similarity into a descriptive
// statistics object. NOTE(review): the return statement is elided in this view.
168 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
169 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
170 for( final DomainSimilarity similarity : similarities ) {
171 stats.addValue( similarity.getMeanSimilarityScore() );
// Counts how many positions in the domain's inclusive [from..to] range are already
// marked true in covered_positions (positions beyond the list are treated as
// uncovered). NOTE(review): the increment of overlap_count inside the if-branch is
// elided in this view of the source.
176 public static int calculateOverlap( final Domain domain, final List<Boolean> covered_positions ) {
177 int overlap_count = 0;
178 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
179 if ( ( i < covered_positions.size() ) && ( covered_positions.get( i ) == true ) ) {
183 return overlap_count;
// Terminates the program (via ForesterUtil.fatalError) if the output file cannot
// be written to; otherwise a no-op.
186 public static void checkForOutputFileWriteability( final File outfile ) {
187 final String error = ForesterUtil.isWritableFile( outfile );
188 if ( !ForesterUtil.isEmpty( error ) ) {
189 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
// Scans a gain/loss character-state matrix and appends to the output list every
// character (a binary domain combination, parsed according to dc_type) whose state
// is GAIN (when get_gains is true) or LOSS (when false). Identifiers are visited
// in sorted order so the resulting list order is deterministic.
193 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
194 final BinaryDomainCombination.DomainCombinationType dc_type,
195 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
196 final boolean get_gains ) {
197 final SortedSet<String> sorted_ids = new TreeSet<String>();
198 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
199 sorted_ids.add( matrix.getIdentifier( i ) );
201 for( final String id : sorted_ids ) {
202 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
203 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
204 || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
205 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
206 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
207 .createInstance( matrix.getCharacter( c ) ) );
209 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
210 all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
211 .createInstance( matrix.getCharacter( c ) ) );
// Fallback for all other combination types: undirected basic combination.
214 all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
215 .getCharacter( c ) ) );
// Inverts a list of Pfam-to-GO mappings into a map from Pfam domain id to the list
// of all GO ids associated with it (one domain may map to several GO terms).
222 public static Map<String, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
223 final Map<String, List<GoId>> domain_id_to_go_ids_map = new HashMap<String, List<GoId>>( pfam_to_go_mappings.size() );
224 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
225 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
226 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
228 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
230 return domain_id_to_go_ids_map;
// Parses a tab-separated mapping file (column 0: domain id, column 1: secondary
// feature) into a sorted map from domain id to its set of secondary features.
// NOTE(review): the throws clause and the return statement are elided in this view.
233 public static Map<String, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
235 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, '\t' );
236 final Map<String, Set<String>> map = new TreeMap<String, Set<String>>();
237 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
238 final String domain_id = primary_table.getValue( 0, r );
239 if ( !map.containsKey( domain_id ) ) {
240 map.put( domain_id, new HashSet<String>() );
242 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
// Computes a neighbor-joining tree from the given distance matrix, names it after
// the output file, and writes it to that file. Per the signature the phylogeny is
// also returned; NOTE(review): the return statement is elided in this view.
247 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
248 checkForOutputFileWriteability( nj_tree_outfile );
249 final NeighborJoining nj = NeighborJoining.createInstance();
250 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
251 phylogeny.setName( nj_tree_outfile.getName() );
252 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
// Builds a map from taxonomy code to numeric taxonomy id over the external nodes
// of the phylogeny. Fails fatally on nodes without taxonomy, on empty ids, and on
// duplicate codes or ids (both sides of the mapping must be unique).
// NOTE(review): the line inserting (c, iid) into the map is elided in this view.
256 public static Map<String, Integer> createTaxCodeToIdMap( final Phylogeny phy ) {
257 final Map<String, Integer> m = new HashMap<String, Integer>();
258 for( final PhylogenyNodeIterator iter = phy.iteratorExternalForward(); iter.hasNext(); ) {
259 final PhylogenyNode n = iter.next();
260 if ( n.getNodeData().isHasTaxonomy() ) {
261 final Taxonomy t = n.getNodeData().getTaxonomy();
262 final String c = t.getTaxonomyCode();
263 if ( !ForesterUtil.isEmpty( c ) ) {
264 if ( n.getNodeData().getTaxonomy() == null ) {
265 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
267 final String id = n.getNodeData().getTaxonomy().getIdentifier().getValue();
268 if ( ForesterUtil.isEmpty( id ) ) {
269 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
271 if ( m.containsKey( c ) ) {
272 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy code " + c + " is not unique" );
274 final int iid = Integer.valueOf( id );
275 if ( m.containsValue( iid ) ) {
276 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy id " + iid + " is not unique" );
282 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy for node " + n );
// Configures every PrintableDomainSimilarity in the set with the given output
// options (detailedness, GO annotation output, GO id-to-term map, GO namespace
// limit). Using a namespace limit without a populated id-to-term map is rejected,
// since the limit cannot be applied without term lookups.
288 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
289 final Detailedness detailedness,
290 final GoAnnotationOutput go_annotation_output,
291 final Map<GoId, GoTerm> go_id_to_term_map,
292 final GoNameSpace go_namespace_limit ) {
293 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) ) {
294 throw new IllegalArgumentException( "attempt to use a GO namespace limit without a GO id to term map" );
296 for( final DomainSimilarity domain_similarity : domain_similarities ) {
297 if ( domain_similarity instanceof PrintableDomainSimilarity ) {
298 final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
299 printable_domain_similarity.setDetailedness( detailedness );
300 printable_domain_similarity.setGoAnnotationOutput( go_annotation_output );
301 printable_domain_similarity.setGoIdToTermMap( go_id_to_term_map );
302 printable_domain_similarity.setGoNamespaceLimit( go_namespace_limit );
// For each protein that (optionally) matches limit_to_species and contains the
// query domains in the given N-to-C order, writes a separator-delimited record:
// species id, protein id, distinct domain ids with total counts, description and
// accession (the latter two only when present and not the NONE placeholder).
// NOTE(review): the output writer parameter declaration and several write calls
// are elided in this view of the source -- the exact record layout should be
// confirmed against the full file.
307 public static void doit( final List<Protein> proteins,
308 final List<String> query_domain_ids_nc_order,
310 final String separator,
311 final String limit_to_species,
312 final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
313 for( final Protein protein : proteins ) {
314 if ( ForesterUtil.isEmpty( limit_to_species )
315 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
316 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
317 out.write( protein.getSpecies().getSpeciesId() );
318 out.write( separator );
319 out.write( protein.getProteinId().getId() );
320 out.write( separator );
// Each distinct domain id is reported once, even if it occurs multiple times.
322 final Set<String> visited_domain_ids = new HashSet<String>();
323 boolean first = true;
324 for( final Domain domain : protein.getProteinDomains() ) {
325 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
326 visited_domain_ids.add( domain.getDomainId() );
333 out.write( domain.getDomainId() );
335 out.write( "" + domain.getTotalCount() );
340 out.write( separator );
341 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
342 .equals( SurfacingConstants.NONE ) ) ) {
343 out.write( protein.getDescription() );
345 out.write( separator );
346 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
347 .equals( SurfacingConstants.NONE ) ) ) {
348 out.write( protein.getAccession() );
350 out.write( SurfacingConstants.NL );
// Computes per-genome domains-per-protein statistics, folds them into the
// all-genomes statistics and histogram, classifies each domain as always-single /
// sometimes-single / never-single (moving domains between the three sorted sets as
// evidence accumulates), and writes one tab-separated summary row for the genome.
// NOTE(review): several lines (else-branches, writer output for the stats.getN()<1
// case, exception handling body) are elided in this view of the source.
357 public static void domainsPerProteinsStatistics( final String genome,
358 final List<Protein> protein_list,
359 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
360 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
361 final SortedSet<String> domains_which_are_always_single,
362 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
363 final SortedSet<String> domains_which_never_single,
364 final Writer writer ) {
365 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
366 for( final Protein protein : protein_list ) {
367 final int domains = protein.getNumberOfProteinDomains();
368 //System.out.println( domains );
369 stats.addValue( domains );
370 all_genomes_domains_per_potein_stats.addValue( domains );
// Maintain the histogram: insert count 1 for a new key, otherwise increment.
371 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
372 all_genomes_domains_per_potein_histo.put( domains, 1 );
375 all_genomes_domains_per_potein_histo.put( domains,
376 1 + all_genomes_domains_per_potein_histo.get( domains ) );
// Single-domain protein: promotes its domain towards "always single", unless it
// was previously seen in a multi-domain protein (then it is "sometimes single").
378 if ( domains == 1 ) {
379 final String domain = protein.getProteinDomain( 0 ).getDomainId();
380 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
381 if ( domains_which_never_single.contains( domain ) ) {
382 domains_which_never_single.remove( domain );
383 domains_which_are_sometimes_single_sometimes_not.add( domain );
386 domains_which_are_always_single.add( domain );
// Multi-domain protein: symmetric update towards "never single".
390 else if ( domains > 1 ) {
391 for( final Domain d : protein.getProteinDomains() ) {
392 final String domain = d.getDomainId();
393 // System.out.println( domain );
394 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
395 if ( domains_which_are_always_single.contains( domain ) ) {
396 domains_which_are_always_single.remove( domain );
397 domains_which_are_sometimes_single_sometimes_not.add( domain );
400 domains_which_never_single.add( domain );
// Emit one tab-separated row: genome, mean, sd, median, n, min, max (blanks when
// too few data points for a given statistic).
407 writer.write( genome );
408 writer.write( "\t" );
409 if ( stats.getN() >= 1 ) {
410 writer.write( stats.arithmeticMean() + "" );
411 writer.write( "\t" );
412 if ( stats.getN() >= 2 ) {
413 writer.write( stats.sampleStandardDeviation() + "" );
418 writer.write( "\t" );
419 writer.write( stats.median() + "" );
420 writer.write( "\t" );
421 writer.write( stats.getN() + "" );
422 writer.write( "\t" );
423 writer.write( stats.getMin() + "" );
424 writer.write( "\t" );
425 writer.write( stats.getMax() + "" );
428 writer.write( "\t" );
429 writer.write( "\t" );
430 writer.write( "\t" );
432 writer.write( "\t" );
433 writer.write( "\t" );
435 writer.write( "\n" );
437 catch ( final IOException e ) {
// Writes a domain-length analysis report to outfile: mean-based statistics per
// species, outlier species per domain (|Z| >= 1.5), an overall summary with an
// ASCII histogram, per-species Z-scores, and finally the species whose mean domain
// length deviates by at least one (population) standard deviation.
// NOTE(review): population_sd is computed with sampleStandardDeviation() despite
// its name, and the final out.close()/trailing lines are elided in this view.
442 public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
443 final int number_of_genomes,
444 final DomainLengthsTable domain_lengths_table,
445 final File outfile ) throws IOException {
446 final DecimalFormat df = new DecimalFormat( "#.00" );
447 checkForOutputFileWriteability( outfile );
448 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
449 out.write( "MEAN BASED STATISTICS PER SPECIES" );
450 out.write( ForesterUtil.LINE_SEPARATOR );
451 out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
452 out.write( ForesterUtil.LINE_SEPARATOR );
453 out.write( ForesterUtil.LINE_SEPARATOR );
454 final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
455 out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
456 out.write( ForesterUtil.LINE_SEPARATOR );
457 for( final DomainLengths domain_lengths : domain_lengths_list ) {
458 final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
459 if ( species_list.size() > 0 ) {
460 out.write( domain_lengths.getDomainId() + "\t" );
461 for( final Species species : species_list ) {
462 out.write( species + "\t" );
464 out.write( ForesterUtil.LINE_SEPARATOR );
465 // DescriptiveStatistics stats_for_domain = domain_lengths
466 // .calculateMeanBasedStatistics();
467 //AsciiHistogram histo = new AsciiHistogram( stats_for_domain );
468 //System.out.println( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
471 out.write( ForesterUtil.LINE_SEPARATOR );
472 out.write( ForesterUtil.LINE_SEPARATOR );
473 out.write( "OUTLIER SPECIES (Z 1.0)" );
474 out.write( ForesterUtil.LINE_SEPARATOR );
475 final DescriptiveStatistics stats_for_all_species = domain_lengths_table
476 .calculateMeanBasedStatisticsForAllSpecies();
477 out.write( stats_for_all_species.asSummary() );
478 out.write( ForesterUtil.LINE_SEPARATOR );
479 final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
480 out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
481 out.write( ForesterUtil.LINE_SEPARATOR );
482 final double population_sd = stats_for_all_species.sampleStandardDeviation();
483 final double population_mean = stats_for_all_species.arithmeticMean();
// First pass: Z-score of every species' mean domain length.
484 for( final Species species : domain_lengths_table.getSpecies() ) {
485 final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
486 final double z = ( x - population_mean ) / population_sd;
487 out.write( species + "\t" + z );
488 out.write( ForesterUtil.LINE_SEPARATOR );
490 out.write( ForesterUtil.LINE_SEPARATOR );
// Second pass: report only outliers (|Z| >= 1.0) with a formatted score and summary.
491 for( final Species species : domain_lengths_table.getSpecies() ) {
492 final DescriptiveStatistics stats_for_species = domain_lengths_table
493 .calculateMeanBasedStatisticsForSpecies( species );
494 final double x = stats_for_species.arithmeticMean();
495 final double z = ( x - population_mean ) / population_sd;
496 if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
497 out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
498 out.write( ForesterUtil.LINE_SEPARATOR );
502 // final List<HistogramData> histogram_datas = new ArrayList<HistogramData>();
503 // for( int i = 0; i < number_of_genomes; ++i ) {
504 // final Species species = new BasicSpecies( input_file_properties[ i ][ 0 ] );
506 // .add( new HistogramData( species.toString(), domain_lengths_table
507 // .calculateMeanBasedStatisticsForSpecies( species )
508 // .getDataAsDoubleArray(), 5, 600, null, 60 ) );
510 // final HistogramsFrame hf = new HistogramsFrame( histogram_datas );
511 // hf.setVisible( true );
// Runs the full parsimony analysis pipeline: (1) Dollo parsimony on domain
// presence/absence (optionally restricted by a positive filter), writing gain/loss
// matrices, per-state lists, HTML reports and the annotated phylogeny; then
// (2) Fitch parsimony on binary domain combinations (optionally randomized with
// the given seed), writing the analogous outputs plus graph-analysis files and
// independent-gain statistics. Collects all encountered/gained/lost Pfams along
// the way and writes them to per-category files.
// NOTE(review): this view of the source is heavily elided (javadoc opening,
// many argument lines of the write* calls, else-branches, closing braces) --
// the structural comments below describe only what is visible.
517 * @param all_binary_domains_combination_lost_fitch
518 * @param consider_directedness_and_adjacency_for_bin_combinations
519 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
520 * which were gained under unweighted (Fitch) parsimony.
522 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
523 final boolean radomize_fitch_parsimony,
524 final String outfile_name,
525 final DomainParsimonyCalculator domain_parsimony,
526 final Phylogeny phylogeny,
527 final Map<String, List<GoId>> domain_id_to_go_ids_map,
528 final Map<GoId, GoTerm> go_id_to_term_map,
529 final GoNameSpace go_namespace_limit,
530 final String parameters_str,
531 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
532 final SortedSet<String> positive_filter,
533 final boolean output_binary_domain_combinations_for_graphs,
534 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
535 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
536 final BinaryDomainCombination.DomainCombinationType dc_type,
537 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
538 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
539 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain,
540 final Map<String, Integer> tax_code_to_id_map,
541 final boolean write_to_nexus ) {
542 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
543 final String date_time = ForesterUtil.getCurrentDateTime();
// Accumulators for all Pfams seen/gained/lost across both parsimony runs.
544 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
545 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
546 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
547 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
548 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
549 if ( write_to_nexus ) {
550 writeToNexus( outfile_name, domain_parsimony, phylogeny );
// DOLLO DOMAINS
// -------------
554 Phylogeny local_phylogeny_l = phylogeny.copy();
555 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
556 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
559 domain_parsimony.executeDolloParsimonyOnDomainPresence();
561 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
562 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
563 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
564 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
565 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
566 CharacterStateMatrix.GainLossStates.GAIN,
567 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
569 ForesterUtil.LINE_SEPARATOR,
571 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
572 CharacterStateMatrix.GainLossStates.LOSS,
573 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
575 ForesterUtil.LINE_SEPARATOR,
577 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
578 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
// HTML reports for Dollo gains / losses / present domains.
580 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
584 domain_parsimony.getGainLossMatrix(),
585 CharacterStateMatrix.GainLossStates.GAIN,
586 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
588 ForesterUtil.LINE_SEPARATOR,
589 "Dollo Parsimony | Gains | Domains",
591 domain_id_to_secondary_features_maps,
592 all_pfams_encountered,
593 all_pfams_gained_as_domains,
595 tax_code_to_id_map );
596 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
600 domain_parsimony.getGainLossMatrix(),
601 CharacterStateMatrix.GainLossStates.LOSS,
602 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
604 ForesterUtil.LINE_SEPARATOR,
605 "Dollo Parsimony | Losses | Domains",
607 domain_id_to_secondary_features_maps,
608 all_pfams_encountered,
609 all_pfams_lost_as_domains,
611 tax_code_to_id_map );
612 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
616 domain_parsimony.getGainLossMatrix(),
618 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
620 ForesterUtil.LINE_SEPARATOR,
621 "Dollo Parsimony | Present | Domains",
623 domain_id_to_secondary_features_maps,
624 all_pfams_encountered,
627 tax_code_to_id_map );
628 preparePhylogeny( local_phylogeny_l,
631 "Dollo parsimony on domain presence/absence",
632 "dollo_on_domains_" + outfile_name,
634 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
635 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
637 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
638 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
640 catch ( final IOException e ) {
642 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
644 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
645 // FITCH DOMAIN COMBINATIONS
646 // -------------------------
647 local_phylogeny_l = phylogeny.copy();
648 String randomization = "no";
649 if ( radomize_fitch_parsimony ) {
650 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
651 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
654 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( USE_LAST );
656 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
657 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
658 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
659 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
661 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
662 CharacterStateMatrix.GainLossStates.GAIN,
663 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
665 ForesterUtil.LINE_SEPARATOR,
667 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
668 CharacterStateMatrix.GainLossStates.LOSS,
670 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
672 ForesterUtil.LINE_SEPARATOR,
674 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
675 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
// Optionally collect the gained / lost combinations into caller-supplied lists.
676 if ( all_binary_domains_combination_gained_fitch != null ) {
677 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
679 all_binary_domains_combination_gained_fitch,
682 if ( all_binary_domains_combination_lost_fitch != null ) {
683 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
685 all_binary_domains_combination_lost_fitch,
688 if ( output_binary_domain_combinations_for_graphs ) {
690 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
691 .getGainLossMatrix(),
694 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
696 ForesterUtil.LINE_SEPARATOR,
697 BinaryDomainCombination.OutputFormat.DOT );
// HTML reports for Fitch gains / losses / present domain combinations.
700 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
704 domain_parsimony.getGainLossMatrix(),
705 CharacterStateMatrix.GainLossStates.GAIN,
706 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
708 ForesterUtil.LINE_SEPARATOR,
709 "Fitch Parsimony | Gains | Domain Combinations",
712 all_pfams_encountered,
713 all_pfams_gained_as_dom_combinations,
715 tax_code_to_id_map );
716 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
720 domain_parsimony.getGainLossMatrix(),
721 CharacterStateMatrix.GainLossStates.LOSS,
722 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
724 ForesterUtil.LINE_SEPARATOR,
725 "Fitch Parsimony | Losses | Domain Combinations",
728 all_pfams_encountered,
729 all_pfams_lost_as_dom_combinations,
731 tax_code_to_id_map );
732 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
736 domain_parsimony.getGainLossMatrix(),
738 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
740 ForesterUtil.LINE_SEPARATOR,
741 "Fitch Parsimony | Present | Domain Combinations",
744 all_pfams_encountered,
747 tax_code_to_id_map );
// Dump the accumulated Pfam sets to their per-category files.
748 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
751 all_pfams_encountered );
752 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
753 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
754 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
755 all_pfams_gained_as_dom_combinations );
756 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
757 preparePhylogeny( local_phylogeny_l,
760 "Fitch parsimony on binary domain combination presence/absence randomization: "
762 "fitch_on_binary_domain_combinations_" + outfile_name,
764 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
765 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
766 calculateIndependentDomainCombinationGains( local_phylogeny_l,
768 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
770 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
772 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
774 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
775 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
776 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
777 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
778 protein_length_stats_by_dc,
779 domain_number_stats_by_dc,
780 domain_length_stats_by_domain );
// Runs parsimony analyses on secondary-feature presence/absence data:
// first Dollo parsimony on secondary features (via mapping_results_map),
// then Fitch parsimony on secondary binary domain combinations; writes
// gain/loss matrices, gain/loss/present lists, and decorated trees to
// files derived from outfile_name.
// NOTE(review): several source lines are elided in this view (embedded
// line-number gaps), so some statements/braces are not visible here.
784 public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
785 final DomainParsimonyCalculator secondary_features_parsimony,
786 final Phylogeny phylogeny,
787 final String parameters_str,
788 final Map<Species, MappingResults> mapping_results_map ) {
789 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
790 final String date_time = ForesterUtil.getCurrentDateTime();
791 System.out.println();
// Presence/absence matrix of secondary features written in NEXUS format.
792 writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
793 secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
// Work on a copy so the caller's phylogeny is not mutated by decoration.
795 Phylogeny local_phylogeny_copy = phylogeny.copy();
796 secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
797 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
798 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
799 SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
800 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
// Per-state lists from the Dollo gain/loss matrix: gains, losses, present.
802 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
803 CharacterStateMatrix.GainLossStates.GAIN,
805 + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
807 ForesterUtil.LINE_SEPARATOR,
810 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
811 CharacterStateMatrix.GainLossStates.LOSS,
813 + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
815 ForesterUtil.LINE_SEPARATOR,
818 .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
821 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
823 ForesterUtil.LINE_SEPARATOR,
// Decorate the copied tree with the Dollo result and persist it.
825 preparePhylogeny( local_phylogeny_copy,
826 secondary_features_parsimony,
828 "Dollo parsimony on secondary feature presence/absence",
829 "dollo_on_secondary_features_" + outfile_name,
831 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
832 + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
833 // FITCH DOMAIN COMBINATIONS
834 // -------------------------
835 local_phylogeny_copy = phylogeny.copy();
836 final String randomization = "no";
837 secondary_features_parsimony.executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( USE_LAST );
838 preparePhylogeny( local_phylogeny_copy,
839 secondary_features_parsimony,
841 "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
843 "fitch_on_binary_domain_combinations_" + outfile_name,
845 SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
846 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
// Independent (convergent) DC gain analysis on the mapped Fitch result;
// trailing nulls: no protein-length / domain-number / domain-length stats.
847 calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
848 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
849 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
850 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
851 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
852 + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
853 + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
// Writes, for each protein containing the query domains in the given
// N-to-C order, a separated record: species id, protein id, the distinct
// domain ids with their counts, then description and accession if present.
// Optionally restricted to a single species (case-insensitive match).
// NOTE(review): the Writer parameter ("out") and some statements are on
// source lines elided from this view (embedded line-number gaps).
856 public static void extractProteinNames( final List<Protein> proteins,
857 final List<String> query_domain_ids_nc_order,
859 final String separator,
860 final String limit_to_species ) throws IOException {
861 for( final Protein protein : proteins ) {
// Empty limit_to_species means "all species".
862 if ( ForesterUtil.isEmpty( limit_to_species )
863 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
// "true" presumably requests ordered containment of the query domains
// — TODO confirm against Protein.contains(List, boolean).
864 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
865 out.write( protein.getSpecies().getSpeciesId() );
866 out.write( separator );
867 out.write( protein.getProteinId().getId() );
868 out.write( separator );
// Emit each distinct domain id once, with its total count in the protein.
870 final Set<String> visited_domain_ids = new HashSet<String>();
871 boolean first = true;
872 for( final Domain domain : protein.getProteinDomains() ) {
873 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
874 visited_domain_ids.add( domain.getDomainId() );
881 out.write( domain.getDomainId() );
883 out.write( "" + domain.getTotalCount() );
888 out.write( separator );
// Description and accession only when non-empty and not the NONE sentinel.
889 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
890 .equals( SurfacingConstants.NONE ) ) ) {
891 out.write( protein.getDescription() );
893 out.write( separator );
894 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
895 .equals( SurfacingConstants.NONE ) ) ) {
896 out.write( protein.getAccession() );
898 out.write( SurfacingConstants.NL );
// Per-species variant: for every protein carrying the given domain id,
// writes a separated record with species, protein id, domain id, the
// ranges of matching domains (filtered by per-domain E-value cutoff;
// negative cutoff disables filtering), a sorted architecture listing of
// all passing domains with ranges and E-values, then description and
// accession if present. Also prints inter-domain gap lengths to stdout.
// NOTE(review): the Writer parameter ("out") and the "prev_to"
// declaration are on source lines elided from this view.
905 public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
906 final String domain_id,
908 final String separator,
909 final String limit_to_species,
910 final double domain_e_cutoff ) throws IOException {
911 System.out.println( "Per domain E-value: " + domain_e_cutoff );
912 for( final Species species : protein_lists_per_species.keySet() ) {
913 System.out.println( species + ":" );
914 for( final Protein protein : protein_lists_per_species.get( species ) ) {
915 if ( ForesterUtil.isEmpty( limit_to_species )
916 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
917 final List<Domain> domains = protein.getProteinDomains( domain_id );
918 if ( domains.size() > 0 ) {
919 out.write( protein.getSpecies().getSpeciesId() );
920 out.write( separator );
921 out.write( protein.getProteinId().getId() );
922 out.write( separator );
923 out.write( domain_id.toString() );
924 out.write( separator );
// Ranges of the matching domains; gap to the previous domain printed
// to stdout (prev_to is initialized on an elided line, presumably -1).
926 for( final Domain domain : domains ) {
927 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
929 out.write( domain.getFrom() + "-" + domain.getTo() );
930 if ( prev_to >= 0 ) {
931 final int l = domain.getFrom() - prev_to;
932 System.out.println( l );
934 prev_to = domain.getTo();
938 out.write( separator );
// Collect ALL of the protein's domains passing the cutoff, sort by
// position, and emit id:from-to:evalue for each.
939 final List<Domain> domain_list = new ArrayList<Domain>();
940 for( final Domain domain : protein.getProteinDomains() ) {
941 if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
942 domain_list.add( domain );
945 final Domain domain_ary[] = new Domain[ domain_list.size() ];
946 for( int i = 0; i < domain_list.size(); ++i ) {
947 domain_ary[ i ] = domain_list.get( i );
949 Arrays.sort( domain_ary, new DomainComparator( true ) );
951 boolean first = true;
952 for( final Domain domain : domain_ary ) {
959 out.write( domain.getDomainId().toString() );
960 out.write( ":" + domain.getFrom() + "-" + domain.getTo() );
961 out.write( ":" + domain.getPerDomainEvalue() );
// Description and accession only when non-empty and not the NONE sentinel.
964 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
965 .equals( SurfacingConstants.NONE ) ) ) {
966 out.write( protein.getDescription() );
968 out.write( separator );
969 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
970 .equals( SurfacingConstants.NONE ) ) ) {
971 out.write( protein.getAccession() );
973 out.write( SurfacingConstants.NL );
// Collects the union of all domain ids across the given genome-wide
// combinable-domains objects, sorted by natural String order.
981 public static SortedSet<String> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
982 final SortedSet<String> all_domains_ids = new TreeSet<String>();
983 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
984 final Set<String> all_domains = gwcd.getAllDomainIds();
985 // for( final Domain domain : all_domains ) {
986 all_domains_ids.addAll( all_domains );
989 return all_domains_ids;
// Counts occurrences of each domain id over all domains of all given
// proteins, keyed by domain id in sorted order.
// NOTE(review): the else-branch (first occurrence → count 1) and the
// return statement are on source lines elided from this view.
992 public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
993 final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
994 for( final Protein protein_domain_collection : protein_domain_collections ) {
// Iterates as Object and casts — presumably a raw-typed legacy API.
995 for( final Object name : protein_domain_collection.getProteinDomains() ) {
996 final BasicDomain protein_domain = ( BasicDomain ) name;
997 final String id = protein_domain.getDomainId();
998 if ( map.containsKey( id ) ) {
999 map.put( id, map.get( id ) + 1 );
// Counts nodes that have neither a name nor a taxonomy scientific/common
// name; appends the parent names of such nodes to "names" for reporting.
// NOTE(review): the counter increment and return statement are on source
// lines elided from this view; raw type List (line 1023) is legacy style.
1009 public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
1010 final PhylogenyNodeIterator it = p.iteratorPostorder();
1012 while ( it.hasNext() ) {
1013 final PhylogenyNode n = it.next();
// "Lacking a name" = empty node name AND no usable taxonomy names.
1014 if ( ForesterUtil.isEmpty( n.getName() )
1015 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1016 .getScientificName() ) )
1017 && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
1018 .getCommonName() ) ) ) {
1019 if ( n.getParent() != null ) {
1020 names.append( " " );
1021 names.append( n.getParent().getName() );
1023 final List l = n.getAllExternalDescendants();
// Prints the whole descendant list once per element — looks like debug
// output; possibly intended to print "object" instead of "l". Confirm.
1024 for( final Object object : l ) {
1025 System.out.println( l.toString() );
1034 * Returns true if Domain domain falls in an uninterrupted stretch of
1035 * covered positions.
1038 * @param covered_positions
// Engulfed = every position from..to (inclusive) is already covered.
// NOTE(review): the "return true/false" lines are elided from this view.
1041 public static boolean isEngulfed( final Domain domain, final List<Boolean> covered_positions ) {
1042 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
// Any position beyond the covered list, or not covered, breaks the stretch.
1043 if ( ( i >= covered_positions.size() ) || ( covered_positions.get( i ) != true ) ) {
// Writes (1) a tab-separated file of domain-architecture counts for DAs
// occurring at least min_count times, and (2) for each such DA, the
// genomes containing it, to unique_da_outfile.
// NOTE(review): several lines (try-opener, loop-closing braces) are
// elided from this view (embedded line-number gaps).
1050 public static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
1051 final SortedMap<String, Integer> domain_architecuture_counts,
1052 final int min_count,
1053 final File da_counts_outfile,
1054 final File unique_da_outfile ) {
1055 checkForOutputFileWriteability( da_counts_outfile );
1056 checkForOutputFileWriteability( unique_da_outfile );
1058 final BufferedWriter da_counts_out = new BufferedWriter( new FileWriter( da_counts_outfile ) );
1059 final BufferedWriter unique_da_out = new BufferedWriter( new FileWriter( unique_da_outfile ) );
1060 final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
1061 while ( it.hasNext() ) {
1062 final Map.Entry<String, Integer> e = it.next();
1063 final String da = e.getKey();
1064 final int count = e.getValue();
1065 if ( count >= min_count ) {
1066 da_counts_out.write( da );
1067 da_counts_out.write( "\t" );
1068 da_counts_out.write( String.valueOf( count ) );
1069 da_counts_out.write( ForesterUtil.LINE_SEPARATOR );
// For each genome, report it if it contains this (sufficiently frequent) DA.
1072 final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
1073 while ( it2.hasNext() ) {
1074 final Map.Entry<String, Set<String>> e2 = it2.next();
1075 final String genome = e2.getKey();
1076 final Set<String> das = e2.getValue();
1077 if ( das.contains( da ) ) {
1078 unique_da_out.write( genome );
1079 unique_da_out.write( "\t" );
1080 unique_da_out.write( da );
1081 unique_da_out.write( ForesterUtil.LINE_SEPARATOR );
1086 unique_da_out.close();
1087 da_counts_out.close();
1089 catch ( final IOException e ) {
1090 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// NOTE(review): these messages say "distance matrices" but this method
// writes DA counts / unique DAs — looks copy-pasted; confirm before
// changing the user-visible wording.
1092 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + da_counts_outfile + "\"" );
1093 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + unique_da_outfile + "\"" );
// Decorates phylogeny p with the parsimony result and attaches a
// description summarizing method, date, cost, gains, losses, unchanged
// counts and parameters; sets the cost as a "parsimony" confidence and
// fixes the tree as rooted and non-rerootable.
// NOTE(review): one parameter line (original line 1101) is elided from
// this view, so the full signature is not visible here.
1097 public static void preparePhylogeny( final Phylogeny p,
1098 final DomainParsimonyCalculator domain_parsimony,
1099 final String date_time,
1100 final String method,
1102 final String parameters_str ) {
1103 domain_parsimony.decoratePhylogenyWithDomains( p );
1104 final StringBuilder desc = new StringBuilder();
1105 desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
1106 desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
1107 desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
1108 desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
1109 desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
1110 desc.append( "[Parameters: " + parameters_str + "]" );
1112 p.setDescription( desc.toString() );
1113 p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
1114 p.setRerootable( false );
1115 p.setRooted( true );
1119 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
// Serializes a protein's pairwise domain combinations (one line per
// distinct N/C-ordered pair) in the column layout described above; a
// single-domain protein yields one line with empty C-terminal columns,
// and a domain-less protein yields a line of separators only.
// NOTE(review): some lines (dcs.add, counts columns for the single-domain
// case, closing braces, return) are elided from this view.
1123 static public StringBuffer proteinToDomainCombinations( final Protein protein,
1124 final String protein_id,
1125 final String separator ) {
1126 final StringBuffer sb = new StringBuffer();
1127 if ( protein.getSpecies() == null ) {
1128 throw new IllegalArgumentException( "species must not be null" );
1130 if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
1131 throw new IllegalArgumentException( "species id must not be empty" );
1133 final List<Domain> domains = protein.getProteinDomains();
1134 if ( domains.size() > 1 ) {
// Count multiplicity of each domain id; reported in the last two columns.
1135 final Map<String, Integer> counts = new HashMap<String, Integer>();
1136 for( final Domain domain : domains ) {
1137 final String id = domain.getDomainId();
1138 if ( counts.containsKey( id ) ) {
1139 counts.put( id, counts.get( id ) + 1 );
1142 counts.put( id, 1 );
// Emit each unordered pair once, oriented so domain_n is N-terminal
// (smaller "from" position) and domain_c is C-terminal.
1145 final Set<String> dcs = new HashSet<String>();
1146 for( int i = 1; i < domains.size(); ++i ) {
1147 for( int j = 0; j < i; ++j ) {
1148 Domain domain_n = domains.get( i );
1149 Domain domain_c = domains.get( j );
1150 if ( domain_n.getFrom() > domain_c.getFrom() ) {
1151 domain_n = domains.get( j );
1152 domain_c = domains.get( i );
// Key is the concatenated ids, deduplicating repeated combinations.
1154 final String dc = domain_n.getDomainId() + domain_c.getDomainId();
1155 if ( !dcs.contains( dc ) ) {
1157 sb.append( protein.getSpecies() );
1158 sb.append( separator );
1159 sb.append( protein_id );
1160 sb.append( separator );
1161 sb.append( domain_n.getDomainId() );
1162 sb.append( separator );
1163 sb.append( domain_c.getDomainId() );
1164 sb.append( separator );
1165 sb.append( domain_n.getPerDomainEvalue() );
1166 sb.append( separator );
1167 sb.append( domain_c.getPerDomainEvalue() );
1168 sb.append( separator );
1169 sb.append( counts.get( domain_n.getDomainId() ) );
1170 sb.append( separator );
1171 sb.append( counts.get( domain_c.getDomainId() ) );
1172 sb.append( ForesterUtil.LINE_SEPARATOR );
// Exactly one domain: C-terminal columns left empty.
1177 else if ( domains.size() == 1 ) {
1178 sb.append( protein.getSpecies() );
1179 sb.append( separator );
1180 sb.append( protein_id );
1181 sb.append( separator );
1182 sb.append( domains.get( 0 ).getDomainId() );
1183 sb.append( separator );
1184 sb.append( separator );
1185 sb.append( domains.get( 0 ).getPerDomainEvalue() );
1186 sb.append( separator );
1187 sb.append( separator );
1189 sb.append( separator );
1190 sb.append( ForesterUtil.LINE_SEPARATOR );
// No domains at all: placeholder line of separators only.
1193 sb.append( protein.getSpecies() );
1194 sb.append( separator );
1195 sb.append( protein_id );
1196 sb.append( separator );
1197 sb.append( separator );
1198 sb.append( separator );
1199 sb.append( separator );
1200 sb.append( separator );
1201 sb.append( separator );
1202 sb.append( ForesterUtil.LINE_SEPARATOR );
1209 * Example regarding engulfment: ------------0.1 ----------0.2 --0.3 =>
1210 * domain with 0.3 is ignored
1212 * -----------0.1 ----------0.2 --0.3 => domain with 0.3 is ignored
1215 * ------------0.1 ----------0.3 --0.2 => domains with 0.3 and 0.2 are _not_
1218 * @param max_allowed_overlap
1219 * maximal allowed overlap (inclusive) to be still considered not
1220 * overlapping (zero or negative value to allow any overlap)
1221 * @param remove_engulfed_domains
1222 * to remove domains which are completely engulfed by coverage of
1223 * domains with better support
// Greedy filter: domains are visited in ascending confidence (E-value)
// order; each is kept only if it does not overlap previously-kept
// coverage beyond max_allowed_overlap and (optionally) is not fully
// engulfed by it. Returns a new protein with the surviving domains.
1227 public static Protein removeOverlappingDomains( final int max_allowed_overlap,
1228 final boolean remove_engulfed_domains,
1229 final Protein protein ) {
1230 final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(), protein.getSpecies()
1231 .getSpeciesId(), protein.getLength() );
1232 final List<Domain> sorted = SurfacingUtil.sortDomainsWithAscendingConfidenceValues( protein );
// covered_positions[i] == true iff position i is covered by a kept domain.
1233 final List<Boolean> covered_positions = new ArrayList<Boolean>();
1234 for( final Domain domain : sorted ) {
1235 if ( ( ( max_allowed_overlap < 0 ) || ( SurfacingUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
1236 && ( !remove_engulfed_domains || !isEngulfed( domain, covered_positions ) ) ) {
// Pad the coverage list with "false" up to this domain's start...
1237 final int covered_positions_size = covered_positions.size();
1238 for( int i = covered_positions_size; i < domain.getFrom(); ++i ) {
1239 covered_positions.add( false );
// ...then mark the domain's own range as covered (set or append).
1241 final int new_covered_positions_size = covered_positions.size();
1242 for( int i = domain.getFrom(); i <= domain.getTo(); ++i ) {
1243 if ( i < new_covered_positions_size ) {
1244 covered_positions.set( i, true );
1247 covered_positions.add( true );
1250 pruned_protein.addProteinDomain( domain );
1253 return pruned_protein;
// Returns the protein's domains as a new list sorted by ascending
// confidence (per the ASCENDING_CONFIDENCE_VALUE_ORDER comparator).
// NOTE(review): the add-to-list statement and the return are on source
// lines elided from this view.
1256 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1257 final List<Domain> domains = new ArrayList<Domain>();
1258 for( final Domain d : protein.getProteinDomains() ) {
1261 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
// Records the genome's set of distinct domain-architecture strings into
// domain_architecutures and updates global per-DA occurrence counts.
// NOTE(review): the da.add(...) call, the count return value, and
// closing braces are on source lines elided from this view.
1265 public static int storeDomainArchitectures( final String genome,
1266 final SortedMap<String, Set<String>> domain_architecutures,
1267 final List<Protein> protein_list,
1268 final Map<String, Integer> distinct_domain_architecuture_counts ) {
1269 final Set<String> da = new HashSet<String>();
1270 domain_architecutures.put( genome, da );
1271 for( final Protein protein : protein_list ) {
// DA string: domap ids joined by "~", ids shortened/normalized per
// toDomainArchitectureString( "~", 3, "=" ) — see BasicProtein.
1272 final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~", 3, "=" );
1273 if ( !da.contains( da_str ) ) {
// Count each DA only once per genome.
1274 if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
1275 distinct_domain_architecuture_counts.put( da_str, 1 );
1278 distinct_domain_architecuture_counts.put( da_str,
1279 distinct_domain_architecuture_counts.get( da_str ) + 1 );
// For every internal node of p, writes one file (named after the node,
// under a per-subtree base directory) listing all domains gained (or
// lost, if get_gains is false) anywhere in that node's subtree.
// NOTE(review): writer.close() and loop-closing braces are on source
// lines elided from this view.
1287 public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
1288 final boolean get_gains,
1289 final String outdir,
1290 final String suffix_for_filename ) throws IOException {
1291 CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
1293 state = CharacterStateMatrix.GainLossStates.LOSS;
1295 final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
1299 for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
1300 final PhylogenyNode node = it.next();
// Only internal nodes have subtrees worth summarizing.
1301 if ( !node.isExternal() ) {
1302 final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
// Skip empty result sets — no file is created for them.
1303 if ( domains.size() > 0 ) {
1304 final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
1305 + node.getName() + suffix_for_filename );
1306 for( final String domain : domains ) {
1307 writer.write( domain );
1308 writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes all binary domain combinations of one genome in DOT
// graph-describing language, for downstream graph analysis. Output file
// name derives from input_file_properties[i][1] plus a suffix, placed
// under output_dir when given.
// NOTE(review): the "final int i" parameter, the try-opener, and
// out_dot.close() are on source lines elided from this view.
1316 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1317 final File output_dir,
1318 final GenomeWideCombinableDomains gwcd,
1320 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1321 File dc_outfile_dot = new File( input_file_properties[ i ][ 1 ]
1322 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1323 if ( output_dir != null ) {
1324 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
1326 checkForOutputFileWriteability( dc_outfile_dot );
1327 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1329 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
1330 for( final BinaryDomainCombination bdc : binary_combinations ) {
1331 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1333 out_dot.write( SurfacingConstants.NL );
1337 catch ( final IOException e ) {
// fatalError terminates the program; no resource cleanup after this point.
1338 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1340 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
1341 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1342 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
// Writes, per identifier (sorted), the characters of the gain/loss
// matrix that are in the requested state; a null state selects
// GAIN-or-UNCHANGED_PRESENT (i.e. "present"). Optional descriptions are
// appended after each matching character.
1345 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1346 final CharacterStateMatrix.GainLossStates state,
1347 final String filename,
1348 final String indentifier_characters_separator,
1349 final String character_separator,
1350 final Map<String, String> descriptions ) {
1351 final File outfile = new File( filename );
1352 checkForOutputFileWriteability( outfile );
// Sort identifiers for deterministic output order.
1353 final SortedSet<String> sorted_ids = new TreeSet<String>();
1354 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1355 sorted_ids.add( matrix.getIdentifier( i ) );
1358 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1359 for( final String id : sorted_ids ) {
1360 out.write( indentifier_characters_separator );
1361 out.write( "#" + id );
1362 out.write( indentifier_characters_separator );
1363 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1365 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1366 if ( ( matrix.getState( id, c ) == state )
1367 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1368 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1369 out.write( matrix.getCharacter( c ) );
1370 if ( ( descriptions != null ) && !descriptions.isEmpty()
1371 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
1373 out.write( descriptions.get( matrix.getCharacter( c ) ) );
1375 out.write( character_separator );
1382 catch ( final IOException e ) {
1383 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1385 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Variant of writeBinaryStatesMatrixAsListToFile for binary domain
// combinations: each selected character is parsed into a
// BinaryDomainCombination and written in the requested graph-describing
// output format (e.g. DOT) instead of as plain text.
1388 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1389 final CharacterStateMatrix.GainLossStates state,
1390 final String filename,
1391 final String indentifier_characters_separator,
1392 final String character_separator,
1393 final BinaryDomainCombination.OutputFormat bc_output_format ) {
1394 final File outfile = new File( filename );
1395 checkForOutputFileWriteability( outfile );
// Sort identifiers for deterministic output order.
1396 final SortedSet<String> sorted_ids = new TreeSet<String>();
1397 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1398 sorted_ids.add( matrix.getIdentifier( i ) );
1401 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1402 for( final String id : sorted_ids ) {
1403 out.write( indentifier_characters_separator );
1404 out.write( "#" + id );
1405 out.write( indentifier_characters_separator );
1406 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1408 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1409 if ( ( matrix.getState( id, c ) == state )
1410 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1411 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1412 BinaryDomainCombination bdc = null;
// Character string is expected to encode a binary domain combination;
// a parse failure is treated as fatal.
1414 bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
1416 catch ( final Exception e ) {
1417 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1419 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
1420 out.write( character_separator );
1427 catch ( final IOException e ) {
1428 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1430 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
// Writes a detailed HTML report of matrix characters in the given
// gain/loss state (null = present): an index table of identifiers, then
// per identifier a table mapping Pfam domains to GO terms (via
// domain_id_to_go_ids_map / go_id_to_term_map, optionally restricted to
// go_namespace_limit). Also writes one per-node gain/loss file per
// identifier, and accumulates encountered/gained-or-lost Pfams.
// NOTE(review): many source lines are elided from this view (embedded
// line-number gaps) — argument lists of writeDomainData /
// writeDomainsToIndividualFilePerTreeNode, some braces, out.close().
1433 public static void writeBinaryStatesMatrixToList( final Map<String, List<GoId>> domain_id_to_go_ids_map,
1434 final Map<GoId, GoTerm> go_id_to_term_map,
1435 final GoNameSpace go_namespace_limit,
1436 final boolean domain_combinations,
1437 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1438 final CharacterStateMatrix.GainLossStates state,
1439 final String filename,
1440 final String indentifier_characters_separator,
1441 final String character_separator,
1442 final String title_for_html,
1443 final String prefix_for_html,
1444 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
1445 final SortedSet<String> all_pfams_encountered,
1446 final SortedSet<String> pfams_gained_or_lost,
1447 final String suffix_for_per_node_events_file,
1448 final Map<String, Integer> tax_code_to_id_map ) {
// Precondition checks: GO namespace limiting and detailed HTML output
// both require the GO maps to be present and non-empty.
1449 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1450 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
1452 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
1453 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
1455 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1456 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
1458 final File outfile = new File( filename );
1459 checkForOutputFileWriteability( outfile );
// Sort identifiers for deterministic report order.
1460 final SortedSet<String> sorted_ids = new TreeSet<String>();
1461 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1462 sorted_ids.add( matrix.getIdentifier( i ) );
1465 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
1466 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
1467 domain_combinations,
1470 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1471 File per_node_go_mapped_domain_gain_loss_outfile = null;
1472 int per_node_counter = 0;
// --- HTML header and index table of identifiers ---
1473 out.write( "<html>" );
1474 out.write( SurfacingConstants.NL );
1475 addHtmlHead( out, title_for_html );
1476 out.write( SurfacingConstants.NL );
1477 out.write( "<body>" );
1478 out.write( SurfacingConstants.NL );
1479 out.write( "<h1>" );
1480 out.write( SurfacingConstants.NL );
1481 out.write( title_for_html );
1482 out.write( SurfacingConstants.NL );
1483 out.write( "</h1>" );
1484 out.write( SurfacingConstants.NL );
1485 out.write( "<table>" );
1486 out.write( SurfacingConstants.NL );
1487 for( final String id : sorted_ids ) {
// Identifiers matching the SP-style taxonomy pattern are handled by
// the (elided) branch at the top of this loop.
1488 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1489 if ( matcher.matches() ) {
1492 out.write( "<tr>" );
1493 out.write( "<td>" );
1494 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
1495 out.write( "</td>" );
1496 out.write( "</tr>" );
1497 out.write( SurfacingConstants.NL );
1499 out.write( "</table>" );
1500 out.write( SurfacingConstants.NL );
// --- Per-identifier sections ---
1501 for( final String id : sorted_ids ) {
1502 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1503 if ( matcher.matches() ) {
1506 out.write( SurfacingConstants.NL );
1507 out.write( "<h2>" );
1508 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
1509 writeTaxonomyLinks( out, id, tax_code_to_id_map );
1510 out.write( "</h2>" );
1511 out.write( SurfacingConstants.NL );
1512 out.write( "<table>" );
1513 out.write( SurfacingConstants.NL );
1514 out.write( "<tr>" );
1515 out.write( "<td><b>" );
1516 out.write( "Pfam domain(s)" );
1517 out.write( "</b></td><td><b>" );
1518 out.write( "GO term acc" );
1519 out.write( "</b></td><td><b>" );
1520 out.write( "GO term" );
1521 out.write( "</b></td><td><b>" );
1522 out.write( "GO namespace" );
1523 out.write( "</b></td>" );
1524 out.write( "</tr>" );
1525 out.write( SurfacingConstants.NL );
1526 out.write( "</tr>" );
1527 out.write( SurfacingConstants.NL );
1528 per_node_counter = 0;
// One per-node events file per identifier (only when there are
// characters at all); deleted later if it stays empty.
1529 if ( matrix.getNumberOfCharacters() > 0 ) {
1530 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
1531 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
1532 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
1533 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
1534 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
1537 per_node_go_mapped_domain_gain_loss_outfile = null;
1538 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1540 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1542 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1543 if ( ( matrix.getState( id, c ) == state )
1544 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
1545 .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
1546 final String character = matrix.getCharacter( c );
// Characters may be single domains or binary combinations
// ("d0<SEPARATOR>d1"); split accordingly.
1547 String domain_0 = "";
1548 String domain_1 = "";
1549 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
1550 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
1551 if ( s.length != 2 ) {
1552 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
1553 + character + "]" );
1559 domain_0 = character;
1561 writeDomainData( domain_id_to_go_ids_map,
1568 character_separator,
1569 domain_id_to_secondary_features_maps,
// Track every Pfam seen, and those gained/lost when requested.
1571 all_pfams_encountered.add( domain_0 );
1572 if ( pfams_gained_or_lost != null ) {
1573 pfams_gained_or_lost.add( domain_0 );
1575 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
1576 all_pfams_encountered.add( domain_1 );
1577 if ( pfams_gained_or_lost != null ) {
1578 pfams_gained_or_lost.add( domain_1 );
1581 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1582 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
// Close the per-node file; delete it if nothing was written into it.
1589 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
1590 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
1591 if ( per_node_counter < 1 ) {
1592 per_node_go_mapped_domain_gain_loss_outfile.delete();
1594 per_node_counter = 0;
1596 out.write( "</table>" );
1597 out.write( SurfacingConstants.NL );
1598 out.write( "<hr>" );
1599 out.write( SurfacingConstants.NL );
1600 } // for( final String id : sorted_ids ) {
1601 out.write( "</body>" );
1602 out.write( SurfacingConstants.NL );
1603 out.write( "</html>" );
1604 out.write( SurfacingConstants.NL );
1608 catch ( final IOException e ) {
1609 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1611 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
// Writes one genome's domain combination counts to a file (named from
// input_file_properties[i][1] plus suffix, under output_dir when given)
// and appends a row of domain-promiscuity statistics (mean, SD, median,
// min, max, N, most promiscuous domains) to the shared per-genome writer.
// NOTE(review): the "final int i" parameter, try-openers, out.close()
// and some braces are on source lines elided from this view.
1614 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
1615 final File output_dir,
1616 final Writer per_genome_domain_promiscuity_statistics_writer,
1617 final GenomeWideCombinableDomains gwcd,
1619 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1620 File dc_outfile = new File( input_file_properties[ i ][ 1 ]
1621 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1622 if ( output_dir != null ) {
1623 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
1625 checkForOutputFileWriteability( dc_outfile );
1627 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
1628 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
1631 catch ( final IOException e ) {
1632 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1634 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
1636 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 1 ] + "\t" );
1637 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
// Sample SD is undefined for fewer than two observations.
1638 if ( stats.getN() < 2 ) {
1639 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
1642 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
1643 .sampleStandardDeviation() ) + "\t" );
1645 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
1646 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
1647 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
1648 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
1649 final SortedSet<String> mpds = gwcd.getMostPromiscuosDomain();
1650 for( final String mpd : mpds ) {
1651 per_genome_domain_promiscuity_statistics_writer.write( mpd + " " );
1653 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
1655 catch ( final IOException e ) {
1656 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Status message: include species field only when three properties exist.
1658 if ( input_file_properties[ i ].length == 3 ) {
1659 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
1660 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1661 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
1664 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
1665 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
1666 + dc_outfile + "\"" );
// Writes domain similarities, one row per DomainSimilarity, either to a single
// writer or to per-first-letter "split" writers; for the HTML print option each
// writer additionally receives a page prologue with a summary-statistics table
// and (when built) an ASCII histogram.  Statistics are accumulated over the
// value selected by sort_field and returned (per the declared return type).
1670 public static DescriptiveStatistics writeDomainSimilaritiesToFile( final StringBuilder html_desc,
1671 final StringBuilder html_title,
1672 final Writer single_writer,
1673 Map<Character, Writer> split_writers,
1674 final SortedSet<DomainSimilarity> similarities,
1675 final boolean treat_as_binary,
1676 final List<Species> species_order,
1677 final PrintableDomainSimilarity.PRINT_OPTION print_option,
1678 final DomainSimilarity.DomainSimilaritySortField sort_field,
1679 final DomainSimilarity.DomainSimilarityScoring scoring,
1680 final boolean verbose,
1681 final Map<String, Integer> tax_code_to_id_map )
1682 throws IOException {
1683 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
// Histogram title describes the quantity chosen by sort_field.
1684 String histogram_title = null;
1685 switch ( sort_field ) {
1686 case ABS_MAX_COUNTS_DIFFERENCE:
1687 if ( treat_as_binary ) {
1688 histogram_title = "absolute counts difference:";
1691 histogram_title = "absolute (maximal) counts difference:";
1694 case MAX_COUNTS_DIFFERENCE:
1695 if ( treat_as_binary ) {
1696 histogram_title = "counts difference:";
1699 histogram_title = "(maximal) counts difference:";
1703 histogram_title = "score mean:";
1706 histogram_title = "score minimum:";
1709 histogram_title = "score maximum:";
1711 case MAX_DIFFERENCE:
1712 if ( treat_as_binary ) {
1713 histogram_title = "difference:";
1716 histogram_title = "(maximal) difference:";
1720 histogram_title = "score mean:";
1723 histogram_title = "score standard deviation:";
1726 histogram_title = "species number:";
1729 throw new AssertionError( "Unknown sort field: " + sort_field );
// Accumulate, per similarity, the value selected by sort_field.
1731 for( final DomainSimilarity similarity : similarities ) {
1732 switch ( sort_field ) {
1733 case ABS_MAX_COUNTS_DIFFERENCE:
1734 stats.addValue( Math.abs( similarity.getMaximalDifferenceInCounts() ) );
1736 case MAX_COUNTS_DIFFERENCE:
1737 stats.addValue( similarity.getMaximalDifferenceInCounts() );
1740 stats.addValue( similarity.getMeanSimilarityScore() );
1743 stats.addValue( similarity.getMinimalSimilarityScore() );
1746 stats.addValue( similarity.getMaximalSimilarityScore() );
1748 case MAX_DIFFERENCE:
1749 stats.addValue( similarity.getMaximalDifference() );
1752 stats.addValue( similarity.getMeanSimilarityScore() );
1755 stats.addValue( similarity.getStandardDeviationOfSimilarityScore() );
1758 stats.addValue( similarity.getSpecies().size() );
1761 throw new AssertionError( "Unknown sort field: " + sort_field );
1764 AsciiHistogram histo = null;
// NOTE(review): "stats.getMin() < stats.getMin()" is always false, so the
// histogram is never created; this almost certainly was meant to read
// "stats.getMin() < stats.getMax()" (i.e. only build a histogram when the
// values actually spread) -- confirm and fix.
1765 if ( stats.getMin() < stats.getMin() ) {
1766 histo = new AsciiHistogram( stats, histogram_title );
// Console summary of the accumulated statistics.
1769 if ( histo != null ) {
1770 System.out.println( histo.toStringBuffer( 20, '|', 40, 5 ) );
1772 System.out.println();
1773 System.out.println( "N : " + stats.getN() );
1774 System.out.println( "Min : " + stats.getMin() );
1775 System.out.println( "Max : " + stats.getMax() );
1776 System.out.println( "Mean : " + stats.arithmeticMean() );
// Sample SD and skewness need at least two data points.
1777 if ( stats.getN() > 1 ) {
1778 System.out.println( "SD : " + stats.sampleStandardDeviation() );
1781 System.out.println( "SD : n/a" );
1783 System.out.println( "Median : " + stats.median() );
1784 if ( stats.getN() > 1 ) {
1785 System.out.println( "Pearsonian skewness : " + stats.pearsonianSkewness() );
1788 System.out.println( "Pearsonian skewness : n/a" );
// With only a single writer supplied, treat it as one split writer keyed '_'
// so the remainder of the method can work on split_writers uniformly.
1791 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
1792 split_writers = new HashMap<Character, Writer>();
1793 split_writers.put( '_', single_writer );
// HTML page prologue + statistics table (not emitted for SIMPLE_TAB_DELIMITED).
1795 switch ( print_option ) {
1796 case SIMPLE_TAB_DELIMITED:
1799 for( final Character key : split_writers.keySet() ) {
1800 final Writer w = split_writers.get( key );
1801 w.write( "<html>" );
1802 w.write( SurfacingConstants.NL );
1804 addHtmlHead( w, "DCs (" + html_title + ") " + key.toString().toUpperCase() );
1807 addHtmlHead( w, "DCs (" + html_title + ")" );
1809 w.write( SurfacingConstants.NL );
1810 w.write( "<body>" );
1811 w.write( SurfacingConstants.NL );
1812 w.write( html_desc.toString() );
1813 w.write( SurfacingConstants.NL );
1816 w.write( SurfacingConstants.NL );
1817 w.write( "<tt><pre>" );
1818 w.write( SurfacingConstants.NL );
1819 if ( histo != null ) {
1820 w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
1821 w.write( SurfacingConstants.NL );
1823 w.write( "</pre></tt>" );
1824 w.write( SurfacingConstants.NL );
1825 w.write( "<table>" );
1826 w.write( SurfacingConstants.NL );
1827 w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
1828 w.write( SurfacingConstants.NL );
1829 w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
1830 w.write( SurfacingConstants.NL );
1831 w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
1832 w.write( SurfacingConstants.NL );
1833 w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
1834 w.write( SurfacingConstants.NL );
1835 if ( stats.getN() > 1 ) {
1836 w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
1839 w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
1841 w.write( SurfacingConstants.NL );
1842 w.write( "<tr><td>Median: </td><td>" + stats.median() + "</td></tr>" );
1843 w.write( SurfacingConstants.NL );
1844 if ( stats.getN() > 1 ) {
1845 w.write( "<tr><td>Pearsonian skewness: </td><td>" + stats.pearsonianSkewness() + "</td></tr>" );
1848 w.write( "<tr><td>Pearsonian skewness: </td><td>n/a</td></tr>" );
1850 w.write( SurfacingConstants.NL );
1851 w.write( "</table>" );
1852 w.write( SurfacingConstants.NL );
1854 w.write( SurfacingConstants.NL );
1856 w.write( SurfacingConstants.NL );
1858 w.write( SurfacingConstants.NL );
1859 w.write( "<table>" );
1860 w.write( SurfacingConstants.NL );
1864 for( final Writer w : split_writers.values() ) {
1865 w.write( SurfacingConstants.NL );
// One output row per similarity.  Split output is routed by the lower-cased
// first character of the domain id, falling back to the '0' bucket when no
// writer is registered for that character.
1867 for( final DomainSimilarity similarity : similarities ) {
1868 if ( ( species_order != null ) && !species_order.isEmpty() ) {
1869 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
1871 if ( single_writer != null ) {
1872 single_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map ).toString() );
1873 single_writer.write( SurfacingConstants.NL );
1876 Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
1878 if ( local_writer == null ) {
1879 local_writer = split_writers.get( '0' );
1881 local_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map ).toString() );
1882 local_writer.write( SurfacingConstants.NL );
1884 // for( final Writer w : split_writers.values() ) {
1885 //w.write( SurfacingConstants.NL );
// HTML page epilogue for every split writer.
1888 switch ( print_option ) {
1890 for( final Writer w : split_writers.values() ) {
1891 w.write( SurfacingConstants.NL );
1892 w.write( "</table>" );
1893 w.write( SurfacingConstants.NL );
1894 w.write( "</font>" );
1895 w.write( SurfacingConstants.NL );
1896 w.write( "</body>" );
1897 w.write( SurfacingConstants.NL );
1898 w.write( "</html>" );
1899 w.write( SurfacingConstants.NL );
1903 for( final Writer w : split_writers.values() ) {
// Writes a character-state matrix to "filename" in the given format.
// An I/O failure is fatal (ForesterUtil.fatalError terminates the program),
// so the trailing success message is only reached on success.
// NOTE(review): only e.getMessage() is reported; the exception cause/stack is
// dropped -- consider passing the exception itself.
1909 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
1910 final String filename,
1911 final Format format ) {
1912 final File outfile = new File( filename );
// Aborts (fatal error) if the output file cannot be written.
1913 checkForOutputFileWriteability( outfile );
1915 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1916 matrix.toWriter( out, format );
1920 catch ( final IOException e ) {
1921 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1923 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
// Writes a list of distance matrices, each rendered in PHYLIP format and
// followed by a line separator, into one output file.  An I/O failure is
// fatal (program terminates via ForesterUtil.fatalError).
1926 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
1927 checkForOutputFileWriteability( matrix_outfile );
1929 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
1930 for( final DistanceMatrix distance_matrix : matrices ) {
1931 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
1932 out.write( ForesterUtil.LINE_SEPARATOR );
1937 catch ( final IOException e ) {
1938 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1940 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
// Writes "phylogeny" to "filename" as phyloXML (phylogenetic-markup-language).
// Unlike the matrix writers, an I/O failure here is only a warning (non-fatal).
// NOTE(review): the "Wrote phylogeny" success message appears to be printed
// after the catch block even when the write failed -- confirm intent.
1943 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
1944 final PhylogenyWriter writer = new PhylogenyWriter();
1946 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
1948 catch ( final IOException e ) {
1949 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
1952 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
// Appends a bracketed list of HTML taxonomy search links for "species" to
// "writer": a UniProt taxonomy link (only when an id mapping exists), then
// EOL, Google Scholar and Google web-search links separated by '|'.
// Links are only emitted for species codes longer than one character whose
// first underscore, if any, is at index 0 (indexOf('_') < 1 accepts -1 or 0),
// i.e. not for "Genus_species"-style names.
1955 public static void writeTaxonomyLinks( final Writer writer,
1956 final String species,
1957 final Map<String, Integer> tax_code_to_id_map ) throws IOException {
1958 if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
1959 writer.write( " [" );
// UniProt needs a numeric taxonomy id, so it is conditional on the mapping.
1960 if ( ( tax_code_to_id_map != null ) && tax_code_to_id_map.containsKey( species ) ) {
1961 writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_TAXONOMY_ID_LINK
1962 + tax_code_to_id_map.get( species ) + "\" target=\"taxonomy_window\">uniprot</a>" );
1965 writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
1966 + "\" target=\"taxonomy_window\">eol</a>" );
1967 writer.write( "|" );
1968 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_SCHOLAR_SEARCH + species
1969 + "\" target=\"taxonomy_window\">scholar</a>" );
1970 writer.write( "|" );
1971 writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_WEB_SEARCH_LINK + species
1972 + "\" target=\"taxonomy_window\">google</a>" );
1974 writer.write( "]" );
// Tallies key "s" in the count map: increments an existing count.
// NOTE(review): Map.merge( s, 1, Integer::sum ) would express this tally in
// one call (requires Java 8+).
1978 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
1979 if ( map.containsKey( s ) ) {
1980 map.put( s, map.get( s ) + 1 );
// Analyzes, on a phylogeny annotated with gained binary characters (Fitch
// parsimony), domain combinations (DCs, stringified as "A=B") that were gained
// independently on multiple branches.  Writes: a histogram of gain counts, DC
// lists per gain count (plain, and split into single domains for GO mapping
// with and without duplicates), LCA taxonomic-rank and ancestor-species tallies
// for repeatedly gained DCs, and -- when requested -- protein-length /
// domain-length / domain-count statistics.  I/O failures are reported as
// non-fatal warnings.
1987 private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
1988 final String outfilename_for_counts,
1989 final String outfilename_for_dc,
1990 final String outfilename_for_dc_for_go_mapping,
1991 final String outfilename_for_dc_for_go_mapping_unique,
1992 final String outfilename_for_rank_counts,
1993 final String outfilename_for_ancestor_species_counts,
1994 final String outfilename_for_protein_stats,
1995 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
1996 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
1997 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
2000 // if ( protein_length_stats_by_dc != null ) {
2001 // for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
2002 // System.out.print( entry.getKey().toString() );
2003 // System.out.print( ": " );
2004 // double[] a = entry.getValue().getDataAsDoubleArray();
2005 // for( int i = 0; i < a.length; i++ ) {
2006 // System.out.print( a[ i ] + " " );
2008 // System.out.println();
2011 // if ( domain_number_stats_by_dc != null ) {
2012 // for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
2013 // System.out.print( entry.getKey().toString() );
2014 // System.out.print( ": " );
2015 // double[] a = entry.getValue().getDataAsDoubleArray();
2016 // for( int i = 0; i < a.length; i++ ) {
2017 // System.out.print( a[ i ] + " " );
2019 // System.out.println();
2023 final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
2024 final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
2025 final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
2026 final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
// Count, over all nodes (postorder), how often each DC was gained on the tree.
2027 final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
2028 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
2029 final PhylogenyNode n = it.next();
2030 final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
2031 for( final String dc : gained_dc ) {
2032 if ( dc_gain_counts.containsKey( dc ) ) {
2033 dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
2036 dc_gain_counts.put( dc, 1 );
// Histogram: number-of-independent-gains -> number of DCs with that count,
// plus per-count DC lists and per-count statistics accumulators.
2040 final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
2041 final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
2042 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
2043 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
2044 final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
2045 final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
2046 final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
2047 final Set<String> dcs = dc_gain_counts.keySet();
2048 final SortedSet<String> more_than_once = new TreeSet<String>();
2049 final DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
2050 final DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
2051 final DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
2052 final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
2053 long gained_multiple_times_domain_length_sum = 0;
2054 long gained_once_domain_length_sum = 0;
2055 long gained_multiple_times_domain_length_count = 0;
2056 long gained_once_domain_length_count = 0;
// Bucket every DC by its gain count; feed DCs into the "once" vs "multiple
// times" accumulators for the summary statistics written at the end.
2057 for( final String dc : dcs ) {
2058 final int count = dc_gain_counts.get( dc );
2059 if ( histogram.containsKey( count ) ) {
2060 histogram.put( count, histogram.get( count ) + 1 );
2061 domain_lists.get( count ).append( ", " + dc );
2062 domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
2063 domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
2066 histogram.put( count, 1 );
2067 domain_lists.put( count, new StringBuilder( dc ) );
2068 final PriorityQueue<String> q = new PriorityQueue<String>();
2069 q.addAll( splitDomainCombination( dc ) );
2070 domain_lists_go.put( count, q );
2071 final SortedSet<String> set = new TreeSet<String>();
2072 set.addAll( splitDomainCombination( dc ) );
2073 domain_lists_go_unique.put( count, set );
// Per-gain-count statistics: mean protein length, mean domain number, and
// mean lengths of the DC's two constituent domains.
2075 if ( protein_length_stats_by_dc != null ) {
2076 if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
2077 dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
2079 dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
2080 .arithmeticMean() );
2082 if ( domain_number_stats_by_dc != null ) {
2083 if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
2084 dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
2086 dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
2087 .arithmeticMean() );
2089 if ( domain_length_stats_by_domain != null ) {
2090 if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
2091 dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
// "A=B" -> the two domain ids of the combination.
2093 final String[] ds = dc.split( "=" );
2094 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
2095 .get( ds[ 0 ] ).arithmeticMean() );
2096 dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
2097 .get( ds[ 1 ] ).arithmeticMean() );
// DCs gained more than once: remember them and accumulate their raw data
// into the "gained multiple times" statistics.
2100 more_than_once.add( dc );
2101 if ( protein_length_stats_by_dc != null ) {
2102 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2103 for( final double element : s.getData() ) {
2104 gained_multiple_times_lengths_stats.addValue( element );
2107 if ( domain_number_stats_by_dc != null ) {
2108 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2109 for( final double element : s.getData() ) {
2110 gained_multiple_times_domain_count_stats.addValue( element );
2113 if ( domain_length_stats_by_domain != null ) {
2114 final String[] ds = dc.split( "=" );
2115 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2116 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2117 for( final double element : s0.getData() ) {
2118 gained_multiple_times_domain_length_sum += element;
2119 ++gained_multiple_times_domain_length_count;
2121 for( final double element : s1.getData() ) {
2122 gained_multiple_times_domain_length_sum += element;
2123 ++gained_multiple_times_domain_length_count;
// DCs gained exactly once: same accumulation into the "gained once" stats.
2128 if ( protein_length_stats_by_dc != null ) {
2129 final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
2130 for( final double element : s.getData() ) {
2131 gained_once_lengths_stats.addValue( element );
2134 if ( domain_number_stats_by_dc != null ) {
2135 final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
2136 for( final double element : s.getData() ) {
2137 gained_once_domain_count_stats.addValue( element );
2140 if ( domain_length_stats_by_domain != null ) {
2141 final String[] ds = dc.split( "=" );
2142 final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
2143 final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
2144 for( final double element : s0.getData() ) {
2145 gained_once_domain_length_sum += element;
2146 ++gained_once_domain_length_count;
2148 for( final double element : s1.getData() ) {
2149 gained_once_domain_length_sum += element;
2150 ++gained_once_domain_length_count;
// Write the histogram and the per-count DC lists (sorted copies for the GO
// mapping output; the unique set is already sorted).
2155 final Set<Integer> histogram_keys = histogram.keySet();
2156 for( final Integer histogram_key : histogram_keys ) {
2157 final int count = histogram.get( histogram_key );
2158 final StringBuilder dc = domain_lists.get( histogram_key );
2159 out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
2160 out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
2161 out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
2162 final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
2163 Arrays.sort( sorted );
2164 for( final Object domain : sorted ) {
2165 out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
2167 out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
2168 for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
2169 out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
2174 out_dc_for_go_mapping.close();
2175 out_dc_for_go_mapping_unique.close();
// For every DC gained more than once: compute the LCA of each pair of
// external nodes that gained it and tally the LCA's taxonomic rank and
// ancestor-species name.  NOTE(review): this pairwise loop is O(k^2) LCA
// computations per DC.
2176 final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
2177 final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
2178 for( final String dc : more_than_once ) {
2179 final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
2180 for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
2181 final PhylogenyNode n = it.next();
2182 if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
2186 for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
2187 for( int j = i + 1; j < nodes.size(); ++j ) {
2188 final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
2189 String rank = "unknown";
2190 if ( lca.getNodeData().isHasTaxonomy()
2191 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
2192 rank = lca.getNodeData().getTaxonomy().getRank();
2194 addToCountMap( lca_rank_counts, rank );
// Ancestor species name preference: scientific name > common name > node name.
2196 if ( lca.getNodeData().isHasTaxonomy()
2197 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
2198 lca_species = lca.getNodeData().getTaxonomy().getScientificName();
2200 else if ( lca.getNodeData().isHasTaxonomy()
2201 && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
2202 lca_species = lca.getNodeData().getTaxonomy().getCommonName();
2205 lca_species = lca.getName();
2207 addToCountMap( lca_ancestor_species_counts, lca_species );
2211 final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
2212 final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
2213 ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
2214 ForesterUtil.map2writer( out_for_ancestor_species_counts,
2215 lca_ancestor_species_counts,
2217 ForesterUtil.LINE_SEPARATOR );
2218 out_for_rank_counts.close();
2219 out_for_ancestor_species_counts.close();
// Optional protein/domain statistics summary, only when a filename was given
// and at least one statistics map is available.
2220 if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
2221 && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
2222 final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
2223 w.write( "Domain Lengths: " );
2225 if ( domain_length_stats_by_domain != null ) {
2226 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
2228 w.write( entry.getKey().toString() );
2229 w.write( "\t" + entry.getValue().arithmeticMean() );
2230 w.write( "\t" + entry.getValue().median() );
2237 w.write( "Protein Lengths: " );
2239 if ( protein_length_stats_by_dc != null ) {
2240 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
2242 w.write( entry.getKey().toString() );
2243 w.write( "\t" + entry.getValue().arithmeticMean() );
2244 w.write( "\t" + entry.getValue().median() );
2251 w.write( "Number of domains: " );
2253 if ( domain_number_stats_by_dc != null ) {
2254 for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
2256 w.write( entry.getKey().toString() );
2257 w.write( "\t" + entry.getValue().arithmeticMean() );
2258 w.write( "\t" + entry.getValue().median() );
// NOTE(review): the Avg computations below divide by a count that is zero
// when no domain-length data was collected -- yields NaN/Infinity in the
// output rather than an error; confirm this is acceptable.
2265 w.write( "Gained once, domain lengths:" );
2267 w.write( "N: " + gained_once_domain_length_count );
2269 w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
2272 w.write( "Gained multiple times, domain lengths:" );
2274 w.write( "N: " + gained_multiple_times_domain_length_count );
2277 + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
2282 w.write( "Gained once, protein lengths:" );
2284 w.write( gained_once_lengths_stats.toString() );
2287 w.write( "Gained once, domain counts:" );
2289 w.write( gained_once_domain_count_stats.toString() );
2292 w.write( "Gained multiple times, protein lengths:" );
2294 w.write( gained_multiple_times_lengths_stats.toString() );
2297 w.write( "Gained multiple times, domain counts:" );
2299 w.write( gained_multiple_times_domain_count_stats.toString() );
2304 catch ( final IOException e ) {
2305 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2307 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
2308 + outfilename_for_counts + "]" );
2309 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
2310 + outfilename_for_dc + "]" );
2311 ForesterUtil.programMessage( surfacing.PRG_NAME,
2312 "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
2313 + outfilename_for_dc_for_go_mapping + "]" );
2314 ForesterUtil.programMessage( surfacing.PRG_NAME,
2315 "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
2316 + outfilename_for_dc_for_go_mapping_unique + "]" );
// Collects, over all descendants of "subtree_root", the binary characters
// (domains / domain combinations) that were gained (get_gains == true) or
// lost (get_gains == false), returned as one sorted, duplicate-free set.
2319 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
2320 final boolean get_gains ) {
2321 final SortedSet<String> domains = new TreeSet<String>();
2322 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
2323 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
2325 domains.addAll( chars.getGainedCharacters() );
2328 domains.addAll( chars.getLostCharacters() );
// Builds (creating directories on demand) the nested output directory for
// per-node domain gain/loss files:
//   <parent of outfile>/<base_dir>/(DC|DOMAINS)/(GAINS|LOSSES|PRESENT)
// and returns the innermost directory.
// NOTE(review): the mkdir() return values are ignored, so a failed directory
// creation is silently carried forward -- consider checking them.
2334 private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
2335 final boolean domain_combinations,
2336 final CharacterStateMatrix.GainLossStates state,
2337 final String outfile ) {
2338 File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
2339 + ForesterUtil.FILE_SEPARATOR + base_dir );
2340 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2341 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
// Second level: domain combinations vs. single domains.
2343 if ( domain_combinations ) {
2344 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2345 + ForesterUtil.FILE_SEPARATOR + "DC" );
2348 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2349 + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
2351 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2352 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
// Third level: gain/loss state ("PRESENT" used for any other state).
2354 if ( state == GainLossStates.GAIN ) {
2355 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2356 + ForesterUtil.FILE_SEPARATOR + "GAINS" );
2358 else if ( state == GainLossStates.LOSS ) {
2359 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2360 + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
2363 per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2364 + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
2366 if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
2367 per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
2369 return per_node_go_mapped_domain_gain_loss_files_base_dir;
// Flattens a genome's combinable-domains map into the sorted set of all
// binary domain combinations present in that genome (duplicates collapse
// via the TreeSet).
2372 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
2373 final SortedMap<String, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
2374 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
2375 for( final String domain_id : cds.keySet() ) {
2376 final CombinableDomains cd = cds.get( domain_id );
2377 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
2379 return binary_combinations;
// Splits a stringified binary domain combination of the form "A=B" into its
// two domain ids; any other shape is reported as an error.
2382 private static List<String> splitDomainCombination( final String dc ) {
2383 final String[] s = dc.split( "=" );
2384 if ( s.length != 2 ) {
2385 ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
2389 final List<String> l = new ArrayList<String>( 2 );
2395 private static void writeAllEncounteredPfamsToFile( final Map<String, List<GoId>> domain_id_to_go_ids_map,
2396 final Map<GoId, GoTerm> go_id_to_term_map,
2397 final String outfile_name,
2398 final SortedSet<String> all_pfams_encountered ) {
2399 final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
2400 final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
2401 + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
2402 final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
2403 int biological_process_counter = 0;
2404 int cellular_component_counter = 0;
2405 int molecular_function_counter = 0;
2406 int pfams_with_mappings_counter = 0;
2407 int pfams_without_mappings_counter = 0;
2408 int pfams_without_mappings_to_bp_or_mf_counter = 0;
2409 int pfams_with_mappings_to_bp_or_mf_counter = 0;
2411 final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
2412 final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
2413 final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
2414 summary_writer.write( "# Pfam to GO mapping summary" );
2415 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2416 summary_writer.write( "# Actual summary is at the end of this file." );
2417 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2418 summary_writer.write( "# Encountered Pfams without a GO mapping:" );
2419 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2420 for( final String pfam : all_pfams_encountered ) {
2421 all_pfams_encountered_writer.write( pfam );
2422 all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
2423 final String domain_id = new String( pfam );
2424 if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
2425 ++pfams_with_mappings_counter;
2426 all_pfams_encountered_with_go_annotation_writer.write( pfam );
2427 all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
2428 final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
2429 boolean maps_to_bp = false;
2430 boolean maps_to_cc = false;
2431 boolean maps_to_mf = false;
2432 for( final GoId go_id : go_ids ) {
2433 final GoTerm go_term = go_id_to_term_map.get( go_id );
2434 if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
2437 else if ( go_term.getGoNameSpace().isCellularComponent() ) {
2440 else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
2445 ++biological_process_counter;
2448 ++cellular_component_counter;
2451 ++molecular_function_counter;
2453 if ( maps_to_bp || maps_to_mf ) {
2454 ++pfams_with_mappings_to_bp_or_mf_counter;
2457 ++pfams_without_mappings_to_bp_or_mf_counter;
2461 ++pfams_without_mappings_to_bp_or_mf_counter;
2462 ++pfams_without_mappings_counter;
2463 summary_writer.write( pfam );
2464 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2467 all_pfams_encountered_writer.close();
2468 all_pfams_encountered_with_go_annotation_writer.close();
2469 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
2470 + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
2471 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
2472 + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
2474 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
2475 + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
2476 + encountered_pfams_summary_file + "\"" );
2477 ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered : "
2478 + all_pfams_encountered.size() );
2479 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping : "
2480 + pfams_without_mappings_counter + " ["
2481 + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2482 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
2483 + pfams_without_mappings_to_bp_or_mf_counter + " ["
2484 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2485 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping : "
2486 + pfams_with_mappings_counter + " ["
2487 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2488 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func. : "
2489 + pfams_with_mappings_to_bp_or_mf_counter + " ["
2490 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2491 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to biological process: "
2492 + biological_process_counter + " ["
2493 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
2494 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to molecular function: "
2495 + molecular_function_counter + " ["
2496 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
2497 ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to cellular component: "
2498 + cellular_component_counter + " ["
2499 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
2500 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2501 summary_writer.write( "# Sum of Pfams encountered : " + all_pfams_encountered.size() );
2502 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2503 summary_writer.write( "# Pfams without a mapping : " + pfams_without_mappings_counter
2504 + " [" + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2505 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2506 summary_writer.write( "# Pfams without mapping to proc. or func. : "
2507 + pfams_without_mappings_to_bp_or_mf_counter + " ["
2508 + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2509 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2510 summary_writer.write( "# Pfams with a mapping : " + pfams_with_mappings_counter + " ["
2511 + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
2512 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2513 summary_writer.write( "# Pfams with a mapping to proc. or func. : "
2514 + pfams_with_mappings_to_bp_or_mf_counter + " ["
2515 + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
2516 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2517 summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
2518 + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
2519 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2520 summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
2521 + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
2522 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2523 summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
2524 + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
2525 summary_writer.write( ForesterUtil.LINE_SEPARATOR );
2526 summary_writer.close();
2528 catch ( final IOException e ) {
2529 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2533 private static void writeDomainData( final Map<String, List<GoId>> domain_id_to_go_ids_map,
2534 final Map<GoId, GoTerm> go_id_to_term_map,
2535 final GoNameSpace go_namespace_limit,
2537 final String domain_0,
2538 final String domain_1,
2539 final String prefix_for_html,
2540 final String character_separator_for_non_html_output,
2541 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
2542 final Set<GoId> all_go_ids ) throws IOException {
2543 boolean any_go_annotation_present = false;
2544 boolean first_has_no_go = false;
2545 int domain_count = 2; // To distinguish between domains and binary domain combinations.
2546 if ( ForesterUtil.isEmpty( domain_1 ) ) {
2549 // The following has a difficult to understand logic.
2550 for( int d = 0; d < domain_count; ++d ) {
2551 List<GoId> go_ids = null;
2552 boolean go_annotation_present = false;
2554 if ( domain_id_to_go_ids_map.containsKey( domain_0 ) ) {
2555 go_annotation_present = true;
2556 any_go_annotation_present = true;
2557 go_ids = domain_id_to_go_ids_map.get( domain_0 );
2560 first_has_no_go = true;
2564 if ( domain_id_to_go_ids_map.containsKey( domain_1 ) ) {
2565 go_annotation_present = true;
2566 any_go_annotation_present = true;
2567 go_ids = domain_id_to_go_ids_map.get( domain_1 );
2570 if ( go_annotation_present ) {
2571 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
2572 for( final GoId go_id : go_ids ) {
2573 out.write( "<tr>" );
2576 writeDomainIdsToHtml( out,
2580 domain_id_to_secondary_features_maps );
2583 out.write( "<td></td>" );
2585 if ( !go_id_to_term_map.containsKey( go_id ) ) {
2586 throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
2588 final GoTerm go_term = go_id_to_term_map.get( go_id );
2589 if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
2590 // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
2591 final String go_id_str = go_id.getId();
2592 out.write( "<td>" );
2593 out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
2594 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
2595 out.write( "</td><td>" );
2596 out.write( go_term.getName() );
2597 if ( domain_count == 2 ) {
2598 out.write( " (" + d + ")" );
2600 out.write( "</td><td>" );
2601 // out.write( top );
2602 // out.write( "</td><td>" );
2604 out.write( go_term.getGoNameSpace().toShortString() );
2606 out.write( "</td>" );
2607 if ( all_go_ids != null ) {
2608 all_go_ids.add( go_id );
2612 out.write( "<td>" );
2613 out.write( "</td><td>" );
2614 out.write( "</td><td>" );
2615 out.write( "</td><td>" );
2616 out.write( "</td>" );
2618 out.write( "</tr>" );
2619 out.write( SurfacingConstants.NL );
2622 } // for( int d = 0; d < domain_count; ++d )
2623 if ( !any_go_annotation_present ) {
2624 out.write( "<tr>" );
2625 writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
2626 out.write( "<td>" );
2627 out.write( "</td><td>" );
2628 out.write( "</td><td>" );
2629 out.write( "</td><td>" );
2630 out.write( "</td>" );
2631 out.write( "</tr>" );
2632 out.write( SurfacingConstants.NL );
2636 private static void writeDomainIdsToHtml( final Writer out,
2637 final String domain_0,
2638 final String domain_1,
2639 final String prefix_for_detailed_html,
2640 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps )
2641 throws IOException {
2642 out.write( "<td>" );
2643 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
2644 out.write( prefix_for_detailed_html );
2647 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
2648 out.write( "</td>" );
2651 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
2652 final String domain_0,
2653 final String domain_1 ) throws IOException {
2654 individual_files_writer.write( domain_0 );
2655 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
2656 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2657 individual_files_writer.write( domain_1 );
2658 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
2662 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
2664 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
2665 for( final String pfam : pfams ) {
2666 writer.write( pfam );
2667 writer.write( ForesterUtil.LINE_SEPARATOR );
2670 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
2673 catch ( final IOException e ) {
2674 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
2678 private static void writeToNexus( final String outfile_name,
2679 final CharacterStateMatrix<BinaryStates> matrix,
2680 final Phylogeny phylogeny ) {
2681 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
2682 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
2685 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
2686 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
2687 phylogenies.add( phylogeny );
2689 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
2690 w.write( NexusConstants.NEXUS );
2691 w.write( ForesterUtil.LINE_SEPARATOR );
2692 my_matrix.writeNexusTaxaBlock( w );
2693 my_matrix.writeNexusBinaryChractersBlock( w );
2694 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
2697 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
2699 catch ( final IOException e ) {
2700 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2704 private static void writeToNexus( final String outfile_name,
2705 final DomainParsimonyCalculator domain_parsimony,
2706 final Phylogeny phylogeny ) {
2707 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
2708 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
2710 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
2711 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
2715 final static class DomainComparator implements Comparator<Domain> {
2717 final private boolean _ascending;
2719 public DomainComparator( final boolean ascending ) {
2720 _ascending = ascending;
2724 public final int compare( final Domain d0, final Domain d1 ) {
2725 if ( d0.getFrom() < d1.getFrom() ) {
2726 return _ascending ? -1 : 1;
2728 else if ( d0.getFrom() > d1.getFrom() ) {
2729 return _ascending ? 1 : -1;