3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.surfacing;
29 import java.awt.Color;
30 import java.io.BufferedWriter;
32 import java.io.FileWriter;
33 import java.io.IOException;
34 import java.io.Writer;
35 import java.text.DecimalFormat;
36 import java.text.NumberFormat;
37 import java.util.ArrayList;
38 import java.util.Arrays;
39 import java.util.Collections;
40 import java.util.Comparator;
41 import java.util.HashMap;
42 import java.util.HashSet;
43 import java.util.Iterator;
44 import java.util.List;
46 import java.util.Map.Entry;
47 import java.util.PriorityQueue;
49 import java.util.SortedMap;
50 import java.util.SortedSet;
51 import java.util.TreeMap;
52 import java.util.TreeSet;
53 import java.util.regex.Matcher;
54 import java.util.regex.Pattern;
56 import org.forester.application.surfacing;
57 import org.forester.evoinference.distance.NeighborJoining;
58 import org.forester.evoinference.matrix.character.BasicCharacterStateMatrix;
59 import org.forester.evoinference.matrix.character.CharacterStateMatrix;
60 import org.forester.evoinference.matrix.character.CharacterStateMatrix.BinaryStates;
61 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
62 import org.forester.evoinference.matrix.character.CharacterStateMatrix.GainLossStates;
63 import org.forester.evoinference.matrix.distance.BasicSymmetricalDistanceMatrix;
64 import org.forester.evoinference.matrix.distance.DistanceMatrix;
65 import org.forester.go.GoId;
66 import org.forester.go.GoNameSpace;
67 import org.forester.go.GoTerm;
68 import org.forester.go.PfamToGoMapping;
69 import org.forester.io.parsers.nexus.NexusConstants;
70 import org.forester.io.parsers.phyloxml.PhyloXmlUtil;
71 import org.forester.io.parsers.util.ParserUtils;
72 import org.forester.io.writers.PhylogenyWriter;
73 import org.forester.phylogeny.Phylogeny;
74 import org.forester.phylogeny.PhylogenyMethods;
75 import org.forester.phylogeny.PhylogenyNode;
76 import org.forester.phylogeny.PhylogenyNode.NH_CONVERSION_SUPPORT_VALUE_STYLE;
77 import org.forester.phylogeny.data.BinaryCharacters;
78 import org.forester.phylogeny.data.Confidence;
79 import org.forester.phylogeny.data.Taxonomy;
80 import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
81 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
82 import org.forester.protein.BasicDomain;
83 import org.forester.protein.BasicProtein;
84 import org.forester.protein.BinaryDomainCombination;
85 import org.forester.protein.Domain;
86 import org.forester.protein.Protein;
87 import org.forester.species.Species;
88 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
89 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
90 import org.forester.surfacing.PrintableDomainSimilarity.PRINT_OPTION;
91 import org.forester.util.AsciiHistogram;
92 import org.forester.util.BasicDescriptiveStatistics;
93 import org.forester.util.BasicTable;
94 import org.forester.util.BasicTableParser;
95 import org.forester.util.CommandLineArguments;
96 import org.forester.util.DescriptiveStatistics;
97 import org.forester.util.ForesterUtil;
98 import org.forester.util.TaxonomyColors;
100 public final class SurfacingUtil {
102 public final static Pattern PATTERN_SP_STYLE_TAXONOMY = Pattern.compile( "^[A-Z0-9]{3,5}$" );
103 private final static Map<String, String> _TAXCODE_HEXCOLORSTRING_MAP = new HashMap<String, String>();
104 private static final Comparator<Domain> ASCENDING_CONFIDENCE_VALUE_ORDER = new Comparator<Domain>() {
107 public int compare( final Domain d1,
109 if ( d1.getPerSequenceEvalue() < d2
110 .getPerSequenceEvalue() ) {
114 .getPerSequenceEvalue() > d2
115 .getPerSequenceEvalue() ) {
119 return d1.compareTo( d2 );
123 private final static NumberFormat FORMATTER_3 = new DecimalFormat( "0.000" );
125 private SurfacingUtil() {
126 // Hidden constructor.
129 public static void addAllBinaryDomainCombinationToSet( final GenomeWideCombinableDomains genome,
130 final SortedSet<BinaryDomainCombination> binary_domain_combinations ) {
131 final SortedMap<String, CombinableDomains> all_cd = genome.getAllCombinableDomainsIds();
132 for( final String domain_id : all_cd.keySet() ) {
133 binary_domain_combinations.addAll( all_cd.get( domain_id ).toBinaryDomainCombinations() );
137 public static void addAllDomainIdsToSet( final GenomeWideCombinableDomains genome,
138 final SortedSet<String> domain_ids ) {
139 final SortedSet<String> domains = genome.getAllDomainIds();
140 for( final String domain : domains ) {
141 domain_ids.add( domain );
145 public static DescriptiveStatistics calculateDescriptiveStatisticsForMeanValues( final Set<DomainSimilarity> similarities ) {
146 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
147 for( final DomainSimilarity similarity : similarities ) {
148 stats.addValue( similarity.getMeanSimilarityScore() );
153 public static void checkForOutputFileWriteability( final File outfile ) {
154 final String error = ForesterUtil.isWritableFile( outfile );
155 if ( !ForesterUtil.isEmpty( error ) ) {
156 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
160 public static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
161 final String[][] input_file_properties,
162 final String automated_pairwise_comparison_suffix,
163 final File outdir ) {
164 for( int i = 0; i < input_file_properties.length; ++i ) {
165 for( int j = 0; j < i; ++j ) {
166 final String species_i = input_file_properties[ i ][ 1 ];
167 final String species_j = input_file_properties[ j ][ 1 ];
168 String pairwise_similarities_output_file_str = surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i
169 + "_" + species_j + automated_pairwise_comparison_suffix;
170 switch ( domain_similarity_print_option ) {
172 if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
173 pairwise_similarities_output_file_str += ".html";
177 final String error = ForesterUtil
178 .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str : outdir
179 + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
180 if ( !ForesterUtil.isEmpty( error ) ) {
181 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
187 public static void collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
188 final BinaryDomainCombination.DomainCombinationType dc_type,
189 final List<BinaryDomainCombination> all_binary_domains_combination_gained,
190 final boolean get_gains ) {
191 final SortedSet<String> sorted_ids = new TreeSet<String>();
192 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
193 sorted_ids.add( matrix.getIdentifier( i ) );
195 for( final String id : sorted_ids ) {
196 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
197 if ( ( get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) )
198 || ( !get_gains && ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.LOSS ) ) ) {
199 if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) {
200 all_binary_domains_combination_gained.add( AdjactantDirectedBinaryDomainCombination
201 .createInstance( matrix.getCharacter( c ) ) );
203 else if ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED ) {
204 all_binary_domains_combination_gained.add( DirectedBinaryDomainCombination
205 .createInstance( matrix.getCharacter( c ) ) );
208 all_binary_domains_combination_gained.add( BasicBinaryDomainCombination.createInstance( matrix
209 .getCharacter( c ) ) );
216 public static Map<String, List<GoId>> createDomainIdToGoIdMap( final List<PfamToGoMapping> pfam_to_go_mappings ) {
217 final Map<String, List<GoId>> domain_id_to_go_ids_map = new HashMap<String, List<GoId>>( pfam_to_go_mappings.size() );
218 for( final PfamToGoMapping pfam_to_go : pfam_to_go_mappings ) {
219 if ( !domain_id_to_go_ids_map.containsKey( pfam_to_go.getKey() ) ) {
220 domain_id_to_go_ids_map.put( pfam_to_go.getKey(), new ArrayList<GoId>() );
222 domain_id_to_go_ids_map.get( pfam_to_go.getKey() ).add( pfam_to_go.getValue() );
224 return domain_id_to_go_ids_map;
227 public static Map<String, Set<String>> createDomainIdToSecondaryFeaturesMap( final File secondary_features_map_file )
229 final BasicTable<String> primary_table = BasicTableParser.parse( secondary_features_map_file, '\t' );
230 final Map<String, Set<String>> map = new TreeMap<String, Set<String>>();
231 for( int r = 0; r < primary_table.getNumberOfRows(); ++r ) {
232 final String domain_id = primary_table.getValue( 0, r );
233 if ( !map.containsKey( domain_id ) ) {
234 map.put( domain_id, new HashSet<String>() );
236 map.get( domain_id ).add( primary_table.getValue( 1, r ) );
241 public static Phylogeny createNjTreeBasedOnMatrixToFile( final File nj_tree_outfile, final DistanceMatrix distance ) {
242 checkForOutputFileWriteability( nj_tree_outfile );
243 final NeighborJoining nj = NeighborJoining.createInstance();
244 final Phylogeny phylogeny = nj.execute( ( BasicSymmetricalDistanceMatrix ) distance );
245 phylogeny.setName( nj_tree_outfile.getName() );
246 writePhylogenyToFile( phylogeny, nj_tree_outfile.toString() );
250 public static StringBuilder createParametersAsString( final boolean ignore_dufs,
251 final double e_value_max,
252 final int max_allowed_overlap,
253 final boolean no_engulfing_overlaps,
254 final File cutoff_scores_file,
255 final BinaryDomainCombination.DomainCombinationType dc_type ) {
256 final StringBuilder parameters_sb = new StringBuilder();
257 parameters_sb.append( "E-value: " + e_value_max );
258 if ( cutoff_scores_file != null ) {
259 parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
262 parameters_sb.append( ", Cutoff-scores-file: not-set" );
264 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
265 parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
268 parameters_sb.append( ", Max-overlap: not-set" );
270 if ( no_engulfing_overlaps ) {
271 parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
274 parameters_sb.append( ", Engulfing-overlaps: allowed" );
277 parameters_sb.append( ", Ignore-dufs: true" );
280 parameters_sb.append( ", Ignore-dufs: false" );
282 parameters_sb.append( ", DC type (if applicable): " + dc_type );
283 return parameters_sb;
286 public static void createSplitWriters( final File out_dir,
287 final String my_outfile,
288 final Map<Character, Writer> split_writers ) throws IOException {
289 split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
290 + "_domains_A.html" ) ) );
291 split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
292 + "_domains_B.html" ) ) );
293 split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
294 + "_domains_C.html" ) ) );
295 split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
296 + "_domains_D.html" ) ) );
297 split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
298 + "_domains_E.html" ) ) );
299 split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
300 + "_domains_F.html" ) ) );
301 split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
302 + "_domains_G.html" ) ) );
303 split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
304 + "_domains_H.html" ) ) );
305 split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
306 + "_domains_I.html" ) ) );
307 split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
308 + "_domains_J.html" ) ) );
309 split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
310 + "_domains_K.html" ) ) );
311 split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
312 + "_domains_L.html" ) ) );
313 split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
314 + "_domains_M.html" ) ) );
315 split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
316 + "_domains_N.html" ) ) );
317 split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
318 + "_domains_O.html" ) ) );
319 split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
320 + "_domains_P.html" ) ) );
321 split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
322 + "_domains_Q.html" ) ) );
323 split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
324 + "_domains_R.html" ) ) );
325 split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
326 + "_domains_S.html" ) ) );
327 split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
328 + "_domains_T.html" ) ) );
329 split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
330 + "_domains_U.html" ) ) );
331 split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
332 + "_domains_V.html" ) ) );
333 split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
334 + "_domains_W.html" ) ) );
335 split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
336 + "_domains_X.html" ) ) );
337 split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
338 + "_domains_Y.html" ) ) );
339 split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
340 + "_domains_Z.html" ) ) );
341 split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
342 + "_domains_0.html" ) ) );
345 public static Map<String, Integer> createTaxCodeToIdMap( final Phylogeny phy ) {
346 final Map<String, Integer> m = new HashMap<String, Integer>();
347 for( final PhylogenyNodeIterator iter = phy.iteratorExternalForward(); iter.hasNext(); ) {
348 final PhylogenyNode n = iter.next();
349 if ( n.getNodeData().isHasTaxonomy() ) {
350 final Taxonomy t = n.getNodeData().getTaxonomy();
351 final String c = t.getTaxonomyCode();
352 if ( !ForesterUtil.isEmpty( c ) ) {
353 if ( n.getNodeData().getTaxonomy() == null ) {
354 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
356 final String id = n.getNodeData().getTaxonomy().getIdentifier().getValue();
357 if ( ForesterUtil.isEmpty( id ) ) {
358 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy id for node " + n );
360 if ( m.containsKey( c ) ) {
361 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy code " + c + " is not unique" );
363 final int iid = Integer.valueOf( id );
364 if ( m.containsValue( iid ) ) {
365 ForesterUtil.fatalError( surfacing.PRG_NAME, "taxonomy id " + iid + " is not unique" );
371 ForesterUtil.fatalError( surfacing.PRG_NAME, "no taxonomy for node " + n );
377 public static void decoratePrintableDomainSimilarities( final SortedSet<DomainSimilarity> domain_similarities,
378 final Detailedness detailedness ) {
379 for( final DomainSimilarity domain_similarity : domain_similarities ) {
380 if ( domain_similarity instanceof PrintableDomainSimilarity ) {
381 final PrintableDomainSimilarity printable_domain_similarity = ( PrintableDomainSimilarity ) domain_similarity;
382 printable_domain_similarity.setDetailedness( detailedness );
387 public static void doit( final List<Protein> proteins,
388 final List<String> query_domain_ids_nc_order,
390 final String separator,
391 final String limit_to_species,
392 final Map<String, List<Integer>> average_protein_lengths_by_dc ) throws IOException {
393 for( final Protein protein : proteins ) {
394 if ( ForesterUtil.isEmpty( limit_to_species )
395 || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
396 if ( protein.contains( query_domain_ids_nc_order, true ) ) {
397 out.write( protein.getSpecies().getSpeciesId() );
398 out.write( separator );
399 out.write( protein.getProteinId().getId() );
400 out.write( separator );
402 final Set<String> visited_domain_ids = new HashSet<String>();
403 boolean first = true;
404 for( final Domain domain : protein.getProteinDomains() ) {
405 if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
406 visited_domain_ids.add( domain.getDomainId() );
413 out.write( domain.getDomainId() );
415 out.write( "" + domain.getTotalCount() );
420 out.write( separator );
421 if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
422 .equals( SurfacingConstants.NONE ) ) ) {
423 out.write( protein.getDescription() );
425 out.write( separator );
426 if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
427 .equals( SurfacingConstants.NONE ) ) ) {
428 out.write( protein.getAccession() );
430 out.write( SurfacingConstants.NL );
437 public static void domainsPerProteinsStatistics( final String genome,
438 final List<Protein> protein_list,
439 final DescriptiveStatistics all_genomes_domains_per_potein_stats,
440 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
441 final SortedSet<String> domains_which_are_always_single,
442 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not,
443 final SortedSet<String> domains_which_never_single,
444 final Writer writer ) {
445 final DescriptiveStatistics stats = new BasicDescriptiveStatistics();
446 for( final Protein protein : protein_list ) {
447 final int domains = protein.getNumberOfProteinDomains();
448 //System.out.println( domains );
449 stats.addValue( domains );
450 all_genomes_domains_per_potein_stats.addValue( domains );
451 if ( !all_genomes_domains_per_potein_histo.containsKey( domains ) ) {
452 all_genomes_domains_per_potein_histo.put( domains, 1 );
455 all_genomes_domains_per_potein_histo.put( domains,
456 1 + all_genomes_domains_per_potein_histo.get( domains ) );
458 if ( domains == 1 ) {
459 final String domain = protein.getProteinDomain( 0 ).getDomainId();
460 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
461 if ( domains_which_never_single.contains( domain ) ) {
462 domains_which_never_single.remove( domain );
463 domains_which_are_sometimes_single_sometimes_not.add( domain );
466 domains_which_are_always_single.add( domain );
470 else if ( domains > 1 ) {
471 for( final Domain d : protein.getProteinDomains() ) {
472 final String domain = d.getDomainId();
473 // System.out.println( domain );
474 if ( !domains_which_are_sometimes_single_sometimes_not.contains( domain ) ) {
475 if ( domains_which_are_always_single.contains( domain ) ) {
476 domains_which_are_always_single.remove( domain );
477 domains_which_are_sometimes_single_sometimes_not.add( domain );
480 domains_which_never_single.add( domain );
487 writer.write( genome );
488 writer.write( "\t" );
489 if ( stats.getN() >= 1 ) {
490 writer.write( stats.arithmeticMean() + "" );
491 writer.write( "\t" );
492 if ( stats.getN() >= 2 ) {
493 writer.write( stats.sampleStandardDeviation() + "" );
498 writer.write( "\t" );
499 writer.write( stats.median() + "" );
500 writer.write( "\t" );
501 writer.write( stats.getN() + "" );
502 writer.write( "\t" );
503 writer.write( stats.getMin() + "" );
504 writer.write( "\t" );
505 writer.write( stats.getMax() + "" );
508 writer.write( "\t" );
509 writer.write( "\t" );
510 writer.write( "\t" );
512 writer.write( "\t" );
513 writer.write( "\t" );
515 writer.write( "\n" );
517 catch ( final IOException e ) {
522 public static void executeDomainLengthAnalysis( final String[][] input_file_properties,
523 final int number_of_genomes,
524 final DomainLengthsTable domain_lengths_table,
525 final File outfile ) throws IOException {
526 final DecimalFormat df = new DecimalFormat( "#.00" );
527 checkForOutputFileWriteability( outfile );
528 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
529 out.write( "MEAN BASED STATISTICS PER SPECIES" );
530 out.write( ForesterUtil.LINE_SEPARATOR );
531 out.write( domain_lengths_table.createMeanBasedStatisticsPerSpeciesTable().toString() );
532 out.write( ForesterUtil.LINE_SEPARATOR );
533 out.write( ForesterUtil.LINE_SEPARATOR );
534 final List<DomainLengths> domain_lengths_list = domain_lengths_table.getDomainLengthsList();
535 out.write( "OUTLIER SPECIES PER DOMAIN (Z>=1.5)" );
536 out.write( ForesterUtil.LINE_SEPARATOR );
537 for( final DomainLengths domain_lengths : domain_lengths_list ) {
538 final List<Species> species_list = domain_lengths.getMeanBasedOutlierSpecies( 1.5 );
539 if ( species_list.size() > 0 ) {
540 out.write( domain_lengths.getDomainId() + "\t" );
541 for( final Species species : species_list ) {
542 out.write( species + "\t" );
544 out.write( ForesterUtil.LINE_SEPARATOR );
547 out.write( ForesterUtil.LINE_SEPARATOR );
548 out.write( ForesterUtil.LINE_SEPARATOR );
549 out.write( "OUTLIER SPECIES (Z 1.0)" );
550 out.write( ForesterUtil.LINE_SEPARATOR );
551 final DescriptiveStatistics stats_for_all_species = domain_lengths_table
552 .calculateMeanBasedStatisticsForAllSpecies();
553 out.write( stats_for_all_species.asSummary() );
554 out.write( ForesterUtil.LINE_SEPARATOR );
555 final AsciiHistogram histo = new AsciiHistogram( stats_for_all_species );
556 out.write( histo.toStringBuffer( 40, '=', 60, 4 ).toString() );
557 out.write( ForesterUtil.LINE_SEPARATOR );
558 final double population_sd = stats_for_all_species.sampleStandardDeviation();
559 final double population_mean = stats_for_all_species.arithmeticMean();
560 for( final Species species : domain_lengths_table.getSpecies() ) {
561 final double x = domain_lengths_table.calculateMeanBasedStatisticsForSpecies( species ).arithmeticMean();
562 final double z = ( x - population_mean ) / population_sd;
563 out.write( species + "\t" + z );
564 out.write( ForesterUtil.LINE_SEPARATOR );
566 out.write( ForesterUtil.LINE_SEPARATOR );
567 for( final Species species : domain_lengths_table.getSpecies() ) {
568 final DescriptiveStatistics stats_for_species = domain_lengths_table
569 .calculateMeanBasedStatisticsForSpecies( species );
570 final double x = stats_for_species.arithmeticMean();
571 final double z = ( x - population_mean ) / population_sd;
572 if ( ( z <= -1.0 ) || ( z >= 1.0 ) ) {
573 out.write( species + "\t" + df.format( z ) + "\t" + stats_for_species.asSummary() );
574 out.write( ForesterUtil.LINE_SEPARATOR );
582 * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
586 * @param all_bin_domain_combinations_changed
587 * @param sum_of_all_domains_encountered
588 * @param all_bin_domain_combinations_encountered
589 * @param is_gains_analysis
590 * @param protein_length_stats_by_dc
591 * @throws IOException
593 public static void executeFitchGainsAnalysis( final File output_file,
594 final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
595 final int sum_of_all_domains_encountered,
596 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
597 final boolean is_gains_analysis ) throws IOException {
598 checkForOutputFileWriteability( output_file );
599 final Writer out = ForesterUtil.createBufferedWriter( output_file );
600 final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
601 .listToSortedCountsMap( all_bin_domain_combinations_changed );
602 final SortedSet<String> all_domains_in_combination_changed_more_than_once = new TreeSet<String>();
603 final SortedSet<String> all_domains_in_combination_changed_only_once = new TreeSet<String>();
606 for( final Object bdc_object : bdc_to_counts.keySet() ) {
607 final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
608 final int count = bdc_to_counts.get( bdc_object );
610 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "count < 1 " );
612 out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
614 all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
615 all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
618 else if ( count == 1 ) {
619 all_domains_in_combination_changed_only_once.add( bdc.getId0() );
620 all_domains_in_combination_changed_only_once.add( bdc.getId1() );
624 final int all = all_bin_domain_combinations_encountered.size();
626 if ( !is_gains_analysis ) {
627 all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
628 never_lost = all_bin_domain_combinations_encountered.size();
629 for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
630 out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
633 if ( is_gains_analysis ) {
634 out.write( "Sum of all distinct domain combinations appearing once : " + one
635 + ForesterUtil.LINE_SEPARATOR );
636 out.write( "Sum of all distinct domain combinations appearing more than once : " + above_one
637 + ForesterUtil.LINE_SEPARATOR );
638 out.write( "Sum of all distinct domains in combinations apppearing only once : "
639 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
640 out.write( "Sum of all distinct domains in combinations apppearing more than once: "
641 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
644 out.write( "Sum of all distinct domain combinations never lost : " + never_lost
645 + ForesterUtil.LINE_SEPARATOR );
646 out.write( "Sum of all distinct domain combinations lost once : " + one
647 + ForesterUtil.LINE_SEPARATOR );
648 out.write( "Sum of all distinct domain combinations lost more than once : " + above_one
649 + ForesterUtil.LINE_SEPARATOR );
650 out.write( "Sum of all distinct domains in combinations lost only once : "
651 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
652 out.write( "Sum of all distinct domains in combinations lost more than once: "
653 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
655 out.write( "All binary combinations : " + all
656 + ForesterUtil.LINE_SEPARATOR );
657 out.write( "All domains : "
658 + sum_of_all_domains_encountered );
660 ForesterUtil.programMessage( surfacing.PRG_NAME,
661 "Wrote fitch domain combination dynamics counts analysis to \"" + output_file
667 * @param all_binary_domains_combination_lost_fitch
668 * @param use_last_in_fitch_parsimony
669 * @param consider_directedness_and_adjacency_for_bin_combinations
670 * @param all_binary_domains_combination_gained if null ignored, otherwise this is to list all binary domain combinations
671 * which were gained under unweighted (Fitch) parsimony.
673 public static void executeParsimonyAnalysis( final long random_number_seed_for_fitch_parsimony,
674 final boolean radomize_fitch_parsimony,
675 final String outfile_name,
676 final DomainParsimonyCalculator domain_parsimony,
677 final Phylogeny phylogeny,
678 final Map<String, List<GoId>> domain_id_to_go_ids_map,
679 final Map<GoId, GoTerm> go_id_to_term_map,
680 final GoNameSpace go_namespace_limit,
681 final String parameters_str,
682 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
683 final SortedSet<String> positive_filter,
684 final boolean output_binary_domain_combinations_for_graphs,
685 final List<BinaryDomainCombination> all_binary_domains_combination_gained_fitch,
686 final List<BinaryDomainCombination> all_binary_domains_combination_lost_fitch,
687 final BinaryDomainCombination.DomainCombinationType dc_type,
688 final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
689 final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
690 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain,
691 final Map<String, Integer> tax_code_to_id_map,
692 final boolean write_to_nexus,
693 final boolean use_last_in_fitch_parsimony ) {
694 final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
695 final String date_time = ForesterUtil.getCurrentDateTime();
696 final SortedSet<String> all_pfams_encountered = new TreeSet<String>();
697 final SortedSet<String> all_pfams_gained_as_domains = new TreeSet<String>();
698 final SortedSet<String> all_pfams_lost_as_domains = new TreeSet<String>();
699 final SortedSet<String> all_pfams_gained_as_dom_combinations = new TreeSet<String>();
700 final SortedSet<String> all_pfams_lost_as_dom_combinations = new TreeSet<String>();
701 if ( write_to_nexus ) {
702 writeToNexus( outfile_name, domain_parsimony, phylogeny );
706 Phylogeny local_phylogeny_l = phylogeny.copy();
707 if ( ( positive_filter != null ) && ( positive_filter.size() > 0 ) ) {
708 domain_parsimony.executeDolloParsimonyOnDomainPresence( positive_filter );
711 domain_parsimony.executeDolloParsimonyOnDomainPresence();
713 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
714 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
715 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
716 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS, Format.FORESTER );
717 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
718 CharacterStateMatrix.GainLossStates.GAIN,
719 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_D,
721 ForesterUtil.LINE_SEPARATOR,
723 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
724 CharacterStateMatrix.GainLossStates.LOSS,
725 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_D,
727 ForesterUtil.LINE_SEPARATOR,
729 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
730 + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_D, sep, ForesterUtil.LINE_SEPARATOR, null );
732 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
736 domain_parsimony.getGainLossMatrix(),
737 CharacterStateMatrix.GainLossStates.GAIN,
738 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D,
740 ForesterUtil.LINE_SEPARATOR,
741 "Dollo Parsimony | Gains | Domains",
743 domain_id_to_secondary_features_maps,
744 all_pfams_encountered,
745 all_pfams_gained_as_domains,
747 tax_code_to_id_map );
748 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
752 domain_parsimony.getGainLossMatrix(),
753 CharacterStateMatrix.GainLossStates.LOSS,
754 outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D,
756 ForesterUtil.LINE_SEPARATOR,
757 "Dollo Parsimony | Losses | Domains",
759 domain_id_to_secondary_features_maps,
760 all_pfams_encountered,
761 all_pfams_lost_as_domains,
763 tax_code_to_id_map );
764 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
765 // go_id_to_term_map,
766 // go_namespace_limit,
768 // domain_parsimony.getGainLossMatrix(),
770 // outfile_name + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D,
772 // ForesterUtil.LINE_SEPARATOR,
773 // "Dollo Parsimony | Present | Domains",
775 // domain_id_to_secondary_features_maps,
776 // all_pfams_encountered,
778 // "_dollo_present_d",
779 // tax_code_to_id_map );
780 preparePhylogeny( local_phylogeny_l,
783 "Dollo parsimony on domain presence/absence",
784 "dollo_on_domains_" + outfile_name,
786 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
787 + surfacing.DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
789 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, true, outfile_name, "_dollo_all_gains_d" );
790 writeAllDomainsChangedOnAllSubtrees( local_phylogeny_l, false, outfile_name, "_dollo_all_losses_d" );
792 catch ( final IOException e ) {
794 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
796 if ( domain_parsimony.calculateNumberOfBinaryDomainCombination() > 0 ) {
797 // FITCH DOMAIN COMBINATIONS
798 // -------------------------
799 local_phylogeny_l = phylogeny.copy();
800 String randomization = "no";
801 if ( radomize_fitch_parsimony ) {
802 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( random_number_seed_for_fitch_parsimony );
803 randomization = "yes, seed = " + random_number_seed_for_fitch_parsimony;
806 domain_parsimony.executeFitchParsimonyOnBinaryDomainCombintion( use_last_in_fitch_parsimony );
808 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossMatrix(), outfile_name
809 + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
810 SurfacingUtil.writeMatrixToFile( domain_parsimony.getGainLossCountsMatrix(), outfile_name
811 + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS, Format.FORESTER );
813 .writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
814 CharacterStateMatrix.GainLossStates.GAIN,
815 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_BC,
817 ForesterUtil.LINE_SEPARATOR,
819 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
820 CharacterStateMatrix.GainLossStates.LOSS,
822 + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_BC,
824 ForesterUtil.LINE_SEPARATOR,
826 SurfacingUtil.writeBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(), null, outfile_name
827 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC, sep, ForesterUtil.LINE_SEPARATOR, null );
828 if ( all_binary_domains_combination_gained_fitch != null ) {
829 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
831 all_binary_domains_combination_gained_fitch,
834 if ( all_binary_domains_combination_lost_fitch != null ) {
835 collectChangedDomainCombinationsFromBinaryStatesMatrixAsListToFile( domain_parsimony.getGainLossMatrix(),
837 all_binary_domains_combination_lost_fitch,
840 if ( output_binary_domain_combinations_for_graphs ) {
842 .writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( domain_parsimony
843 .getGainLossMatrix(),
846 + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS,
848 ForesterUtil.LINE_SEPARATOR,
849 BinaryDomainCombination.OutputFormat.DOT );
852 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
856 domain_parsimony.getGainLossMatrix(),
857 CharacterStateMatrix.GainLossStates.GAIN,
858 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC,
860 ForesterUtil.LINE_SEPARATOR,
861 "Fitch Parsimony | Gains | Domain Combinations",
864 all_pfams_encountered,
865 all_pfams_gained_as_dom_combinations,
867 tax_code_to_id_map );
868 writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
872 domain_parsimony.getGainLossMatrix(),
873 CharacterStateMatrix.GainLossStates.LOSS,
874 outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC,
876 ForesterUtil.LINE_SEPARATOR,
877 "Fitch Parsimony | Losses | Domain Combinations",
880 all_pfams_encountered,
881 all_pfams_lost_as_dom_combinations,
883 tax_code_to_id_map );
884 // writeBinaryStatesMatrixToList( domain_id_to_go_ids_map,
885 // go_id_to_term_map,
886 // go_namespace_limit,
888 // domain_parsimony.getGainLossMatrix(),
890 // outfile_name + surfacing.PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC,
892 // ForesterUtil.LINE_SEPARATOR,
893 // "Fitch Parsimony | Present | Domain Combinations",
896 // all_pfams_encountered,
898 // "_fitch_present_dc",
899 // tax_code_to_id_map );
900 writeAllEncounteredPfamsToFile( domain_id_to_go_ids_map,
903 all_pfams_encountered );
904 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX, all_pfams_gained_as_domains );
905 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX, all_pfams_lost_as_domains );
906 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_GAINED_AS_DC_SUFFIX,
907 all_pfams_gained_as_dom_combinations );
908 writePfamsToFile( outfile_name + surfacing.ALL_PFAMS_LOST_AS_DC_SUFFIX, all_pfams_lost_as_dom_combinations );
909 preparePhylogeny( local_phylogeny_l,
912 "Fitch parsimony on binary domain combination presence/absence randomization: "
914 "fitch_on_binary_domain_combinations_" + outfile_name,
916 SurfacingUtil.writePhylogenyToFile( local_phylogeny_l, outfile_name
917 + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH );
918 calculateIndependentDomainCombinationGains( local_phylogeny_l,
920 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX,
922 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX,
924 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX,
926 + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX,
927 outfile_name + "_indep_dc_gains_fitch_lca_ranks.txt",
928 outfile_name + "_indep_dc_gains_fitch_lca_taxonomies.txt",
929 outfile_name + "_indep_dc_gains_fitch_protein_statistics.txt",
930 protein_length_stats_by_dc,
931 domain_number_stats_by_dc,
932 domain_length_stats_by_domain );
    /**
     * Runs Dollo parsimony on secondary feature presence/absence, then Fitch
     * parsimony on secondary binary domain combinations, writing gain/loss
     * matrices, state lists, decorated trees, and independent
     * domain-combination gain statistics to files derived from outfile_name.
     *
     * @param outfile_name base name used to derive all output file names
     * @param secondary_features_parsimony calculator performing the parsimony analyses
     * @param phylogeny input tree; copies are decorated and written, the original is untouched
     * @param parameters_str free-form parameter description recorded in the output trees
     * @param mapping_results_map per-species domain-to-secondary-feature mapping results
     * @param use_last_in_fitch_parsimony tie-breaking option passed to the Fitch analysis
     */
    public static void executeParsimonyAnalysisForSecondaryFeatures( final String outfile_name,
                                                                     final DomainParsimonyCalculator secondary_features_parsimony,
                                                                     final Phylogeny phylogeny,
                                                                     final String parameters_str,
                                                                     final Map<Species, MappingResults> mapping_results_map,
                                                                     final boolean use_last_in_fitch_parsimony ) {
        // Separator between sections in list-style output files.
        final String sep = ForesterUtil.LINE_SEPARATOR + "###################" + ForesterUtil.LINE_SEPARATOR;
        final String date_time = ForesterUtil.getCurrentDateTime();
        System.out.println();
        writeToNexus( outfile_name + surfacing.NEXUS_SECONDARY_FEATURES,
                      secondary_features_parsimony.createMatrixOfSecondaryFeaturePresenceOrAbsence( null ),
        // DOLLO parsimony on secondary feature presence/absence; work on a copy
        // so the caller's phylogeny is not modified.
        Phylogeny local_phylogeny_copy = phylogeny.copy();
        secondary_features_parsimony.executeDolloParsimonyOnSecondaryFeatures( mapping_results_map );
        SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossMatrix(), outfile_name
                + surfacing.PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
        SurfacingUtil.writeMatrixToFile( secondary_features_parsimony.getGainLossCountsMatrix(), outfile_name
                + surfacing.PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES, Format.FORESTER );
        // Gain, loss, and present states as plain list files.
                .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
                                                      CharacterStateMatrix.GainLossStates.GAIN,
                        + surfacing.PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES,
                                                      ForesterUtil.LINE_SEPARATOR,
                .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
                                                      CharacterStateMatrix.GainLossStates.LOSS,
                        + surfacing.PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES,
                                                      ForesterUtil.LINE_SEPARATOR,
                .writeBinaryStatesMatrixAsListToFile( secondary_features_parsimony.getGainLossMatrix(),
                        + surfacing.PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES,
                                                      ForesterUtil.LINE_SEPARATOR,
        preparePhylogeny( local_phylogeny_copy,
                          secondary_features_parsimony,
                          "Dollo parsimony on secondary feature presence/absence",
                          "dollo_on_secondary_features_" + outfile_name,
        SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
                + surfacing.SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO );
        // FITCH DOMAIN COMBINATIONS
        // -------------------------
        local_phylogeny_copy = phylogeny.copy();
        final String randomization = "no";
        secondary_features_parsimony
                .executeFitchParsimonyOnBinaryDomainCombintionOnSecondaryFeatures( use_last_in_fitch_parsimony );
        preparePhylogeny( local_phylogeny_copy,
                          secondary_features_parsimony,
                          "Fitch parsimony on secondary binary domain combination presence/absence randomization: "
                          "fitch_on_binary_domain_combinations_" + outfile_name,
        SurfacingUtil.writePhylogenyToFile( local_phylogeny_copy, outfile_name
                + surfacing.BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED );
        // Independent domain-combination gains; the trailing nulls disable the
        // optional per-protein statistics outputs.
        calculateIndependentDomainCombinationGains( local_phylogeny_copy, outfile_name
                + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX, outfile_name
                + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX, outfile_name
                + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX, outfile_name
                + surfacing.INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX, outfile_name
                + "_MAPPED_indep_dc_gains_fitch_lca_ranks.txt", outfile_name
                + "_MAPPED_indep_dc_gains_fitch_lca_taxonomies.txt", null, null, null, null );
    /**
     * Runs the "plus-minus" copy-number difference analysis over the given
     * genomes and writes HTML/plain-text result files and GO id lists, all
     * named after output_file. Terminates the program on I/O failure.
     *
     * @param output_file base path used to derive all result file names
     * @param plus_minus_analysis_high_copy_base high-copy base genome names
     * @param plus_minus_analysis_high_copy_target high-copy target genome names
     * @param plus_minus_analysis_low_copy low-copy genome names
     * @param gwcd_list genome-wide combinable domains, one entry per genome
     * @param protein_lists_per_species proteins grouped by species
     * @param domain_id_to_go_ids_map mapping of domain ids to GO ids
     * @param go_id_to_term_map mapping of GO ids to GO terms
     * @param plus_minus_analysis_numbers element 0: minimal difference (Integer); element 1: factor (Double)
     */
    public static void executePlusMinusAnalysis( final File output_file,
                                                 final List<String> plus_minus_analysis_high_copy_base,
                                                 final List<String> plus_minus_analysis_high_copy_target,
                                                 final List<String> plus_minus_analysis_low_copy,
                                                 final List<GenomeWideCombinableDomains> gwcd_list,
                                                 final SortedMap<Species, List<Protein>> protein_lists_per_species,
                                                 final Map<String, List<GoId>> domain_id_to_go_ids_map,
                                                 final Map<GoId, GoTerm> go_id_to_term_map,
                                                 final List<Object> plus_minus_analysis_numbers ) {
        // Ids of every species included in the analysis.
        final Set<String> all_spec = new HashSet<String>();
        for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
            all_spec.add( gwcd.getSpecies().getSpeciesId() );
        // Output files derived from the base output name.
        final File html_out_dom = new File( output_file + surfacing.PLUS_MINUS_DOM_SUFFIX_HTML );
        final File plain_out_dom = new File( output_file + surfacing.PLUS_MINUS_DOM_SUFFIX );
        final File html_out_dc = new File( output_file + surfacing.PLUS_MINUS_DC_SUFFIX_HTML );
        final File all_domains_go_ids_out_dom = new File( output_file + surfacing.PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
        final File passing_domains_go_ids_out_dom = new File( output_file
                + surfacing.PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
        final File proteins_file_base = new File( output_file + "" );
        // Convention: element 0 is the minimal difference, element 1 the factor.
        final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
        final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
        DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
                                                                   protein_lists_per_species,
                                                                   plus_minus_analysis_high_copy_base,
                                                                   plus_minus_analysis_high_copy_target,
                                                                   plus_minus_analysis_low_copy,
                                                                   domain_id_to_go_ids_map,
                                                                   all_domains_go_ids_out_dom,
                                                                   passing_domains_go_ids_out_dom,
                                                                   proteins_file_base );
        catch ( final IOException e ) {
            // Any write failure is fatal for the application.
            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
                + html_out_dom + "\"" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
                + plain_out_dom + "\"" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \"" + html_out_dc
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based passing GO ids to \""
                + passing_domains_go_ids_out_dom + "\"" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based all GO ids to \""
                + all_domains_go_ids_out_dom + "\"" );
    /**
     * Writes, for each protein containing the query domains in N- to
     * C-terminal order, one record with species id, protein id, its distinct
     * domain ids with total counts, description, and accession.
     * NOTE(review): the output Writer ('out') parameter is not visible in this
     * view of the file — confirm against the full source.
     *
     * @param proteins proteins to scan
     * @param query_domain_ids_nc_order domain ids that must occur in N-to-C order
     * @param separator field separator written between columns
     * @param limit_to_species if non-empty, only proteins of this species are considered
     * @throws IOException on write failure
     */
    public static void extractProteinNames( final List<Protein> proteins,
                                            final List<String> query_domain_ids_nc_order,
                                            final String separator,
                                            final String limit_to_species ) throws IOException {
        for( final Protein protein : proteins ) {
            // Either no species restriction, or the protein's species matches (case-insensitive).
            if ( ForesterUtil.isEmpty( limit_to_species )
                    || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
                // 'true' => domains must appear in the given N-to-C order.
                if ( protein.contains( query_domain_ids_nc_order, true ) ) {
                    out.write( protein.getSpecies().getSpeciesId() );
                    out.write( separator );
                    out.write( protein.getProteinId().getId() );
                    out.write( separator );
                    // Each distinct domain id is written once, with its total count.
                    final Set<String> visited_domain_ids = new HashSet<String>();
                    boolean first = true;
                    for( final Domain domain : protein.getProteinDomains() ) {
                        if ( !visited_domain_ids.contains( domain.getDomainId() ) ) {
                            visited_domain_ids.add( domain.getDomainId() );
                            out.write( domain.getDomainId() );
                            out.write( "" + domain.getTotalCount() );
                    out.write( separator );
                    // Description only if present and not the NONE placeholder.
                    if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
                            .equals( SurfacingConstants.NONE ) ) ) {
                        out.write( protein.getDescription() );
                    out.write( separator );
                    // Accession only if present and not the NONE placeholder.
                    if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
                            .equals( SurfacingConstants.NONE ) ) ) {
                        out.write( protein.getAccession() );
                    out.write( SurfacingConstants.NL );
    /**
     * Writes, for each protein (optionally restricted to one species) that has
     * at least one domain with the given id, one record with species id,
     * protein id, the query domain id, matching domain locations passing the
     * E-value cutoff, all of the protein's cutoff-passing domains sorted by
     * position, description, and accession.
     * NOTE(review): the output Writer ('out') parameter is not visible in this
     * view of the file — confirm against the full source.
     *
     * @param protein_lists_per_species proteins grouped by species
     * @param domain_id domain identifier to search for
     * @param separator field separator written between columns
     * @param limit_to_species if non-empty, only proteins of this species are considered
     * @param domain_e_cutoff maximal per-domain E-value; a negative value disables the cutoff
     * @throws IOException on write failure
     */
    public static void extractProteinNames( final SortedMap<Species, List<Protein>> protein_lists_per_species,
                                            final String domain_id,
                                            final String separator,
                                            final String limit_to_species,
                                            final double domain_e_cutoff ) throws IOException {
        //System.out.println( "Per domain E-value: " + domain_e_cutoff );
        for( final Species species : protein_lists_per_species.keySet() ) {
            //System.out.println( species + ":" );
            for( final Protein protein : protein_lists_per_species.get( species ) ) {
                if ( ForesterUtil.isEmpty( limit_to_species )
                        || protein.getSpecies().getSpeciesId().equalsIgnoreCase( limit_to_species ) ) {
                    final List<Domain> domains = protein.getProteinDomains( domain_id );
                    if ( domains.size() > 0 ) {
                        out.write( protein.getSpecies().getSpeciesId() );
                        out.write( separator );
                        out.write( protein.getProteinId().getId() );
                        out.write( separator );
                        out.write( domain_id.toString() );
                        out.write( separator );
                        // from-to ranges of the matching domains that pass the cutoff.
                        for( final Domain domain : domains ) {
                            if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
                                out.write( domain.getFrom() + "-" + domain.getTo() );
                                if ( prev_to >= 0 ) {
                                    final int l = domain.getFrom() - prev_to;
                                    // System.out.println( l );
                                prev_to = domain.getTo();
                        out.write( separator );
                        // Collect all of this protein's domains passing the cutoff ...
                        final List<Domain> domain_list = new ArrayList<Domain>();
                        for( final Domain domain : protein.getProteinDomains() ) {
                            if ( ( domain_e_cutoff < 0 ) || ( domain.getPerDomainEvalue() <= domain_e_cutoff ) ) {
                                domain_list.add( domain );
                        // ... and sort them positionally via DomainComparator.
                        final Domain domain_ary[] = new Domain[ domain_list.size() ];
                        for( int i = 0; i < domain_list.size(); ++i ) {
                            domain_ary[ i ] = domain_list.get( i );
                        Arrays.sort( domain_ary, new DomainComparator( true ) );
                        boolean first = true;
                        for( final Domain domain : domain_ary ) {
                            out.write( domain.getDomainId().toString() );
                            out.write( ":" + domain.getFrom() + "-" + domain.getTo() );
                            out.write( ":" + domain.getPerDomainEvalue() );
                        // Description only if present and not the NONE placeholder.
                        if ( !( ForesterUtil.isEmpty( protein.getDescription() ) || protein.getDescription()
                                .equals( SurfacingConstants.NONE ) ) ) {
                            out.write( protein.getDescription() );
                        out.write( separator );
                        // Accession only if present and not the NONE placeholder.
                        if ( !( ForesterUtil.isEmpty( protein.getAccession() ) || protein.getAccession()
                                .equals( SurfacingConstants.NONE ) ) ) {
                            out.write( protein.getAccession() );
                        out.write( SurfacingConstants.NL );
1189 public static SortedSet<String> getAllDomainIds( final List<GenomeWideCombinableDomains> gwcd_list ) {
1190 final SortedSet<String> all_domains_ids = new TreeSet<String>();
1191 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
1192 final Set<String> all_domains = gwcd.getAllDomainIds();
1193 // for( final Domain domain : all_domains ) {
1194 all_domains_ids.addAll( all_domains );
1197 return all_domains_ids;
    /**
     * Counts how often each domain id occurs across all domains of the given
     * proteins.
     *
     * @param protein_domain_collections proteins whose domains are counted
     * @return sorted map from domain id to its occurrence count
     */
    public static SortedMap<String, Integer> getDomainCounts( final List<Protein> protein_domain_collections ) {
        final SortedMap<String, Integer> map = new TreeMap<String, Integer>();
        for( final Protein protein_domain_collection : protein_domain_collections ) {
            for( final Object name : protein_domain_collection.getProteinDomains() ) {
                final BasicDomain protein_domain = ( BasicDomain ) name;
                final String id = protein_domain.getDomainId();
                // Increment the count for an already-seen id; the
                // first-occurrence branch is not visible in this view of the file.
                if ( map.containsKey( id ) ) {
                    map.put( id, map.get( id ) + 1 );
    /**
     * Counts nodes in the phylogeny that have neither their own name nor a
     * taxonomy scientific or common name; for diagnostics, the parent name of
     * each such node is appended to the given StringBuilder.
     *
     * @param p phylogeny to inspect (postorder traversal)
     * @param names receives the parent names of unnamed nodes
     * @return number of nodes lacking any usable name
     */
    public static int getNumberOfNodesLackingName( final Phylogeny p, final StringBuilder names ) {
        final PhylogenyNodeIterator it = p.iteratorPostorder();
        while ( it.hasNext() ) {
            final PhylogenyNode n = it.next();
            // "Lacking a name" = empty node name AND (no taxonomy, or taxonomy
            // without scientific name) AND (no taxonomy, or taxonomy without
            // common name).
            if ( ForesterUtil.isEmpty( n.getName() )
                    && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
                            .getScientificName() ) )
                    && ( !n.getNodeData().isHasTaxonomy() || ForesterUtil.isEmpty( n.getNodeData().getTaxonomy()
                            .getCommonName() ) ) ) {
                if ( n.getParent() != null ) {
                    names.append( " " );
                    names.append( n.getParent().getName() );
                final List l = n.getAllExternalDescendants();
                // Prints the full descendant list once per iteration — possibly
                // debugging leftovers; confirm intent against the full source.
                for( final Object object : l ) {
                    System.out.println( l.toString() );
    /**
     * Logs a message to the given Writer, terminating the program on I/O
     * failure. Writes a line separator; presumably msg itself is written on a
     * line not visible in this view of the file — confirm against the full source.
     *
     * @param msg message to log
     * @param w destination writer
     */
    public static void log( final String msg, final Writer w ) {
            w.write( ForesterUtil.LINE_SEPARATOR );
        catch ( final IOException e ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
    /**
     * Reads one phylogeny from each input tree file and validates it for
     * parsimony analysis: file readable, exactly one phyloXML phylogeny,
     * non-empty, rooted, at least number_of_genomes external nodes, no
     * unnamed nodes; warns when a tree is not completely binary. Any violation
     * terminates the program.
     *
     * @param intree_files input tree files, one phylogeny each
     * @param number_of_genomes minimal number of external nodes required per tree
     * @param input_file_properties genome table used to prune/verify each tree
     * @return the validated, preprocessed phylogenies
     */
    public static Phylogeny[] obtainAndPreProcessIntrees( final File[] intree_files,
                                                          final int number_of_genomes,
                                                          final String[][] input_file_properties ) {
        final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
        for( final File intree_file : intree_files ) {
            Phylogeny intree = null;
            final String error = ForesterUtil.isReadableFile( intree_file );
            if ( !ForesterUtil.isEmpty( error ) ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: "
                // Parser is chosen from the file type; 'true' requests phyloXML validation.
                final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
                        .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
                if ( p_array.length < 1 ) {
                    ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
                            + "] does not contain any phylogeny in phyloXML format" );
                else if ( p_array.length > 1 ) {
                    ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
                            + "] contains more than one phylogeny in phyloXML format" );
                intree = p_array[ 0 ];
            catch ( final Exception e ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file
            if ( ( intree == null ) || intree.isEmpty() ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
            if ( !intree.isRooted() ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
            if ( intree.getNumberOfExternalNodes() < number_of_genomes ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME,
                                         "number of external nodes [" + intree.getNumberOfExternalNodes()
                                                 + "] of input tree [" + intree_file
                                                 + "] is smaller than the number of genomes the be analyzed ["
                                                 + number_of_genomes + "]" );
            // Every node must carry a usable name before parsimony analyses.
            final StringBuilder parent_names = new StringBuilder();
            final int nodes_lacking_name = getNumberOfNodesLackingName( intree, parent_names );
            if ( nodes_lacking_name > 0 ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] has "
                        + nodes_lacking_name + " node(s) lacking a name [parent names:" + parent_names + "]" );
            preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
            if ( !intree.isCompletelyBinary() ) {
                // Non-binary trees are allowed but flagged, since they may
                // affect parsimony results.
                ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "input tree [" + intree_file
                        + "] is not completely binary" );
            intrees[ i++ ] = intree;
    /**
     * Reads the single phylogeny from the given file, terminating the program
     * if the file is unreadable, contains no or more than one phyloXML
     * phylogeny, or yields an empty or unrooted tree.
     *
     * @param intree_file file expected to contain exactly one phylogeny
     * @return the phylogeny read from the file
     */
    public static Phylogeny obtainFirstIntree( final File intree_file ) {
        Phylogeny intree = null;
        final String error = ForesterUtil.isReadableFile( intree_file );
        if ( !ForesterUtil.isEmpty( error ) ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: " + error );
            // Parser is chosen from the file type; 'true' requests phyloXML validation.
            final Phylogeny[] phys = ParserBasedPhylogenyFactory.getInstance()
                    .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
            if ( phys.length < 1 ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
                        + "] does not contain any phylogeny in phyloXML format" );
            else if ( phys.length > 1 ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
                        + "] contains more than one phylogeny in phyloXML format" );
        catch ( final Exception e ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file + "]: "
        if ( ( intree == null ) || intree.isEmpty() ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
        if ( !intree.isRooted() ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
    /**
     * Returns (and caches in _TAXCODE_HEXCOLORSTRING_MAP) a hex color string
     * of the form #rrggbb for the given taxonomy code, derived from the
     * taxonomy group of the uniquely matching node in the given phylogeny.
     *
     * @param tax_code taxonomy code to resolve
     * @param phy phylogeny used for resolution; must contain the code exactly once
     * @return cached or newly computed hex color string
     * @throws IllegalArgumentException if the code is absent, not unique, or no
     *         color can be determined
     */
    public static String obtainHexColorStringDependingOnTaxonomyGroup( final String tax_code, final Phylogeny phy )
            throws IllegalArgumentException {
        // Cache miss: resolve via the phylogeny; a cache hit falls through to
        // the final map lookup.
        if ( !_TAXCODE_HEXCOLORSTRING_MAP.containsKey( tax_code ) ) {
            if ( ( phy != null ) && !phy.isEmpty() ) {
                final List<PhylogenyNode> nodes = phy.getNodesViaTaxonomyCode( tax_code );
                if ( ( nodes == null ) || nodes.isEmpty() ) {
                    throw new IllegalArgumentException( "code " + tax_code + " is not found" );
                if ( nodes.size() != 1 ) {
                    throw new IllegalArgumentException( "code " + tax_code + " is not unique" );
                PhylogenyNode n = nodes.get( 0 );
                // Walk from the matching node toward the root until a color can
                // be derived from a taxonomy scientific name or a node name.
                while ( n != null ) {
                    if ( n.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
                        c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( n.getNodeData().getTaxonomy()
                                .getScientificName(), tax_code );
                    if ( ( c == null ) && !ForesterUtil.isEmpty( n.getName() ) ) {
                        c = ForesterUtil.obtainColorDependingOnTaxonomyGroup( n.getName(), tax_code );
                    throw new IllegalArgumentException( "no color found for taxonomy code \"" + tax_code + "\"" );
                final String hex = String.format( "#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue() );
                _TAXCODE_HEXCOLORSTRING_MAP.put( tax_code, hex );
                throw new IllegalArgumentException( "unable to obtain color for code " + tax_code
                        + " (tree is null or empty and code is not in map)" );
        return _TAXCODE_HEXCOLORSTRING_MAP.get( tax_code );
    /**
     * Writes (1) every domain architecture whose count meets min_count, with
     * its count, to da_counts_outfile, and (2) genome/architecture pairs for
     * the architectures to unique_da_outfile. Terminates the program on I/O
     * failure.
     * NOTE(review): the final status messages say "Wrote distance matrices",
     * which does not describe the files written here — looks like a copy-paste
     * from another method; confirm and correct the wording.
     *
     * @param domain_architecutures genome id -> set of domain architecture strings
     * @param domain_architecuture_counts domain architecture string -> occurrence count
     * @param min_count minimal count for an architecture to be written
     * @param da_counts_outfile output file for architecture counts (tab-separated)
     * @param unique_da_outfile output file for genome/architecture pairs (tab-separated)
     */
    public static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
                                                          final SortedMap<String, Integer> domain_architecuture_counts,
                                                          final int min_count,
                                                          final File da_counts_outfile,
                                                          final File unique_da_outfile ) {
        checkForOutputFileWriteability( da_counts_outfile );
        checkForOutputFileWriteability( unique_da_outfile );
            final BufferedWriter da_counts_out = new BufferedWriter( new FileWriter( da_counts_outfile ) );
            final BufferedWriter unique_da_out = new BufferedWriter( new FileWriter( unique_da_outfile ) );
            final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
            while ( it.hasNext() ) {
                final Map.Entry<String, Integer> e = it.next();
                final String da = e.getKey();
                final int count = e.getValue();
                // Architectures below the threshold are skipped.
                if ( count >= min_count ) {
                    da_counts_out.write( da );
                    da_counts_out.write( "\t" );
                    da_counts_out.write( String.valueOf( count ) );
                    da_counts_out.write( ForesterUtil.LINE_SEPARATOR );
                // List the genomes whose architecture set contains this architecture.
                final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
                while ( it2.hasNext() ) {
                    final Map.Entry<String, Set<String>> e2 = it2.next();
                    final String genome = e2.getKey();
                    final Set<String> das = e2.getValue();
                    if ( das.contains( da ) ) {
                        unique_da_out.write( genome );
                        unique_da_out.write( "\t" );
                        unique_da_out.write( da );
                        unique_da_out.write( ForesterUtil.LINE_SEPARATOR );
            unique_da_out.close();
            da_counts_out.close();
        catch ( final IOException e ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + da_counts_outfile + "\"" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + unique_da_outfile + "\"" );
    /**
     * Decorates the phylogeny with the parsimony results and records method,
     * date, cost, gains, losses, unchanged count, and parameters in its
     * description; stores the parsimony cost as a confidence value and marks
     * the tree rooted and not re-rootable.
     * NOTE(review): a parameter between 'method' and 'parameters_str' (likely
     * the tree name) is not visible in this view of the file — confirm against
     * the full source.
     *
     * @param p phylogeny to decorate (modified in place)
     * @param domain_parsimony source of the cost/gain/loss statistics
     * @param date_time timestamp recorded in the description
     * @param method human-readable description of the parsimony method used
     * @param parameters_str parameter description recorded in the description
     */
    public static void preparePhylogeny( final Phylogeny p,
                                         final DomainParsimonyCalculator domain_parsimony,
                                         final String date_time,
                                         final String method,
                                         final String parameters_str ) {
        domain_parsimony.decoratePhylogenyWithDomains( p );
        final StringBuilder desc = new StringBuilder();
        desc.append( "[Method: " + method + "] [Date: " + date_time + "] " );
        desc.append( "[Cost: " + domain_parsimony.getCost() + "] " );
        desc.append( "[Gains: " + domain_parsimony.getTotalGains() + "] " );
        desc.append( "[Losses: " + domain_parsimony.getTotalLosses() + "] " );
        desc.append( "[Unchanged: " + domain_parsimony.getTotalUnchanged() + "] " );
        desc.append( "[Parameters: " + parameters_str + "]" );
        p.setDescription( desc.toString() );
        // The parsimony cost doubles as a confidence value labeled "parsimony".
        p.setConfidence( new Confidence( domain_parsimony.getCost(), "parsimony" ) );
        p.setRerootable( false );
        p.setRooted( true );
    /**
     * Prepares the input tree for parsimony analyses: verifies each genome
     * name maps to at most one node, names unnamed nodes from their taxonomy
     * (taxonomy code, then scientific name, then common name), prunes external
     * nodes not in the genome list, and finally verifies every genome still
     * resolves to a node. Violations terminate the program.
     *
     * @param intree tree to prepare (modified in place)
     * @param input_file_properties genome table; column 1 holds the genome/species name
     */
    public static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
                                                             final String[][] input_file_properties ) {
        final String[] genomes = new String[ input_file_properties.length ];
        for( int i = 0; i < input_file_properties.length; ++i ) {
            // A genome name appearing on more than one node is ambiguous.
            if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
                        + "] is not unique in input tree " + intree.getName() );
            genomes[ i ] = input_file_properties[ i ][ 1 ];
        // Fill in missing node names from taxonomy data, preferring taxonomy
        // code over scientific name over common name.
        final PhylogenyNodeIterator it = intree.iteratorPostorder();
        while ( it.hasNext() ) {
            final PhylogenyNode n = it.next();
            if ( ForesterUtil.isEmpty( n.getName() ) ) {
                if ( n.getNodeData().isHasTaxonomy()
                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
                    n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
                else if ( n.getNodeData().isHasTaxonomy()
                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
                    n.setName( n.getNodeData().getTaxonomy().getScientificName() );
                else if ( n.getNodeData().isHasTaxonomy()
                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
                    n.setName( n.getNodeData().getTaxonomy().getCommonName() );
                        .fatalError( surfacing.PRG_NAME,
                                     "node with no name, scientific name, common name, or taxonomy code present" );
        // Drop external nodes that do not correspond to any analyzed genome.
        final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
        if ( igns.size() > 0 ) {
            System.out.println( "Not using the following " + igns.size() + " nodes:" );
            for( int i = 0; i < igns.size(); ++i ) {
                System.out.println( " " + i + ": " + igns.get( i ) );
            System.out.println( "--" );
        // After pruning, every genome must still resolve to a unique node.
        for( final String[] input_file_propertie : input_file_properties ) {
                intree.getNode( input_file_propertie[ 1 ] );
            catch ( final IllegalArgumentException e ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
                        + "] not present/not unique in input tree" );
1504 public static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
1505 final Writer log_writer ) {
1507 for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
1508 sum += entry.getValue();
1510 final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
1511 ForesterUtil.programMessage( surfacing.PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
1512 log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
1515 public static void processFilter( final File filter_file, final SortedSet<String> filter ) {
1516 SortedSet<String> filter_str = null;
1518 filter_str = ForesterUtil.file2set( filter_file );
1520 catch ( final IOException e ) {
1521 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1523 if ( filter_str != null ) {
1524 for( final String string : filter_str ) {
1525 filter.add( string );
1528 if ( surfacing.VERBOSE ) {
1529 System.out.println( "Filter:" );
1530 for( final String domainId : filter ) {
1531 System.out.println( domainId );
    /**
     * Parses the input genomes file into a two-column table and validates it:
     * species codes must match the phyloXML taxonomy code pattern and be
     * unique; file paths must be unique and readable. Violations terminate
     * the program.
     *
     * @param input_genomes file in the format "<hmmpfam output file> <species>" per line
     * @return table with one row per genome: [0] = hmmpfam output path, [1] = species code
     */
    public static String[][] processInputGenomesFile( final File input_genomes ) {
        String[][] input_file_properties = null;
            input_file_properties = ForesterUtil.file22dArray( input_genomes );
        catch ( final IOException e ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME,
                                     "genomes files is to be in the following format \"<hmmpfam output file> <species>\": "
                                             + e.getLocalizedMessage() );
        // Track seen species codes and file paths to enforce uniqueness.
        final Set<String> specs = new HashSet<String>();
        final Set<String> paths = new HashSet<String>();
        for( int i = 0; i < input_file_properties.length; ++i ) {
            if ( !PhyloXmlUtil.TAXOMONY_CODE_PATTERN.matcher( input_file_properties[ i ][ 1 ] ).matches() ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for species code: "
                        + input_file_properties[ i ][ 1 ] );
            if ( specs.contains( input_file_properties[ i ][ 1 ] ) ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "species code " + input_file_properties[ i ][ 1 ]
                        + " is not unique" );
            specs.add( input_file_properties[ i ][ 1 ] );
            if ( paths.contains( input_file_properties[ i ][ 0 ] ) ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "path " + input_file_properties[ i ][ 0 ]
                        + " is not unique" );
            paths.add( input_file_properties[ i ][ 0 ] );
            // Each referenced hmmpfam output file must be readable.
            final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
            if ( !ForesterUtil.isEmpty( error ) ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, error );
        return input_file_properties;
1571 public static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
1572 final List<String> high_copy_base,
1573 final List<String> high_copy_target,
1574 final List<String> low_copy,
1575 final List<Object> numbers ) {
1576 if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
1577 if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
1578 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for 'plus-minus' file: -"
1579 + surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>" );
1581 final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
1582 final String msg = ForesterUtil.isReadableFile( plus_minus_file );
1583 if ( !ForesterUtil.isEmpty( msg ) ) {
1584 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
1586 processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
1590 // First numbers is minimal difference, second is factor.
1591 public static void processPlusMinusFile( final File plus_minus_file,
1592 final List<String> high_copy_base,
1593 final List<String> high_copy_target,
1594 final List<String> low_copy,
1595 final List<Object> numbers ) {
1596 Set<String> species_set = null;
1597 int min_diff = surfacing.PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
1598 double factor = surfacing.PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
1600 species_set = ForesterUtil.file2set( plus_minus_file );
1602 catch ( final IOException e ) {
1603 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1605 if ( species_set != null ) {
1606 for( final String species : species_set ) {
1607 final String species_trimmed = species.substring( 1 );
1608 if ( species.startsWith( "+" ) ) {
1609 if ( low_copy.contains( species_trimmed ) ) {
1610 ForesterUtil.fatalError( surfacing.PRG_NAME,
1611 "species/genome names can not appear with both '+' and '-' suffix, as appears the case for: \""
1612 + species_trimmed + "\"" );
1614 high_copy_base.add( species_trimmed );
1616 else if ( species.startsWith( "*" ) ) {
1617 if ( low_copy.contains( species_trimmed ) ) {
1618 ForesterUtil.fatalError( surfacing.PRG_NAME,
1619 "species/genome names can not appear with both '*' and '-' suffix, as appears the case for: \""
1620 + species_trimmed + "\"" );
1622 high_copy_target.add( species_trimmed );
1624 else if ( species.startsWith( "-" ) ) {
1625 if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
1626 ForesterUtil.fatalError( surfacing.PRG_NAME,
1627 "species/genome names can not appear with both '+' or '*' and '-' suffix, as appears the case for: \""
1628 + species_trimmed + "\"" );
1630 low_copy.add( species_trimmed );
1632 else if ( species.startsWith( "$D" ) ) {
1634 min_diff = Integer.parseInt( species.substring( 3 ) );
1636 catch ( final NumberFormatException e ) {
1637 ForesterUtil.fatalError( surfacing.PRG_NAME,
1638 "could not parse integer value for minimal difference from: \""
1639 + species.substring( 3 ) + "\"" );
1642 else if ( species.startsWith( "$F" ) ) {
1644 factor = Double.parseDouble( species.substring( 3 ) );
1646 catch ( final NumberFormatException e ) {
1647 ForesterUtil.fatalError( surfacing.PRG_NAME, "could not parse double value for factor from: \""
1648 + species.substring( 3 ) + "\"" );
1651 else if ( species.startsWith( "#" ) ) {
1656 .fatalError( surfacing.PRG_NAME,
1657 "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' minimal Difference (default is 1), '$F=<double>' factor (default is 1.0), double), or '#' (ignore) suffix, encountered: \""
1660 numbers.add( new Integer( min_diff + "" ) );
1661 numbers.add( new Double( factor + "" ) );
1665 ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
1670 * species | protein id | n-terminal domain | c-terminal domain | n-terminal domain per domain E-value | c-terminal domain per domain E-value
1674 static public StringBuffer proteinToDomainCombinations( final Protein protein,
1675 final String protein_id,
1676 final String separator ) {
1677 final StringBuffer sb = new StringBuffer();
1678 if ( protein.getSpecies() == null ) {
1679 throw new IllegalArgumentException( "species must not be null" );
1681 if ( ForesterUtil.isEmpty( protein.getSpecies().getSpeciesId() ) ) {
1682 throw new IllegalArgumentException( "species id must not be empty" );
1684 final List<Domain> domains = protein.getProteinDomains();
1685 if ( domains.size() > 1 ) {
1686 final Map<String, Integer> counts = new HashMap<String, Integer>();
1687 for( final Domain domain : domains ) {
1688 final String id = domain.getDomainId();
1689 if ( counts.containsKey( id ) ) {
1690 counts.put( id, counts.get( id ) + 1 );
1693 counts.put( id, 1 );
1696 final Set<String> dcs = new HashSet<String>();
1697 for( int i = 1; i < domains.size(); ++i ) {
1698 for( int j = 0; j < i; ++j ) {
1699 Domain domain_n = domains.get( i );
1700 Domain domain_c = domains.get( j );
1701 if ( domain_n.getFrom() > domain_c.getFrom() ) {
1702 domain_n = domains.get( j );
1703 domain_c = domains.get( i );
1705 final String dc = domain_n.getDomainId() + domain_c.getDomainId();
1706 if ( !dcs.contains( dc ) ) {
1708 sb.append( protein.getSpecies() );
1709 sb.append( separator );
1710 sb.append( protein_id );
1711 sb.append( separator );
1712 sb.append( domain_n.getDomainId() );
1713 sb.append( separator );
1714 sb.append( domain_c.getDomainId() );
1715 sb.append( separator );
1716 sb.append( domain_n.getPerDomainEvalue() );
1717 sb.append( separator );
1718 sb.append( domain_c.getPerDomainEvalue() );
1719 sb.append( separator );
1720 sb.append( counts.get( domain_n.getDomainId() ) );
1721 sb.append( separator );
1722 sb.append( counts.get( domain_c.getDomainId() ) );
1723 sb.append( ForesterUtil.LINE_SEPARATOR );
1728 else if ( domains.size() == 1 ) {
1729 sb.append( protein.getSpecies() );
1730 sb.append( separator );
1731 sb.append( protein_id );
1732 sb.append( separator );
1733 sb.append( domains.get( 0 ).getDomainId() );
1734 sb.append( separator );
1735 sb.append( separator );
1736 sb.append( domains.get( 0 ).getPerDomainEvalue() );
1737 sb.append( separator );
1738 sb.append( separator );
1740 sb.append( separator );
1741 sb.append( ForesterUtil.LINE_SEPARATOR );
1744 sb.append( protein.getSpecies() );
1745 sb.append( separator );
1746 sb.append( protein_id );
1747 sb.append( separator );
1748 sb.append( separator );
1749 sb.append( separator );
1750 sb.append( separator );
1751 sb.append( separator );
1752 sb.append( separator );
1753 sb.append( ForesterUtil.LINE_SEPARATOR );
1758 public static List<Domain> sortDomainsWithAscendingConfidenceValues( final Protein protein ) {
1759 final List<Domain> domains = new ArrayList<Domain>();
1760 for( final Domain d : protein.getProteinDomains() ) {
1763 Collections.sort( domains, SurfacingUtil.ASCENDING_CONFIDENCE_VALUE_ORDER );
1767 public static int storeDomainArchitectures( final String genome,
1768 final SortedMap<String, Set<String>> domain_architecutures,
1769 final List<Protein> protein_list,
1770 final Map<String, Integer> distinct_domain_architecuture_counts ) {
1771 final Set<String> da = new HashSet<String>();
1772 domain_architecutures.put( genome, da );
1773 for( final Protein protein : protein_list ) {
1774 final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~", 3, "=" );
1775 if ( !da.contains( da_str ) ) {
1776 if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
1777 distinct_domain_architecuture_counts.put( da_str, 1 );
1780 distinct_domain_architecuture_counts.put( da_str,
1781 distinct_domain_architecuture_counts.get( da_str ) + 1 );
1789 public static void writeAllDomainsChangedOnAllSubtrees( final Phylogeny p,
1790 final boolean get_gains,
1791 final String outdir,
1792 final String suffix_for_filename ) throws IOException {
1793 CharacterStateMatrix.GainLossStates state = CharacterStateMatrix.GainLossStates.GAIN;
1795 state = CharacterStateMatrix.GainLossStates.LOSS;
1797 final File base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES,
1801 for( final PhylogenyNodeIterator it = p.iteratorPostorder(); it.hasNext(); ) {
1802 final PhylogenyNode node = it.next();
1803 if ( !node.isExternal() ) {
1804 final SortedSet<String> domains = collectAllDomainsChangedOnSubtree( node, get_gains );
1805 if ( domains.size() > 0 ) {
1806 final Writer writer = ForesterUtil.createBufferedWriter( base_dir + ForesterUtil.FILE_SEPARATOR
1807 + node.getName() + suffix_for_filename );
1808 for( final String domain : domains ) {
1809 writer.write( domain );
1810 writer.write( ForesterUtil.LINE_SEPARATOR );
1818 public static void writeBinaryDomainCombinationsFileForGraphAnalysis( final String[][] input_file_properties,
1819 final File output_dir,
1820 final GenomeWideCombinableDomains gwcd,
1822 final GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
1823 File dc_outfile_dot = new File( input_file_properties[ i ][ 1 ]
1824 + surfacing.DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS );
1825 if ( output_dir != null ) {
1826 dc_outfile_dot = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile_dot );
1828 checkForOutputFileWriteability( dc_outfile_dot );
1829 final SortedSet<BinaryDomainCombination> binary_combinations = createSetOfAllBinaryDomainCombinationsPerGenome( gwcd );
1831 final BufferedWriter out_dot = new BufferedWriter( new FileWriter( dc_outfile_dot ) );
1832 for( final BinaryDomainCombination bdc : binary_combinations ) {
1833 out_dot.write( bdc.toGraphDescribingLanguage( BinaryDomainCombination.OutputFormat.DOT, null, null )
1835 out_dot.write( SurfacingConstants.NL );
1839 catch ( final IOException e ) {
1840 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1842 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote binary domain combination for \""
1843 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
1844 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile_dot + "\"" );
1847 public static void writeBinaryStatesMatrixAsListToFile( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1848 final CharacterStateMatrix.GainLossStates state,
1849 final String filename,
1850 final String indentifier_characters_separator,
1851 final String character_separator,
1852 final Map<String, String> descriptions ) {
1853 final File outfile = new File( filename );
1854 checkForOutputFileWriteability( outfile );
1855 final SortedSet<String> sorted_ids = new TreeSet<String>();
1856 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1857 sorted_ids.add( matrix.getIdentifier( i ) );
1860 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1861 for( final String id : sorted_ids ) {
1862 out.write( indentifier_characters_separator );
1863 out.write( "#" + id );
1864 out.write( indentifier_characters_separator );
1865 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1867 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1868 if ( ( matrix.getState( id, c ) == state )
1869 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1870 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1871 out.write( matrix.getCharacter( c ) );
1872 if ( ( descriptions != null ) && !descriptions.isEmpty()
1873 && descriptions.containsKey( matrix.getCharacter( c ) ) ) {
1875 out.write( descriptions.get( matrix.getCharacter( c ) ) );
1877 out.write( character_separator );
1884 catch ( final IOException e ) {
1885 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1887 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
1890 public static void writeBinaryStatesMatrixAsListToFileForBinaryCombinationsForGraphAnalysis( final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1891 final CharacterStateMatrix.GainLossStates state,
1892 final String filename,
1893 final String indentifier_characters_separator,
1894 final String character_separator,
1895 final BinaryDomainCombination.OutputFormat bc_output_format ) {
1896 final File outfile = new File( filename );
1897 checkForOutputFileWriteability( outfile );
1898 final SortedSet<String> sorted_ids = new TreeSet<String>();
1899 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1900 sorted_ids.add( matrix.getIdentifier( i ) );
1903 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
1904 for( final String id : sorted_ids ) {
1905 out.write( indentifier_characters_separator );
1906 out.write( "#" + id );
1907 out.write( indentifier_characters_separator );
1908 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
1910 // using null to indicate either UNCHANGED_PRESENT or GAIN.
1911 if ( ( matrix.getState( id, c ) == state )
1912 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) || ( matrix
1913 .getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) ) ) ) {
1914 BinaryDomainCombination bdc = null;
1916 bdc = BasicBinaryDomainCombination.createInstance( matrix.getCharacter( c ) );
1918 catch ( final Exception e ) {
1919 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
1921 out.write( bdc.toGraphDescribingLanguage( bc_output_format, null, null ).toString() );
1922 out.write( character_separator );
1929 catch ( final IOException e ) {
1930 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1932 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters list: \"" + filename + "\"" );
1935 public static void writeBinaryStatesMatrixToList( final Map<String, List<GoId>> domain_id_to_go_ids_map,
1936 final Map<GoId, GoTerm> go_id_to_term_map,
1937 final GoNameSpace go_namespace_limit,
1938 final boolean domain_combinations,
1939 final CharacterStateMatrix<CharacterStateMatrix.GainLossStates> matrix,
1940 final CharacterStateMatrix.GainLossStates state,
1941 final String filename,
1942 final String indentifier_characters_separator,
1943 final String character_separator,
1944 final String title_for_html,
1945 final String prefix_for_html,
1946 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
1947 final SortedSet<String> all_pfams_encountered,
1948 final SortedSet<String> pfams_gained_or_lost,
1949 final String suffix_for_per_node_events_file,
1950 final Map<String, Integer> tax_code_to_id_map ) {
1951 if ( ( go_namespace_limit != null ) && ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1952 throw new IllegalArgumentException( "attempt to use GO namespace limit without a GO-id to term map" );
1954 else if ( ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) ) {
1955 throw new IllegalArgumentException( "attempt to output detailed HTML without a Pfam to GO map" );
1957 else if ( ( ( go_id_to_term_map == null ) || ( go_id_to_term_map.size() < 1 ) ) ) {
1958 throw new IllegalArgumentException( "attempt to output detailed HTML without a GO-id to term map" );
1960 final File outfile = new File( filename );
1961 checkForOutputFileWriteability( outfile );
1962 final SortedSet<String> sorted_ids = new TreeSet<String>();
1963 for( int i = 0; i < matrix.getNumberOfIdentifiers(); ++i ) {
1964 sorted_ids.add( matrix.getIdentifier( i ) );
1967 final Writer out = new BufferedWriter( new FileWriter( outfile ) );
1968 final File per_node_go_mapped_domain_gain_loss_files_base_dir = createBaseDirForPerNodeDomainFiles( surfacing.BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES,
1969 domain_combinations,
1972 Writer per_node_go_mapped_domain_gain_loss_outfile_writer = null;
1973 File per_node_go_mapped_domain_gain_loss_outfile = null;
1974 int per_node_counter = 0;
1975 out.write( "<html>" );
1976 out.write( SurfacingConstants.NL );
1977 writeHtmlHead( out, title_for_html );
1978 out.write( SurfacingConstants.NL );
1979 out.write( "<body>" );
1980 out.write( SurfacingConstants.NL );
1981 out.write( "<h1>" );
1982 out.write( SurfacingConstants.NL );
1983 out.write( title_for_html );
1984 out.write( SurfacingConstants.NL );
1985 out.write( "</h1>" );
1986 out.write( SurfacingConstants.NL );
1987 out.write( "<table>" );
1988 out.write( SurfacingConstants.NL );
1989 for( final String id : sorted_ids ) {
1990 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
1991 if ( matcher.matches() ) {
1994 out.write( "<tr>" );
1995 out.write( "<td>" );
1996 out.write( "<a href=\"#" + id + "\">" + id + "</a>" );
1997 out.write( "</td>" );
1998 out.write( "</tr>" );
1999 out.write( SurfacingConstants.NL );
2001 out.write( "</table>" );
2002 out.write( SurfacingConstants.NL );
2003 for( final String id : sorted_ids ) {
2004 final Matcher matcher = PATTERN_SP_STYLE_TAXONOMY.matcher( id );
2005 if ( matcher.matches() ) {
2008 out.write( SurfacingConstants.NL );
2009 out.write( "<h2>" );
2010 out.write( "<a name=\"" + id + "\">" + id + "</a>" );
2011 writeTaxonomyLinks( out, id, tax_code_to_id_map );
2012 out.write( "</h2>" );
2013 out.write( SurfacingConstants.NL );
2014 out.write( "<table>" );
2015 out.write( SurfacingConstants.NL );
2016 out.write( "<tr>" );
2017 out.write( "<td><b>" );
2018 out.write( "Pfam domain(s)" );
2019 out.write( "</b></td><td><b>" );
2020 out.write( "GO term acc" );
2021 out.write( "</b></td><td><b>" );
2022 out.write( "GO term" );
2023 out.write( "</b></td><td><b>" );
2024 out.write( "GO namespace" );
2025 out.write( "</b></td>" );
2026 out.write( "</tr>" );
2027 out.write( SurfacingConstants.NL );
2028 out.write( "</tr>" );
2029 out.write( SurfacingConstants.NL );
2030 per_node_counter = 0;
2031 if ( matrix.getNumberOfCharacters() > 0 ) {
2032 per_node_go_mapped_domain_gain_loss_outfile = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
2033 + ForesterUtil.FILE_SEPARATOR + id + suffix_for_per_node_events_file );
2034 SurfacingUtil.checkForOutputFileWriteability( per_node_go_mapped_domain_gain_loss_outfile );
2035 per_node_go_mapped_domain_gain_loss_outfile_writer = ForesterUtil
2036 .createBufferedWriter( per_node_go_mapped_domain_gain_loss_outfile );
2039 per_node_go_mapped_domain_gain_loss_outfile = null;
2040 per_node_go_mapped_domain_gain_loss_outfile_writer = null;
2042 for( int c = 0; c < matrix.getNumberOfCharacters(); ++c ) {
2044 // using null to indicate either UNCHANGED_PRESENT or GAIN.
2045 if ( ( matrix.getState( id, c ) == state )
2046 || ( ( state == null ) && ( ( matrix.getState( id, c ) == CharacterStateMatrix.GainLossStates.UNCHANGED_PRESENT ) || ( matrix
2047 .getState( id, c ) == CharacterStateMatrix.GainLossStates.GAIN ) ) ) ) {
2048 final String character = matrix.getCharacter( c );
2049 String domain_0 = "";
2050 String domain_1 = "";
2051 if ( character.indexOf( BinaryDomainCombination.SEPARATOR ) > 0 ) {
2052 final String[] s = character.split( BinaryDomainCombination.SEPARATOR );
2053 if ( s.length != 2 ) {
2054 throw new AssertionError( "this should not have happened: unexpected format for domain combination: ["
2055 + character + "]" );
2061 domain_0 = character;
2063 writeDomainData( domain_id_to_go_ids_map,
2070 character_separator,
2071 domain_id_to_secondary_features_maps,
2073 all_pfams_encountered.add( domain_0 );
2074 if ( pfams_gained_or_lost != null ) {
2075 pfams_gained_or_lost.add( domain_0 );
2077 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
2078 all_pfams_encountered.add( domain_1 );
2079 if ( pfams_gained_or_lost != null ) {
2080 pfams_gained_or_lost.add( domain_1 );
2083 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
2084 writeDomainsToIndividualFilePerTreeNode( per_node_go_mapped_domain_gain_loss_outfile_writer,
2091 if ( per_node_go_mapped_domain_gain_loss_outfile_writer != null ) {
2092 per_node_go_mapped_domain_gain_loss_outfile_writer.close();
2093 if ( per_node_counter < 1 ) {
2094 per_node_go_mapped_domain_gain_loss_outfile.delete();
2096 per_node_counter = 0;
2098 out.write( "</table>" );
2099 out.write( SurfacingConstants.NL );
2100 out.write( "<hr>" );
2101 out.write( SurfacingConstants.NL );
2102 } // for( final String id : sorted_ids ) {
2103 out.write( "</body>" );
2104 out.write( SurfacingConstants.NL );
2105 out.write( "</html>" );
2106 out.write( SurfacingConstants.NL );
2110 catch ( final IOException e ) {
2111 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2113 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
2116 public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
2117 final File output_dir,
2118 final Writer per_genome_domain_promiscuity_statistics_writer,
2119 final GenomeWideCombinableDomains gwcd,
2121 final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
2122 File dc_outfile = new File( input_file_properties[ i ][ 1 ]
2123 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
2124 if ( output_dir != null ) {
2125 dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
2127 checkForOutputFileWriteability( dc_outfile );
2129 final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
2130 out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
2133 catch ( final IOException e ) {
2134 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2136 final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
2138 per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 1 ] + "\t" );
2139 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
2140 if ( stats.getN() < 2 ) {
2141 per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
2144 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
2145 .sampleStandardDeviation() ) + "\t" );
2147 per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
2148 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
2149 per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
2150 per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
2151 final SortedSet<String> mpds = gwcd.getMostPromiscuosDomain();
2152 for( final String mpd : mpds ) {
2153 per_genome_domain_promiscuity_statistics_writer.write( mpd + " " );
2155 per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
2157 catch ( final IOException e ) {
2158 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2160 if ( input_file_properties[ i ].length == 3 ) {
2161 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
2162 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
2163 + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
2166 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
2167 + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
2168 + dc_outfile + "\"" );
2172 public static void writeDomainSimilaritiesToFile( final StringBuilder html_desc,
2173 final StringBuilder html_title,
2174 final Writer simple_tab_writer,
2175 final Writer single_writer,
2176 Map<Character, Writer> split_writers,
2177 final SortedSet<DomainSimilarity> similarities,
2178 final boolean treat_as_binary,
2179 final List<Species> species_order,
2180 final PrintableDomainSimilarity.PRINT_OPTION print_option,
2181 final DomainSimilarity.DomainSimilarityScoring scoring,
2182 final boolean verbose,
2183 final Map<String, Integer> tax_code_to_id_map,
2184 final Phylogeny phy ) throws IOException {
2185 if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
2186 split_writers = new HashMap<Character, Writer>();
2187 split_writers.put( '_', single_writer );
2189 switch ( print_option ) {
2190 case SIMPLE_TAB_DELIMITED:
2193 for( final Character key : split_writers.keySet() ) {
2194 final Writer w = split_writers.get( key );
2195 w.write( "<html>" );
2196 w.write( SurfacingConstants.NL );
2198 writeHtmlHead( w, "DC analysis (" + html_title + ") " + key.toString().toUpperCase() );
2201 writeHtmlHead( w, "DC analysis (" + html_title + ")" );
2203 w.write( SurfacingConstants.NL );
2204 w.write( "<body>" );
2205 w.write( SurfacingConstants.NL );
2206 w.write( html_desc.toString() );
2207 w.write( SurfacingConstants.NL );
2209 w.write( SurfacingConstants.NL );
2211 w.write( SurfacingConstants.NL );
2212 w.write( "<table>" );
2213 w.write( SurfacingConstants.NL );
2214 w.write( "<tr><td><b>Domains:</b></td></tr>" );
2215 w.write( SurfacingConstants.NL );
2220 for( final DomainSimilarity similarity : similarities ) {
2221 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2222 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
2224 if ( single_writer != null ) {
2225 single_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
2226 + similarity.getDomainId() + "</a></b></td></tr>" );
2227 single_writer.write( SurfacingConstants.NL );
2230 Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
2232 if ( local_writer == null ) {
2233 local_writer = split_writers.get( '0' );
2235 local_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
2236 + similarity.getDomainId() + "</a></b></td></tr>" );
2237 local_writer.write( SurfacingConstants.NL );
2240 for( final Writer w : split_writers.values() ) {
2241 w.write( "</table>" );
2242 w.write( SurfacingConstants.NL );
2244 w.write( SurfacingConstants.NL );
2246 w.write( "<table>" );
2247 w.write( SurfacingConstants.NL );
2248 w.write( "<tr><td><b>" );
2249 w.write( "Species group colors:" );
2250 w.write( "</b></td></tr>" );
2251 w.write( SurfacingConstants.NL );
2252 writeColorLabels( "Deuterostomia", TaxonomyColors.DEUTEROSTOMIA_COLOR, w );
2253 writeColorLabels( "Protostomia", TaxonomyColors.PROTOSTOMIA_COLOR, w );
2254 writeColorLabels( "Cnidaria", TaxonomyColors.CNIDARIA_COLOR, w );
2255 writeColorLabels( "Placozoa", TaxonomyColors.PLACOZOA_COLOR, w );
2256 writeColorLabels( "Ctenophora (comb jellies)", TaxonomyColors.CTENOPHORA_COLOR, w );
2257 writeColorLabels( "Porifera (sponges)", TaxonomyColors.PORIFERA_COLOR, w );
2258 writeColorLabels( "Choanoflagellida", TaxonomyColors.CHOANOFLAGELLIDA, w );
2259 writeColorLabels( "Ichthyosporea & Filasterea", TaxonomyColors.ICHTHYOSPOREA_AND_FILASTEREA, w );
2260 writeColorLabels( "Dikarya (Ascomycota & Basidiomycota, so-called \"higher fungi\")",
2261 TaxonomyColors.DIKARYA_COLOR,
2263 writeColorLabels( "other Fungi", TaxonomyColors.OTHER_FUNGI_COLOR, w );
2264 writeColorLabels( "Nucleariidae and Fonticula group",
2265 TaxonomyColors.NUCLEARIIDAE_AND_FONTICULA_GROUP_COLOR,
2267 writeColorLabels( "Amoebozoa", TaxonomyColors.AMOEBOZOA_COLOR, w );
2268 writeColorLabels( "Embryophyta (plants)", TaxonomyColors.EMBRYOPHYTA_COLOR, w );
2269 writeColorLabels( "Chlorophyta (green algae)", TaxonomyColors.CHLOROPHYTA_COLOR, w );
2270 writeColorLabels( "Rhodophyta (red algae)", TaxonomyColors.RHODOPHYTA_COLOR, w );
2271 writeColorLabels( "Glaucocystophyce (Glaucophyta)", TaxonomyColors.GLAUCOPHYTA_COLOR, w );
2272 writeColorLabels( "Hacrobia (Cryptophyta & Haptophyceae & Centroheliozoa)",
2273 TaxonomyColors.HACROBIA_COLOR,
2275 writeColorLabels( "Stramenopiles (Chromophyta, heterokonts)", TaxonomyColors.STRAMENOPILES_COLOR, w );
2276 writeColorLabels( "Alveolata", TaxonomyColors.ALVEOLATA_COLOR, w );
2277 writeColorLabels( "Rhizaria", TaxonomyColors.RHIZARIA_COLOR, w );
2278 writeColorLabels( "Excavata", TaxonomyColors.EXCAVATA_COLOR, w );
2279 writeColorLabels( "Apusozoa", TaxonomyColors.APUSOZOA_COLOR, w );
2280 writeColorLabels( "Archaea", TaxonomyColors.ARCHAEA_COLOR, w );
2281 writeColorLabels( "Bacteria", TaxonomyColors.BACTERIA_COLOR, w );
2282 w.write( "</table>" );
2283 w.write( SurfacingConstants.NL );
2286 w.write( SurfacingConstants.NL );
2287 w.write( "<table>" );
2288 w.write( SurfacingConstants.NL );
2291 for( final DomainSimilarity similarity : similarities ) {
2292 if ( ( species_order != null ) && !species_order.isEmpty() ) {
2293 ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
2295 if ( simple_tab_writer != null ) {
2296 simple_tab_writer.write( similarity.toStringBuffer( PRINT_OPTION.SIMPLE_TAB_DELIMITED,
2298 null ).toString() );
2300 if ( single_writer != null ) {
2301 single_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map, phy ).toString() );
2302 single_writer.write( SurfacingConstants.NL );
2305 Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
2307 if ( local_writer == null ) {
2308 local_writer = split_writers.get( '0' );
2310 local_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map, phy ).toString() );
2311 local_writer.write( SurfacingConstants.NL );
2314 switch ( print_option ) {
2316 for( final Writer w : split_writers.values() ) {
2317 w.write( SurfacingConstants.NL );
2318 w.write( "</table>" );
2319 w.write( SurfacingConstants.NL );
2320 w.write( "</font>" );
2321 w.write( SurfacingConstants.NL );
2322 w.write( "</body>" );
2323 w.write( SurfacingConstants.NL );
2324 w.write( "</html>" );
2325 w.write( SurfacingConstants.NL );
2331 for( final Writer w : split_writers.values() ) {
2336 public static void writeHtmlHead( final Writer w, final String title ) throws IOException {
2337 w.write( SurfacingConstants.NL );
2338 w.write( "<head>" );
2339 w.write( "<title>" );
2341 w.write( "</title>" );
2342 w.write( SurfacingConstants.NL );
2343 w.write( "<style>" );
2344 w.write( SurfacingConstants.NL );
2345 w.write( "a:visited { color : #000066; text-decoration : none; }" );
2346 w.write( SurfacingConstants.NL );
2347 w.write( "a:link { color : #000066; text-decoration : none; }" );
2348 w.write( SurfacingConstants.NL );
2349 w.write( "a:active { color : ##000066; text-decoration : none; }" );
2350 w.write( SurfacingConstants.NL );
2351 w.write( "a:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; }" );
2352 w.write( SurfacingConstants.NL );
2354 w.write( "a.pl:visited { color : #505050; text-decoration : none; font-size: 7px;}" );
2355 w.write( SurfacingConstants.NL );
2356 w.write( "a.pl:link { color : #505050; text-decoration : none; font-size: 7px;}" );
2357 w.write( SurfacingConstants.NL );
2358 w.write( "a.pl:active { color : #505050; text-decoration : none; font-size: 7px;}" );
2359 w.write( SurfacingConstants.NL );
2360 w.write( "a.pl:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; font-size: 7px;}" );
2361 w.write( SurfacingConstants.NL );
2363 w.write( "a.ps:visited { color : #707070; text-decoration : none; font-size: 7px;}" );
2364 w.write( SurfacingConstants.NL );
2365 w.write( "a.ps:link { color : #707070; text-decoration : none; font-size: 7px;}" );
2366 w.write( SurfacingConstants.NL );
2367 w.write( "a.ps:active { color : #707070; text-decoration : none; font-size: 7px;}" );
2368 w.write( SurfacingConstants.NL );
2369 w.write( "a.ps:hover { color : #FFFFFF; background-color : #000000; text-decoration : none; font-size: 7px;}" );
2370 w.write( SurfacingConstants.NL );
2372 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
2373 w.write( SurfacingConstants.NL );
2374 w.write( "h1 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
2375 w.write( SurfacingConstants.NL );
2376 w.write( "h2 { color : #0000FF; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
2377 w.write( SurfacingConstants.NL );
2378 w.write( "</style>" );
2379 w.write( SurfacingConstants.NL );
2380 w.write( "</head>" );
2381 w.write( SurfacingConstants.NL );
2384 public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
2385 final String filename,
2386 final Format format ) {
2387 final File outfile = new File( filename );
2388 checkForOutputFileWriteability( outfile );
2390 final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
2391 matrix.toWriter( out, format );
2395 catch ( final IOException e ) {
2396 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2398 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
2401 public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
2402 checkForOutputFileWriteability( matrix_outfile );
2404 final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
2405 for( final DistanceMatrix distance_matrix : matrices ) {
2406 out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
2407 out.write( ForesterUtil.LINE_SEPARATOR );
2412 catch ( final IOException e ) {
2413 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2415 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
2418 public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
2419 final PhylogenyWriter writer = new PhylogenyWriter();
2421 writer.toPhyloXML( new File( filename ), phylogeny, 1 );
2423 catch ( final IOException e ) {
2424 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
2427 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
    /**
     * Writes NEXUS (binary) presence/absence matrices for domains and for binary
     * domain combinations, derived from the given genome-wide combinable domains.
     * Any exception is fatal (program terminates via ForesterUtil.fatalError).
     *
     * NOTE(review): the positive-filter argument of the first call appears truncated
     * in this view ("positive_filter_file == null ? null" with no else operand) —
     * presumably the filter set is passed when a positive filter file was given;
     * confirm against the full source.
     */
    public static void writePresentToNexus( final File output_file,
                                            final File positive_filter_file,
                                            final SortedSet<String> filter,
                                            final List<GenomeWideCombinableDomains> gwcd_list ) {
            // Domain presence/absence matrix (one row per genome, NEXUS binary coding).
            writeMatrixToFile( DomainParsimonyCalculator.createMatrixOfDomainPresenceOrAbsence( gwcd_list,
                                                                                                positive_filter_file == null ? null
                               output_file + surfacing.DOMAINS_PRESENT_NEXUS,
                               Format.NEXUS_BINARY );
            // Binary domain combination presence/absence matrix.
            writeMatrixToFile( DomainParsimonyCalculator.createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ),
                               output_file + surfacing.BDC_PRESENT_NEXUS,
                               Format.NEXUS_BINARY );
        // Any failure while building or writing the matrices aborts the run.
        catch ( final Exception e ) {
            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
    /**
     * For every domain id that occurs in any genome of {@code gwcd_list}, writes one
     * file (named {@code <domain><SEQ_EXTRACT_SUFFIX>}) into {@code output_dir}
     * listing the proteins containing that domain, extracted from
     * {@code protein_lists_per_species}. I/O failure is fatal.
     *
     * @param output_dir                directory receiving one list file per domain
     * @param protein_lists_per_species proteins grouped by species
     * @param gwcd_list                 per-genome combinable domains (source of domain ids)
     * @param domain_e_cutoff           E-value cutoff — presumably forwarded to
     *                                  extractProteinNames (its argument list is elided here); confirm
     */
    public static void writeProteinListsForAllSpecies( final File output_dir,
                                                       final SortedMap<Species, List<Protein>> protein_lists_per_species,
                                                       final List<GenomeWideCombinableDomains> gwcd_list,
                                                       final double domain_e_cutoff ) {
        // Union of all domain ids over all genomes, sorted for deterministic output order.
        final SortedSet<String> all_domains = new TreeSet<String>();
        for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
            all_domains.addAll( gwcd.getAllDomainIds() );
        // One output file per distinct domain.
        for( final String domain : all_domains ) {
            final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + surfacing.SEQ_EXTRACT_SUFFIX );
            checkForOutputFileWriteability( out );
                final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
                extractProteinNames( protein_lists_per_species,
                                     proteins_file_writer,
                                     surfacing.LIMIT_SPEC_FOR_PROT_EX,
                proteins_file_writer.close();
            catch ( final IOException e ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );
    /**
     * Writes a bracketed group of HTML taxonomy links ("[uniprot|eol|scholar|google]")
     * for the given species code to {@code writer}.
     * Only species strings longer than one character and without an underscore past
     * position 0 are linked (composite/underscore-separated labels are skipped).
     *
     * @param writer             destination for the HTML fragment
     * @param species            species/taxonomy code used in the link URLs
     * @param tax_code_to_id_map optional mapping from species code to UniProt taxonomy
     *                           id; when present for this species, a uniprot link is added
     * @throws IOException if writing fails
     */
    public static void writeTaxonomyLinks( final Writer writer,
                                           final String species,
                                           final Map<String, Integer> tax_code_to_id_map ) throws IOException {
        if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
            writer.write( " [" );
            // UniProt link only when a numeric taxonomy id is known for this code.
            if ( ( tax_code_to_id_map != null ) && tax_code_to_id_map.containsKey( species ) ) {
                writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_TAXONOMY_ID_LINK
                        + tax_code_to_id_map.get( species ) + "\" target=\"taxonomy_window\">uniprot</a>" );
            // The remaining links use the species code directly as the search term.
            writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
                    + "\" target=\"taxonomy_window\">eol</a>" );
            writer.write( "|" );
            writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_SCHOLAR_SEARCH + species
                    + "\" target=\"taxonomy_window\">scholar</a>" );
            writer.write( "|" );
            writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_WEB_SEARCH_LINK + species
                    + "\" target=\"taxonomy_window\">google</a>" );
            writer.write( "]" );
2500 private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
2501 if ( map.containsKey( s ) ) {
2502 map.put( s, map.get( s ) + 1 );
    /**
     * Analyzes a Fitch-parsimony-annotated phylogeny for domain combinations (DCs)
     * that were gained independently more than once, and writes several report files:
     * a histogram of gain counts, DC lists per gain count (plain and for GO mapping,
     * with and without duplicates), LCA rank/species counts for multiply-gained DCs,
     * and (optionally) protein/domain length statistics. I/O failures only produce
     * a warning; the trailing programMessage calls report the written files.
     *
     * NOTE(review): several lines (try blocks, some braces, a few statements) are
     * elided in this view; comments below describe only what the visible code shows.
     */
    private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
                                                                    final String outfilename_for_counts,
                                                                    final String outfilename_for_dc,
                                                                    final String outfilename_for_dc_for_go_mapping,
                                                                    final String outfilename_for_dc_for_go_mapping_unique,
                                                                    final String outfilename_for_rank_counts,
                                                                    final String outfilename_for_ancestor_species_counts,
                                                                    final String outfilename_for_protein_stats,
                                                                    final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
                                                                    final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
                                                                    final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
        //        if ( protein_length_stats_by_dc != null ) {
        //            for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
        //                System.out.print( entry.getKey().toString() );
        //                System.out.print( ": " );
        //                double[] a = entry.getValue().getDataAsDoubleArray();
        //                for( int i = 0; i < a.length; i++ ) {
        //                    System.out.print( a[ i ] + " " );
        //                System.out.println();
        //        if ( domain_number_stats_by_dc != null ) {
        //            for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
        //                System.out.print( entry.getKey().toString() );
        //                System.out.print( ": " );
        //                double[] a = entry.getValue().getDataAsDoubleArray();
        //                for( int i = 0; i < a.length; i++ ) {
        //                    System.out.print( a[ i ] + " " );
        //                System.out.println();
            final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
            final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
            final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
            final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
            // Count, over the whole tree, how many nodes gained each DC (= number of
            // independent gains under the Fitch reconstruction).
            final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
            for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
                final PhylogenyNode n = it.next();
                final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
                for( final String dc : gained_dc ) {
                    if ( dc_gain_counts.containsKey( dc ) ) {
                        dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
                        dc_gain_counts.put( dc, 1 );
            // Invert into a histogram: gain count -> number of DCs with that count,
            // plus per-count DC lists (for output and for GO mapping).
            final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
            final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
            final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
            final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
            final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
            final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
            final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
            final Set<String> dcs = dc_gain_counts.keySet();
            final SortedSet<String> more_than_once = new TreeSet<String>();
            DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
            DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
            DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
            final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
            long gained_multiple_times_domain_length_sum = 0;
            long gained_once_domain_length_sum = 0;
            long gained_multiple_times_domain_length_count = 0;
            long gained_once_domain_length_count = 0;
            for( final String dc : dcs ) {
                final int count = dc_gain_counts.get( dc );
                if ( histogram.containsKey( count ) ) {
                    histogram.put( count, histogram.get( count ) + 1 );
                    domain_lists.get( count ).append( ", " + dc );
                    domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
                    domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
                    // First DC seen with this gain count: initialize all per-count containers.
                    histogram.put( count, 1 );
                    domain_lists.put( count, new StringBuilder( dc ) );
                    final PriorityQueue<String> q = new PriorityQueue<String>();
                    q.addAll( splitDomainCombination( dc ) );
                    domain_lists_go.put( count, q );
                    final SortedSet<String> set = new TreeSet<String>();
                    set.addAll( splitDomainCombination( dc ) );
                    domain_lists_go_unique.put( count, set );
                // Aggregate per-count mean statistics (one mean value added per DC).
                if ( protein_length_stats_by_dc != null ) {
                    if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
                        dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
                    dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
                            .arithmeticMean() );
                if ( domain_number_stats_by_dc != null ) {
                    if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
                        dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
                    dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
                            .arithmeticMean() );
                if ( domain_length_stats_by_domain != null ) {
                    if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
                        dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
                    // A DC is encoded as "domainA=domainB"; add the mean length of each side.
                    final String[] ds = dc.split( "=" );
                    dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                            .get( ds[ 0 ] ).arithmeticMean() );
                    dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                            .get( ds[ 1 ] ).arithmeticMean() );
                    // Branch for DCs gained more than once (the elided condition
                    // presumably tests count > 1 — confirm against full source).
                    more_than_once.add( dc );
                    if ( protein_length_stats_by_dc != null ) {
                        final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_multiple_times_lengths_stats.addValue( element );
                    if ( domain_number_stats_by_dc != null ) {
                        final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_multiple_times_domain_count_stats.addValue( element );
                    if ( domain_length_stats_by_domain != null ) {
                        final String[] ds = dc.split( "=" );
                        final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                        final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                        for( final double element : s0.getData() ) {
                            gained_multiple_times_domain_length_sum += element;
                            ++gained_multiple_times_domain_length_count;
                        for( final double element : s1.getData() ) {
                            gained_multiple_times_domain_length_sum += element;
                            ++gained_multiple_times_domain_length_count;
                    // Branch for DCs gained exactly once: same aggregation into the
                    // "gained once" accumulators.
                    if ( protein_length_stats_by_dc != null ) {
                        final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_once_lengths_stats.addValue( element );
                    if ( domain_number_stats_by_dc != null ) {
                        final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                        for( final double element : s.getData() ) {
                            gained_once_domain_count_stats.addValue( element );
                    if ( domain_length_stats_by_domain != null ) {
                        final String[] ds = dc.split( "=" );
                        final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                        final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                        for( final double element : s0.getData() ) {
                            gained_once_domain_length_sum += element;
                            ++gained_once_domain_length_count;
                        for( final double element : s1.getData() ) {
                            gained_once_domain_length_sum += element;
                            ++gained_once_domain_length_count;
            // Emit histogram and per-count DC lists.
            final Set<Integer> histogram_keys = histogram.keySet();
            for( final Integer histogram_key : histogram_keys ) {
                final int count = histogram.get( histogram_key );
                final StringBuilder dc = domain_lists.get( histogram_key );
                out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
                out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
                out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
                // Sort the individual domain ids of each per-count queue for stable output.
                final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
                Arrays.sort( sorted );
                for( final Object domain : sorted ) {
                    out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
                out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
                for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
                    out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
            out_dc_for_go_mapping.close();
            out_dc_for_go_mapping_unique.close();
            // For every multiply-gained DC, count the taxonomic rank and species name of
            // the LCA of each pair of external nodes that gained it.
            final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
            final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
            for( final String dc : more_than_once ) {
                final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
                for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
                    final PhylogenyNode n = it.next();
                    if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
                // All unordered pairs of gaining nodes.
                for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
                    for( int j = i + 1; j < nodes.size(); ++j ) {
                        final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
                        String rank = "unknown";
                        if ( lca.getNodeData().isHasTaxonomy()
                                && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
                            rank = lca.getNodeData().getTaxonomy().getRank();
                        addToCountMap( lca_rank_counts, rank );
                        // Prefer scientific name, then common name, then the node name.
                        if ( lca.getNodeData().isHasTaxonomy()
                                && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
                            lca_species = lca.getNodeData().getTaxonomy().getScientificName();
                        else if ( lca.getNodeData().isHasTaxonomy()
                                && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
                            lca_species = lca.getNodeData().getTaxonomy().getCommonName();
                            lca_species = lca.getName();
                        addToCountMap( lca_ancestor_species_counts, lca_species );
            final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
            final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
            ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
            ForesterUtil.map2writer( out_for_ancestor_species_counts,
                                     lca_ancestor_species_counts,
                                     ForesterUtil.LINE_SEPARATOR );
            out_for_rank_counts.close();
            out_for_ancestor_species_counts.close();
            // Optional protein-statistics report.
            if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
                    && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
                final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
                w.write( "Domain Lengths: " );
                if ( domain_length_stats_by_domain != null ) {
                    for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
                        w.write( entry.getKey().toString() );
                        w.write( "\t" + entry.getValue().arithmeticMean() );
                        w.write( "\t" + entry.getValue().median() );
                w.write( "Protein Lengths: " );
                if ( protein_length_stats_by_dc != null ) {
                    for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
                        w.write( entry.getKey().toString() );
                        w.write( "\t" + entry.getValue().arithmeticMean() );
                        w.write( "\t" + entry.getValue().median() );
                w.write( "Number of domains: " );
                if ( domain_number_stats_by_dc != null ) {
                    for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
                        w.write( entry.getKey().toString() );
                        w.write( "\t" + entry.getValue().arithmeticMean() );
                        w.write( "\t" + entry.getValue().median() );
                w.write( "Gained once, domain lengths:" );
                w.write( "N: " + gained_once_domain_length_count );
                w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
                w.write( "Gained multiple times, domain lengths:" );
                w.write( "N: " + gained_multiple_times_domain_length_count );
                        + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
                w.write( "Gained once, protein lengths:" );
                w.write( gained_once_lengths_stats.toString() );
                gained_once_lengths_stats = null;
                w.write( "Gained once, domain counts:" );
                w.write( gained_once_domain_count_stats.toString() );
                gained_once_domain_count_stats = null;
                w.write( "Gained multiple times, protein lengths:" );
                w.write( gained_multiple_times_lengths_stats.toString() );
                gained_multiple_times_lengths_stats = null;
                w.write( "Gained multiple times, domain counts:" );
                w.write( gained_multiple_times_domain_count_stats.toString() );
        // I/O problems are non-fatal here: only a warning is printed.
        catch ( final IOException e ) {
            ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
                + outfilename_for_counts + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
                + outfilename_for_dc + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME,
                                     "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
                                             + outfilename_for_dc_for_go_mapping + "]" );
        ForesterUtil.programMessage( surfacing.PRG_NAME,
                                     "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
                                             + outfilename_for_dc_for_go_mapping_unique + "]" );
2844 private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
2845 final boolean get_gains ) {
2846 final SortedSet<String> domains = new TreeSet<String>();
2847 for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
2848 final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
2850 domains.addAll( chars.getGainedCharacters() );
2853 domains.addAll( chars.getLostCharacters() );
    /**
     * Creates (if necessary) and returns the output directory for per-node domain
     * gain/loss files, nested as:
     * {@code <parent of outfile>/<base_dir>/(DC|DOMAINS)/(GAINS|LOSSES|PRESENT)}.
     * Each level is created with {@code mkdir()} only when it does not yet exist.
     *
     * @param base_dir            name of the first-level subdirectory
     * @param domain_combinations true selects the "DC" branch, false "DOMAINS"
     * @param state               GAIN/LOSS select "GAINS"/"LOSSES"; any other value "PRESENT"
     * @param outfile             file whose parent directory anchors the tree
     * @return the innermost directory (its own mkdir happens after return is elided here — confirm)
     */
    private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
                                                            final boolean domain_combinations,
                                                            final CharacterStateMatrix.GainLossStates state,
                                                            final String outfile ) {
        File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
                + ForesterUtil.FILE_SEPARATOR + base_dir );
        if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
        // Second level: domain combinations vs. single domains.
        if ( domain_combinations ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "DC" );
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
        if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
        // Third level: the gain/loss state being reported.
        if ( state == GainLossStates.GAIN ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "GAINS" );
        else if ( state == GainLossStates.LOSS ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
            per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
                    + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
        if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
            per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
        return per_node_go_mapped_domain_gain_loss_files_base_dir;
2897 private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
2898 final SortedMap<String, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
2899 final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
2900 for( final String domain_id : cds.keySet() ) {
2901 final CombinableDomains cd = cds.get( domain_id );
2902 binary_combinations.addAll( cd.toBinaryDomainCombinations() );
2904 return binary_combinations;
    /**
     * Writes an HTML fragment summarizing the given statistics: an optional ASCII
     * histogram in a {@code <tt><pre>} block, followed by a table with N, min, max,
     * mean and (when N > 1) the sample standard deviation.
     *
     * @param stats statistics to report
     * @param histo optional histogram; skipped when null
     * @param w     destination writer
     * @throws IOException if writing fails
     */
    private static void printSomeStats( final DescriptiveStatistics stats, final AsciiHistogram histo, final Writer w )
            throws IOException {
        w.write( SurfacingConstants.NL );
        w.write( "<tt><pre>" );
        w.write( SurfacingConstants.NL );
        if ( histo != null ) {
            // 20 bins, '|' as bar character, width 40, label interval 5.
            w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
            w.write( SurfacingConstants.NL );
        w.write( "</pre></tt>" );
        w.write( SurfacingConstants.NL );
        w.write( "<table>" );
        w.write( SurfacingConstants.NL );
        w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
        w.write( SurfacingConstants.NL );
        w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
        w.write( SurfacingConstants.NL );
        w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
        w.write( SurfacingConstants.NL );
        w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
        w.write( SurfacingConstants.NL );
        // SD is undefined for a single sample, hence the n/a branch.
        if ( stats.getN() > 1 ) {
            w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
            w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
        w.write( SurfacingConstants.NL );
        w.write( "</table>" );
        w.write( SurfacingConstants.NL );
        w.write( SurfacingConstants.NL );
2943 private static List<String> splitDomainCombination( final String dc ) {
2944 final String[] s = dc.split( "=" );
2945 if ( s.length != 2 ) {
2946 ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
2950 final List<String> l = new ArrayList<String>( 2 );
    /**
     * Writes three files derived from the set of all encountered Pfam domains:
     * (1) the plain list of all Pfams, (2) the subset having at least one GO
     * mapping, and (3) a summary file listing unmapped Pfams followed by counts
     * and percentages per GO namespace. The same counts are also echoed to the
     * program log via {@code ForesterUtil.programMessage}.
     * I/O failure only produces a warning.
     *
     * NOTE(review): all percentages use integer division, so they are truncated;
     * and they divide by {@code all_pfams_encountered.size()} — an empty set would
     * divide by zero. Confirm callers never pass an empty set.
     */
    private static void writeAllEncounteredPfamsToFile( final Map<String, List<GoId>> domain_id_to_go_ids_map,
                                                        final Map<GoId, GoTerm> go_id_to_term_map,
                                                        final String outfile_name,
                                                        final SortedSet<String> all_pfams_encountered ) {
        final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
        final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
                + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
        final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
        // Counters over all encountered Pfams / their GO mappings.
        int biological_process_counter = 0;
        int cellular_component_counter = 0;
        int molecular_function_counter = 0;
        int pfams_with_mappings_counter = 0;
        int pfams_without_mappings_counter = 0;
        int pfams_without_mappings_to_bp_or_mf_counter = 0;
        int pfams_with_mappings_to_bp_or_mf_counter = 0;
            final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
            final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
            final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
            summary_writer.write( "# Pfam to GO mapping summary" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Actual summary is at the end of this file." );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Encountered Pfams without a GO mapping:" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            for( final String pfam : all_pfams_encountered ) {
                all_pfams_encountered_writer.write( pfam );
                all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
                final String domain_id = new String( pfam );
                if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
                    ++pfams_with_mappings_counter;
                    all_pfams_encountered_with_go_annotation_writer.write( pfam );
                    all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
                    final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
                    // Per-Pfam flags: counted once per namespace even with multiple GO ids.
                    boolean maps_to_bp = false;
                    boolean maps_to_cc = false;
                    boolean maps_to_mf = false;
                    for( final GoId go_id : go_ids ) {
                        final GoTerm go_term = go_id_to_term_map.get( go_id );
                        if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
                        else if ( go_term.getGoNameSpace().isCellularComponent() ) {
                        else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
                        ++biological_process_counter;
                        ++cellular_component_counter;
                        ++molecular_function_counter;
                    if ( maps_to_bp || maps_to_mf ) {
                        ++pfams_with_mappings_to_bp_or_mf_counter;
                        ++pfams_without_mappings_to_bp_or_mf_counter;
                    // No GO mapping at all: list the Pfam in the summary file.
                    ++pfams_without_mappings_to_bp_or_mf_counter;
                    ++pfams_without_mappings_counter;
                    summary_writer.write( pfam );
                    summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            all_pfams_encountered_writer.close();
            all_pfams_encountered_with_go_annotation_writer.close();
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + all_pfams_encountered.size()
                    + "] encountered Pfams to: \"" + all_pfams_encountered_file + "\"" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote all [" + pfams_with_mappings_counter
                    + "] encountered Pfams with GO mappings to: \"" + all_pfams_encountered_with_go_annotation_file
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote summary (including all ["
                    + pfams_without_mappings_counter + "] encountered Pfams without GO mappings) to: \""
                    + encountered_pfams_summary_file + "\"" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Sum of Pfams encountered                : "
                    + all_pfams_encountered.size() );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without a mapping                 : "
                    + pfams_without_mappings_counter + " ["
                    + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams without mapping to proc. or func. : "
                    + pfams_without_mappings_to_bp_or_mf_counter + " ["
                    + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping                    : "
                    + pfams_with_mappings_counter + " ["
                    + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with a mapping to proc. or func.  : "
                    + pfams_with_mappings_to_bp_or_mf_counter + " ["
                    + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to biological process: "
                    + biological_process_counter + " ["
                    + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to molecular function: "
                    + molecular_function_counter + " ["
                    + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
            ForesterUtil.programMessage( surfacing.PRG_NAME, "Pfams with mapping to cellular component: "
                    + cellular_component_counter + " ["
                    + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
            // Same summary, appended to the summary file as '#' comment lines.
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Sum of Pfams encountered                : " + all_pfams_encountered.size() );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams without a mapping                 : " + pfams_without_mappings_counter
                    + " [" + ( ( 100 * pfams_without_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams without mapping to proc. or func. : "
                    + pfams_without_mappings_to_bp_or_mf_counter + " ["
                    + ( ( 100 * pfams_without_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams with a mapping                    : " + pfams_with_mappings_counter + " ["
                    + ( ( 100 * pfams_with_mappings_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams with a mapping to proc. or func.  : "
                    + pfams_with_mappings_to_bp_or_mf_counter + " ["
                    + ( ( 100 * pfams_with_mappings_to_bp_or_mf_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams with mapping to biological process: " + biological_process_counter + " ["
                    + ( ( 100 * biological_process_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams with mapping to molecular function: " + molecular_function_counter + " ["
                    + ( ( 100 * molecular_function_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.write( "# Pfams with mapping to cellular component: " + cellular_component_counter + " ["
                    + ( ( 100 * cellular_component_counter ) / all_pfams_encountered.size() ) + "%]" );
            summary_writer.write( ForesterUtil.LINE_SEPARATOR );
            summary_writer.close();
        // Non-fatal: write failures are only reported as a warning.
        catch ( final IOException e ) {
            ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
3094 private final static void writeColorLabels( final String l, final Color c, final Writer w ) throws IOException {
3095 w.write( "<tr><td><b><span style=\"color:" );
3096 w.write( String.format( "#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue() ) );
3099 w.write( "</span></b></td></tr>" );
3100 w.write( SurfacingConstants.NL );
// Writes HTML table rows describing either a single domain (domain_1 empty)
// or a binary domain combination (domain_0 + domain_1): one row per associated
// GO id, with an AmiGO link, the GO term name and its namespace; plus one
// mostly-empty row if no GO annotation exists at all.
// NOTE(review): several lines of this method (else-branches, closing braces)
// are not visible in this extract; comments are limited to visible statements.
3103 private static void writeDomainData( final Map<String, List<GoId>> domain_id_to_go_ids_map,
3104 final Map<GoId, GoTerm> go_id_to_term_map,
3105 final GoNameSpace go_namespace_limit,
3107 final String domain_0,
3108 final String domain_1,
3109 final String prefix_for_html,
3110 final String character_separator_for_non_html_output,
3111 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps,
3112 final Set<GoId> all_go_ids ) throws IOException {
3113 boolean any_go_annotation_present = false;
3114 boolean first_has_no_go = false;
3115 int domain_count = 2; // To distinguish between domains and binary domain combinations.
// A single domain (empty domain_1) presumably drops domain_count to 1 in the
// elided branch below -- TODO confirm against the full source.
3116 if ( ForesterUtil.isEmpty( domain_1 ) ) {
3119 // The following has a difficult to understand logic.
// Pass d == 0 handles domain_0; pass d == 1 handles domain_1 (combinations only).
3120 for( int d = 0; d < domain_count; ++d ) {
3121 List<GoId> go_ids = null;
3122 boolean go_annotation_present = false;
3124 if ( domain_id_to_go_ids_map.containsKey( domain_0 ) ) {
3125 go_annotation_present = true;
3126 any_go_annotation_present = true;
3127 go_ids = domain_id_to_go_ids_map.get( domain_0 );
// Remember that the first domain had no GO mapping so the second pass still
// emits the leading domain-id cell (see the "first" flag below).
3130 first_has_no_go = true;
3134 if ( domain_id_to_go_ids_map.containsKey( domain_1 ) ) {
3135 go_annotation_present = true;
3136 any_go_annotation_present = true;
3137 go_ids = domain_id_to_go_ids_map.get( domain_1 );
3140 if ( go_annotation_present ) {
// "first" marks the row that carries the domain-id cell; later rows for the
// same domain get an empty leading cell instead.
3141 boolean first = ( ( d == 0 ) || ( ( d == 1 ) && first_has_no_go ) );
3142 for( final GoId go_id : go_ids ) {
3143 out.write( "<tr>" );
3146 writeDomainIdsToHtml( out,
3150 domain_id_to_secondary_features_maps );
3153 out.write( "<td></td>" );
// Fail fast on a GO id missing from the id-to-term map: this indicates an
// inconsistent mapping input, not a recoverable condition.
3155 if ( !go_id_to_term_map.containsKey( go_id ) ) {
3156 throw new IllegalArgumentException( "GO-id [" + go_id + "] not found in GO-id to GO-term map" );
3158 final GoTerm go_term = go_id_to_term_map.get( go_id );
// Only emit terms in the requested namespace (null limit = all namespaces).
3159 if ( ( go_namespace_limit == null ) || go_namespace_limit.equals( go_term.getGoNameSpace() ) ) {
3160 // final String top = GoUtils.getPenultimateGoTerm( go_term, go_id_to_term_map ).getName();
3161 final String go_id_str = go_id.getId();
3162 out.write( "<td>" );
// Hyperlink the GO id to the AmiGO browser in a dedicated window.
3163 out.write( "<a href=\"" + SurfacingConstants.AMIGO_LINK + go_id_str
3164 + "\" target=\"amigo_window\">" + go_id_str + "</a>" );
3165 out.write( "</td><td>" );
3166 out.write( go_term.getName() );
// For binary combinations, tag the term with the index of its domain (0 or 1).
3167 if ( domain_count == 2 ) {
3168 out.write( " (" + d + ")" );
3170 out.write( "</td><td>" );
3171 // out.write( top );
3172 // out.write( "</td><td>" );
3174 out.write( go_term.getGoNameSpace().toShortString() );
3176 out.write( "</td>" );
// Collect every emitted GO id for the caller, if a sink was provided.
3177 if ( all_go_ids != null ) {
3178 all_go_ids.add( go_id );
// Term outside the namespace limit: emit empty cells to keep columns aligned.
3182 out.write( "<td>" );
3183 out.write( "</td><td>" );
3184 out.write( "</td><td>" );
3185 out.write( "</td><td>" );
3186 out.write( "</td>" );
3188 out.write( "</tr>" );
3189 out.write( SurfacingConstants.NL );
3192 } // for( int d = 0; d < domain_count; ++d )
// No GO annotation for any involved domain: still emit one row so the domain
// (combination) appears in the table.
3193 if ( !any_go_annotation_present ) {
3194 out.write( "<tr>" );
3195 writeDomainIdsToHtml( out, domain_0, domain_1, prefix_for_html, domain_id_to_secondary_features_maps );
3196 out.write( "<td>" );
3197 out.write( "</td><td>" );
3198 out.write( "</td><td>" );
3199 out.write( "</td><td>" );
3200 out.write( "</td>" );
3201 out.write( "</tr>" );
3202 out.write( SurfacingConstants.NL );
// Writes the leading HTML table cell containing the (Pfam-linked) domain id(s).
// NOTE(review): the lines handling domain_1 and the secondary-features maps are
// not visible in this extract; only the domain_0 handling is shown below.
3206 private static void writeDomainIdsToHtml( final Writer out,
3207 final String domain_0,
3208 final String domain_1,
3209 final String prefix_for_detailed_html,
3210 final Map<String, Set<String>>[] domain_id_to_secondary_features_maps )
3211 throws IOException {
3212 out.write( "<td>" );
// Optional prefix (for detailed output) written before the first domain id.
3213 if ( !ForesterUtil.isEmpty( prefix_for_detailed_html ) ) {
3214 out.write( prefix_for_detailed_html );
// Link the domain id to its Pfam family page.
3217 out.write( "<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_0 + "\">" + domain_0 + "</a>" );
3218 out.write( "</td>" );
3221 private static void writeDomainsToIndividualFilePerTreeNode( final Writer individual_files_writer,
3222 final String domain_0,
3223 final String domain_1 ) throws IOException {
3224 individual_files_writer.write( domain_0 );
3225 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
3226 if ( !ForesterUtil.isEmpty( domain_1 ) ) {
3227 individual_files_writer.write( domain_1 );
3228 individual_files_writer.write( ForesterUtil.LINE_SEPARATOR );
3232 private static void writePfamsToFile( final String outfile_name, final SortedSet<String> pfams ) {
3234 final Writer writer = new BufferedWriter( new FileWriter( new File( outfile_name ) ) );
3235 for( final String pfam : pfams ) {
3236 writer.write( pfam );
3237 writer.write( ForesterUtil.LINE_SEPARATOR );
3240 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote " + pfams.size() + " pfams to [" + outfile_name
3243 catch ( final IOException e ) {
3244 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
// Writes the given binary presence/absence matrix plus the phylogeny to a
// Nexus file: NEXUS header, taxa block, binary-characters block, trees block.
// NOTE(review): the try/flush/close lines and the rest of the exception
// message are not visible in this extract.
3248 private static void writeToNexus( final String outfile_name,
3249 final CharacterStateMatrix<BinaryStates> matrix,
3250 final Phylogeny phylogeny ) {
// The Nexus block writers below need the concrete BasicCharacterStateMatrix
// implementation, so reject anything else up front.
3251 if ( !( matrix instanceof BasicCharacterStateMatrix ) ) {
3252 throw new IllegalArgumentException( "can only write matrices of type [" + BasicCharacterStateMatrix.class
3255 final BasicCharacterStateMatrix<BinaryStates> my_matrix = ( org.forester.evoinference.matrix.character.BasicCharacterStateMatrix<BinaryStates> ) matrix;
3256 final List<Phylogeny> phylogenies = new ArrayList<Phylogeny>( 1 );
3257 phylogenies.add( phylogeny );
3259 final BufferedWriter w = new BufferedWriter( new FileWriter( outfile_name ) );
3260 w.write( NexusConstants.NEXUS );
3261 w.write( ForesterUtil.LINE_SEPARATOR );
3262 my_matrix.writeNexusTaxaBlock( w );
3263 my_matrix.writeNexusBinaryChractersBlock( w );
// Support values are omitted from the trees block (NONE conversion style).
3264 PhylogenyWriter.writeNexusTreesBlock( w, phylogenies, NH_CONVERSION_SUPPORT_VALUE_STYLE.NONE );
3267 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote Nexus file: \"" + outfile_name + "\"" );
3269 catch ( final IOException e ) {
// An unwritable Nexus file is treated as unrecoverable: abort the program.
3270 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
// Convenience overload: writes two Nexus files for the same phylogeny, one for
// domain presence/absence and one for binary domain combination
// presence/absence, distinguished by file-name suffix.
// NOTE(review): the trailing phylogeny arguments of both calls are elided in
// this extract.
3274 private static void writeToNexus( final String outfile_name,
3275 final DomainParsimonyCalculator domain_parsimony,
3276 final Phylogeny phylogeny ) {
3277 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAINS,
3278 domain_parsimony.createMatrixOfDomainPresenceOrAbsence(),
3280 writeToNexus( outfile_name + surfacing.NEXUS_EXTERNAL_DOMAIN_COMBINATIONS,
3281 domain_parsimony.createMatrixOfBinaryDomainCombinationPresenceOrAbsence(),
3285 final static class DomainComparator implements Comparator<Domain> {
3287 final private boolean _ascending;
3289 public DomainComparator( final boolean ascending ) {
3290 _ascending = ascending;
3294 public final int compare( final Domain d0, final Domain d1 ) {
3295 if ( d0.getFrom() < d1.getFrom() ) {
3296 return _ascending ? -1 : 1;
3298 else if ( d0.getFrom() > d1.getFrom() ) {
3299 return _ascending ? 1 : -1;