+ out.write( "</table>" );
+ out.write( SurfacingConstants.NL );
+ out.write( "<hr>" );
+ out.write( SurfacingConstants.NL );
+ } // for( final String id : sorted_ids ) {
+ out.write( "</body>" );
+ out.write( SurfacingConstants.NL );
+ out.write( "</html>" );
+ out.write( SurfacingConstants.NL );
+ out.flush();
+ out.close();
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
+ }
+ ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote characters detailed HTML list: \"" + filename + "\"" );
+ }
+
+ public static void writeDomainCombinationsCountsFile( final String[][] input_file_properties,
+ final File output_dir,
+ final Writer per_genome_domain_promiscuity_statistics_writer,
+ final GenomeWideCombinableDomains gwcd,
+ final int i,
+ final GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order ) {
+ File dc_outfile = new File( input_file_properties[ i ][ 1 ]
+ + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
+ if ( output_dir != null ) {
+ dc_outfile = new File( output_dir + ForesterUtil.FILE_SEPARATOR + dc_outfile );
+ }
+ checkForOutputFileWriteability( dc_outfile );
+ try {
+ final BufferedWriter out = new BufferedWriter( new FileWriter( dc_outfile ) );
+ out.write( gwcd.toStringBuilder( dc_sort_order ).toString() );
+ out.close();
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
+ }
+ final DescriptiveStatistics stats = gwcd.getPerGenomeDomainPromiscuityStatistics();
+ try {
+ per_genome_domain_promiscuity_statistics_writer.write( input_file_properties[ i ][ 1 ] + "\t" );
+ per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.arithmeticMean() ) + "\t" );
+ if ( stats.getN() < 2 ) {
+ per_genome_domain_promiscuity_statistics_writer.write( "n/a" + "\t" );
+ }
+ else {
+ per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats
+ .sampleStandardDeviation() ) + "\t" );
+ }
+ per_genome_domain_promiscuity_statistics_writer.write( FORMATTER_3.format( stats.median() ) + "\t" );
+ per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMin() + "\t" );
+ per_genome_domain_promiscuity_statistics_writer.write( ( int ) stats.getMax() + "\t" );
+ per_genome_domain_promiscuity_statistics_writer.write( stats.getN() + "\t" );
+ final SortedSet<String> mpds = gwcd.getMostPromiscuosDomain();
+ for( final String mpd : mpds ) {
+ per_genome_domain_promiscuity_statistics_writer.write( mpd + " " );
+ }
+ per_genome_domain_promiscuity_statistics_writer.write( ForesterUtil.LINE_SEPARATOR );
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
+ }
+ if ( input_file_properties[ i ].length == 3 ) {
+ ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
+ + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ", "
+ + input_file_properties[ i ][ 2 ] + ") to: \"" + dc_outfile + "\"" );
+ }
+ else {
+ ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote domain combination counts for \""
+ + input_file_properties[ i ][ 0 ] + "\" (" + input_file_properties[ i ][ 1 ] + ") to: \""
+ + dc_outfile + "\"" );
+ }
+ }
+
/**
 * Writes a sorted set of domain similarities either tab-delimited or as
 * HTML page(s). Output can go to a single writer or be split over several
 * writers keyed by the (lower-cased) first character of the domain id;
 * digits fall back to the writer registered under key '0'.
 *
 * All writers in {@code split_writers} are closed at the end.
 *
 * NOTE(review): 'treat_as_binary' and 'scoring' are accepted but never read
 * in this method body -- confirm whether they are still needed.
 */
public static void writeDomainSimilaritiesToFile( final StringBuilder html_desc,
                                                  final StringBuilder html_title,
                                                  final Writer simple_tab_writer,
                                                  final Writer single_writer,
                                                  Map<Character, Writer> split_writers,
                                                  final SortedSet<DomainSimilarity> similarities,
                                                  final boolean treat_as_binary,
                                                  final List<Species> species_order,
                                                  final PrintableDomainSimilarity.PRINT_OPTION print_option,
                                                  final DomainSimilarity.DomainSimilarityScoring scoring,
                                                  final boolean verbose,
                                                  final Map<String, Integer> tax_code_to_id_map )
        throws IOException {
    // When only a single writer is supplied, register it under the key '_'
    // so the close loop at the bottom handles both modes uniformly.
    if ( ( single_writer != null ) && ( ( split_writers == null ) || split_writers.isEmpty() ) ) {
        split_writers = new HashMap<Character, Writer>();
        split_writers.put( '_', single_writer );
    }
    // HTML mode: emit a page header and open the "Domains:" anchor index
    // table on every writer; '_' marks the single (non-split) page.
    switch ( print_option ) {
        case SIMPLE_TAB_DELIMITED:
            break;
        case HTML:
            for( final Character key : split_writers.keySet() ) {
                final Writer w = split_writers.get( key );
                w.write( "<html>" );
                w.write( SurfacingConstants.NL );
                if ( key != '_' ) {
                    addHtmlHead( w, "DC analysis (" + html_title + ") " + key.toString().toUpperCase() );
                }
                else {
                    addHtmlHead( w, "DC analysis (" + html_title + ")" );
                }
                w.write( SurfacingConstants.NL );
                w.write( "<body>" );
                w.write( SurfacingConstants.NL );
                w.write( html_desc.toString() );
                w.write( SurfacingConstants.NL );
                w.write( "<hr>" );
                w.write( SurfacingConstants.NL );
                w.write( "<br>" );
                w.write( SurfacingConstants.NL );
                w.write( "<table>" );
                w.write( SurfacingConstants.NL );
                w.write( "<tr><td><b>Domains:</b></td></tr>" );
                w.write( SurfacingConstants.NL );
            }
            break;
    }
    // First pass: write one anchor-link table row per domain id.
    // NOTE(review): this loop (and the markup below) runs even for
    // SIMPLE_TAB_DELIMITED, writing HTML rows into the writer(s) --
    // presumably the tab-delimited mode is never combined with these
    // writers; confirm against callers.
    for( final DomainSimilarity similarity : similarities ) {
        if ( ( species_order != null ) && !species_order.isEmpty() ) {
            ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
        }
        if ( single_writer != null ) {
            single_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
                    + similarity.getDomainId() + "</a></b></td></tr>" );
            single_writer.write( SurfacingConstants.NL );
        }
        else {
            // Route by lower-cased first character; unknown characters
            // (e.g. digits) fall back to the '0' writer.
            Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
                    .charAt( 0 ) );
            if ( local_writer == null ) {
                local_writer = split_writers.get( '0' );
            }
            local_writer.write( "<tr><td><b><a href=\"#" + similarity.getDomainId() + "\">"
                    + similarity.getDomainId() + "</a></b></td></tr>" );
            local_writer.write( SurfacingConstants.NL );
        }
    }
    // Close the anchor index table and open the details table.
    for( final Writer w : split_writers.values() ) {
        w.write( "</table>" );
        w.write( SurfacingConstants.NL );
        w.write( "<hr>" );
        w.write( SurfacingConstants.NL );
        w.write( "<table>" );
        w.write( SurfacingConstants.NL );
    }
    // Second pass: write the full similarity entries (and, if requested,
    // the simple tab-delimited form in parallel).
    for( final DomainSimilarity similarity : similarities ) {
        if ( ( species_order != null ) && !species_order.isEmpty() ) {
            ( ( PrintableDomainSimilarity ) similarity ).setSpeciesOrder( species_order );
        }
        if ( simple_tab_writer != null ) {
            simple_tab_writer.write( similarity.toStringBuffer( PRINT_OPTION.SIMPLE_TAB_DELIMITED,
                    tax_code_to_id_map ).toString() );
        }
        if ( single_writer != null ) {
            single_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map ).toString() );
            single_writer.write( SurfacingConstants.NL );
        }
        else {
            Writer local_writer = split_writers.get( ( similarity.getDomainId().charAt( 0 ) + "" ).toLowerCase()
                    .charAt( 0 ) );
            if ( local_writer == null ) {
                local_writer = split_writers.get( '0' );
            }
            local_writer.write( similarity.toStringBuffer( print_option, tax_code_to_id_map ).toString() );
            local_writer.write( SurfacingConstants.NL );
        }
    }
    // HTML mode: close the details table and the page.
    switch ( print_option ) {
        case HTML:
            for( final Writer w : split_writers.values() ) {
                w.write( SurfacingConstants.NL );
                w.write( "</table>" );
                w.write( SurfacingConstants.NL );
                w.write( "</font>" );
                w.write( SurfacingConstants.NL );
                w.write( "</body>" );
                w.write( SurfacingConstants.NL );
                w.write( "</html>" );
                w.write( SurfacingConstants.NL );
            }
            break;
    }
    for( final Writer w : split_writers.values() ) {
        w.close();
    }
}
+
+ private static void printSomeStats( final DescriptiveStatistics stats, final AsciiHistogram histo, final Writer w )
+ throws IOException {
+ w.write( "<hr>" );
+ w.write( "<br>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<tt><pre>" );
+ w.write( SurfacingConstants.NL );
+ if ( histo != null ) {
+ w.write( histo.toStringBuffer( 20, '|', 40, 5 ).toString() );
+ w.write( SurfacingConstants.NL );
+ }
+ w.write( "</pre></tt>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<table>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<tr><td>N: </td><td>" + stats.getN() + "</td></tr>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<tr><td>Min: </td><td>" + stats.getMin() + "</td></tr>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<tr><td>Max: </td><td>" + stats.getMax() + "</td></tr>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<tr><td>Mean: </td><td>" + stats.arithmeticMean() + "</td></tr>" );
+ w.write( SurfacingConstants.NL );
+ if ( stats.getN() > 1 ) {
+ w.write( "<tr><td>SD: </td><td>" + stats.sampleStandardDeviation() + "</td></tr>" );
+ }
+ else {
+ w.write( "<tr><td>SD: </td><td>n/a</td></tr>" );
+ }
+ w.write( SurfacingConstants.NL );
+ w.write( "</table>" );
+ w.write( SurfacingConstants.NL );
+ w.write( "<br>" );
+ w.write( SurfacingConstants.NL );
+ }
+
+ public static void writeMatrixToFile( final CharacterStateMatrix<?> matrix,
+ final String filename,
+ final Format format ) {
+ final File outfile = new File( filename );
+ checkForOutputFileWriteability( outfile );
+ try {
+ final BufferedWriter out = new BufferedWriter( new FileWriter( outfile ) );
+ matrix.toWriter( out, format );
+ out.flush();
+ out.close();
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
+ }
+ ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote matrix: \"" + filename + "\"" );
+ }
+
+ public static void writeMatrixToFile( final File matrix_outfile, final List<DistanceMatrix> matrices ) {
+ checkForOutputFileWriteability( matrix_outfile );
+ try {
+ final BufferedWriter out = new BufferedWriter( new FileWriter( matrix_outfile ) );
+ for( final DistanceMatrix distance_matrix : matrices ) {
+ out.write( distance_matrix.toStringBuffer( DistanceMatrix.Format.PHYLIP ).toString() );
+ out.write( ForesterUtil.LINE_SEPARATOR );
+ out.flush();
+ }
+ out.close();
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
+ }
+ ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote distance matrices to \"" + matrix_outfile + "\"" );
+ }
+
+ public static void writePhylogenyToFile( final Phylogeny phylogeny, final String filename ) {
+ final PhylogenyWriter writer = new PhylogenyWriter();
+ try {
+ writer.toPhyloXML( new File( filename ), phylogeny, 1 );
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "failed to write phylogeny to \"" + filename + "\": "
+ + e );
+ }
+ ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote phylogeny to \"" + filename + "\"" );
+ }
+
+ public static void writeTaxonomyLinks( final Writer writer,
+ final String species,
+ final Map<String, Integer> tax_code_to_id_map ) throws IOException {
+ if ( ( species.length() > 1 ) && ( species.indexOf( '_' ) < 1 ) ) {
+ writer.write( " [" );
+ if ( ( tax_code_to_id_map != null ) && tax_code_to_id_map.containsKey( species ) ) {
+ writer.write( "<a href=\"" + SurfacingConstants.UNIPROT_TAXONOMY_ID_LINK
+ + tax_code_to_id_map.get( species ) + "\" target=\"taxonomy_window\">uniprot</a>" );
+ }
+ else {
+ writer.write( "<a href=\"" + SurfacingConstants.EOL_LINK + species
+ + "\" target=\"taxonomy_window\">eol</a>" );
+ writer.write( "|" );
+ writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_SCHOLAR_SEARCH + species
+ + "\" target=\"taxonomy_window\">scholar</a>" );
+ writer.write( "|" );
+ writer.write( "<a href=\"" + SurfacingConstants.GOOGLE_WEB_SEARCH_LINK + species
+ + "\" target=\"taxonomy_window\">google</a>" );
+ }
+ writer.write( "]" );
+ }
+ }
+
+ private final static void addToCountMap( final Map<String, Integer> map, final String s ) {
+ if ( map.containsKey( s ) ) {
+ map.put( s, map.get( s ) + 1 );
+ }
+ else {
+ map.put( s, 1 );
+ }
+ }
+
/**
 * Counts, per domain combination (DC), how many independent gains the
 * Fitch-labeled phylogeny shows, then writes several result files:
 * a histogram of gain-count vs. number of DCs, per-gain-count DC lists
 * (plain, split for GO mapping, and unique), taxonomic-rank and
 * ancestor-species counts for the LCAs of DCs gained more than once, and
 * (optionally) protein-length / domain-count / domain-length statistics.
 *
 * The optional stats maps may be null; the corresponding outputs are then
 * skipped. DC strings are expected in the form "domainA=domainB".
 *
 * NOTE(review): on IOException only a warning is printed, yet the trailing
 * "Wrote ..." success messages are still emitted, and writers opened before
 * the failure are not closed -- confirm this is acceptable for a one-shot
 * command line run.
 */
private static void calculateIndependentDomainCombinationGains( final Phylogeny local_phylogeny_l,
                                                                final String outfilename_for_counts,
                                                                final String outfilename_for_dc,
                                                                final String outfilename_for_dc_for_go_mapping,
                                                                final String outfilename_for_dc_for_go_mapping_unique,
                                                                final String outfilename_for_rank_counts,
                                                                final String outfilename_for_ancestor_species_counts,
                                                                final String outfilename_for_protein_stats,
                                                                final Map<String, DescriptiveStatistics> protein_length_stats_by_dc,
                                                                final Map<String, DescriptiveStatistics> domain_number_stats_by_dc,
                                                                final Map<String, DescriptiveStatistics> domain_length_stats_by_domain ) {
    try {
        //
        // if ( protein_length_stats_by_dc != null ) {
        //     for( final Entry<?, DescriptiveStatistics> entry : protein_length_stats_by_dc.entrySet() ) {
        //         System.out.print( entry.getKey().toString() );
        //         System.out.print( ": " );
        //         double[] a = entry.getValue().getDataAsDoubleArray();
        //         for( int i = 0; i < a.length; i++ ) {
        //             System.out.print( a[ i ] + " " );
        //         }
        //         System.out.println();
        //     }
        // }
        // if ( domain_number_stats_by_dc != null ) {
        //     for( final Entry<?, DescriptiveStatistics> entry : domain_number_stats_by_dc.entrySet() ) {
        //         System.out.print( entry.getKey().toString() );
        //         System.out.print( ": " );
        //         double[] a = entry.getValue().getDataAsDoubleArray();
        //         for( int i = 0; i < a.length; i++ ) {
        //             System.out.print( a[ i ] + " " );
        //         }
        //         System.out.println();
        //     }
        // }
        //
        final BufferedWriter out_counts = new BufferedWriter( new FileWriter( outfilename_for_counts ) );
        final BufferedWriter out_dc = new BufferedWriter( new FileWriter( outfilename_for_dc ) );
        final BufferedWriter out_dc_for_go_mapping = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping ) );
        final BufferedWriter out_dc_for_go_mapping_unique = new BufferedWriter( new FileWriter( outfilename_for_dc_for_go_mapping_unique ) );
        // Count, per DC, on how many nodes of the tree it was gained.
        final SortedMap<String, Integer> dc_gain_counts = new TreeMap<String, Integer>();
        for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorPostorder(); it.hasNext(); ) {
            final PhylogenyNode n = it.next();
            final Set<String> gained_dc = n.getNodeData().getBinaryCharacters().getGainedCharacters();
            for( final String dc : gained_dc ) {
                if ( dc_gain_counts.containsKey( dc ) ) {
                    dc_gain_counts.put( dc, dc_gain_counts.get( dc ) + 1 );
                }
                else {
                    dc_gain_counts.put( dc, 1 );
                }
            }
        }
        // Keyed by gain count: how many DCs were gained that often, the DC
        // lists themselves, and (optionally) associated statistics.
        final SortedMap<Integer, Integer> histogram = new TreeMap<Integer, Integer>();
        final SortedMap<Integer, StringBuilder> domain_lists = new TreeMap<Integer, StringBuilder>();
        final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_protein_length_stats = new TreeMap<Integer, DescriptiveStatistics>();
        final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_number_stats = new TreeMap<Integer, DescriptiveStatistics>();
        final SortedMap<Integer, DescriptiveStatistics> dc_reapp_counts_to_domain_lengths_stats = new TreeMap<Integer, DescriptiveStatistics>();
        final SortedMap<Integer, PriorityQueue<String>> domain_lists_go = new TreeMap<Integer, PriorityQueue<String>>();
        final SortedMap<Integer, SortedSet<String>> domain_lists_go_unique = new TreeMap<Integer, SortedSet<String>>();
        final Set<String> dcs = dc_gain_counts.keySet();
        final SortedSet<String> more_than_once = new TreeSet<String>();
        // Non-final on purpose: nulled after use further below as a GC hint.
        DescriptiveStatistics gained_once_lengths_stats = new BasicDescriptiveStatistics();
        DescriptiveStatistics gained_once_domain_count_stats = new BasicDescriptiveStatistics();
        DescriptiveStatistics gained_multiple_times_lengths_stats = new BasicDescriptiveStatistics();
        final DescriptiveStatistics gained_multiple_times_domain_count_stats = new BasicDescriptiveStatistics();
        long gained_multiple_times_domain_length_sum = 0;
        long gained_once_domain_length_sum = 0;
        long gained_multiple_times_domain_length_count = 0;
        long gained_once_domain_length_count = 0;
        for( final String dc : dcs ) {
            final int count = dc_gain_counts.get( dc );
            // Accumulate per-gain-count histogram and DC lists.
            if ( histogram.containsKey( count ) ) {
                histogram.put( count, histogram.get( count ) + 1 );
                domain_lists.get( count ).append( ", " + dc );
                domain_lists_go.get( count ).addAll( splitDomainCombination( dc ) );
                domain_lists_go_unique.get( count ).addAll( splitDomainCombination( dc ) );
            }
            else {
                histogram.put( count, 1 );
                domain_lists.put( count, new StringBuilder( dc ) );
                final PriorityQueue<String> q = new PriorityQueue<String>();
                q.addAll( splitDomainCombination( dc ) );
                domain_lists_go.put( count, q );
                final SortedSet<String> set = new TreeSet<String>();
                set.addAll( splitDomainCombination( dc ) );
                domain_lists_go_unique.put( count, set );
            }
            // Optional per-gain-count statistics, fed with the mean of the
            // per-DC (or per-domain) statistics.
            if ( protein_length_stats_by_dc != null ) {
                if ( !dc_reapp_counts_to_protein_length_stats.containsKey( count ) ) {
                    dc_reapp_counts_to_protein_length_stats.put( count, new BasicDescriptiveStatistics() );
                }
                dc_reapp_counts_to_protein_length_stats.get( count ).addValue( protein_length_stats_by_dc.get( dc )
                        .arithmeticMean() );
            }
            if ( domain_number_stats_by_dc != null ) {
                if ( !dc_reapp_counts_to_domain_number_stats.containsKey( count ) ) {
                    dc_reapp_counts_to_domain_number_stats.put( count, new BasicDescriptiveStatistics() );
                }
                dc_reapp_counts_to_domain_number_stats.get( count ).addValue( domain_number_stats_by_dc.get( dc )
                        .arithmeticMean() );
            }
            if ( domain_length_stats_by_domain != null ) {
                if ( !dc_reapp_counts_to_domain_lengths_stats.containsKey( count ) ) {
                    dc_reapp_counts_to_domain_lengths_stats.put( count, new BasicDescriptiveStatistics() );
                }
                // Both partner domains of the DC contribute a length value.
                final String[] ds = dc.split( "=" );
                dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                        .get( ds[ 0 ] ).arithmeticMean() );
                dc_reapp_counts_to_domain_lengths_stats.get( count ).addValue( domain_length_stats_by_domain
                        .get( ds[ 1 ] ).arithmeticMean() );
            }
            // Split the raw data into "gained once" vs "gained multiple
            // times" aggregates.
            if ( count > 1 ) {
                more_than_once.add( dc );
                if ( protein_length_stats_by_dc != null ) {
                    final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                    for( final double element : s.getData() ) {
                        gained_multiple_times_lengths_stats.addValue( element );
                    }
                }
                if ( domain_number_stats_by_dc != null ) {
                    final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                    for( final double element : s.getData() ) {
                        gained_multiple_times_domain_count_stats.addValue( element );
                    }
                }
                if ( domain_length_stats_by_domain != null ) {
                    final String[] ds = dc.split( "=" );
                    final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                    final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                    for( final double element : s0.getData() ) {
                        gained_multiple_times_domain_length_sum += element;
                        ++gained_multiple_times_domain_length_count;
                    }
                    for( final double element : s1.getData() ) {
                        gained_multiple_times_domain_length_sum += element;
                        ++gained_multiple_times_domain_length_count;
                    }
                }
            }
            else {
                if ( protein_length_stats_by_dc != null ) {
                    final DescriptiveStatistics s = protein_length_stats_by_dc.get( dc );
                    for( final double element : s.getData() ) {
                        gained_once_lengths_stats.addValue( element );
                    }
                }
                if ( domain_number_stats_by_dc != null ) {
                    final DescriptiveStatistics s = domain_number_stats_by_dc.get( dc );
                    for( final double element : s.getData() ) {
                        gained_once_domain_count_stats.addValue( element );
                    }
                }
                if ( domain_length_stats_by_domain != null ) {
                    final String[] ds = dc.split( "=" );
                    final DescriptiveStatistics s0 = domain_length_stats_by_domain.get( ds[ 0 ] );
                    final DescriptiveStatistics s1 = domain_length_stats_by_domain.get( ds[ 1 ] );
                    for( final double element : s0.getData() ) {
                        gained_once_domain_length_sum += element;
                        ++gained_once_domain_length_count;
                    }
                    for( final double element : s1.getData() ) {
                        gained_once_domain_length_sum += element;
                        ++gained_once_domain_length_count;
                    }
                }
            }
        }
        // Emit histogram and DC lists, one section per gain count.
        final Set<Integer> histogram_keys = histogram.keySet();
        for( final Integer histogram_key : histogram_keys ) {
            final int count = histogram.get( histogram_key );
            final StringBuilder dc = domain_lists.get( histogram_key );
            out_counts.write( histogram_key + "\t" + count + ForesterUtil.LINE_SEPARATOR );
            out_dc.write( histogram_key + "\t" + dc + ForesterUtil.LINE_SEPARATOR );
            out_dc_for_go_mapping.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
            // PriorityQueue iteration order is not sorted; sort a snapshot.
            final Object[] sorted = domain_lists_go.get( histogram_key ).toArray();
            Arrays.sort( sorted );
            for( final Object domain : sorted ) {
                out_dc_for_go_mapping.write( domain + ForesterUtil.LINE_SEPARATOR );
            }
            out_dc_for_go_mapping_unique.write( "#" + histogram_key + ForesterUtil.LINE_SEPARATOR );
            for( final String domain : domain_lists_go_unique.get( histogram_key ) ) {
                out_dc_for_go_mapping_unique.write( domain + ForesterUtil.LINE_SEPARATOR );
            }
        }
        out_counts.close();
        out_dc.close();
        out_dc_for_go_mapping.close();
        out_dc_for_go_mapping_unique.close();
        // For every DC gained more than once: for each pair of external
        // nodes that gained it, record the LCA's taxonomic rank and
        // ancestor species name (scientific name preferred, then common
        // name, then node name).
        final SortedMap<String, Integer> lca_rank_counts = new TreeMap<String, Integer>();
        final SortedMap<String, Integer> lca_ancestor_species_counts = new TreeMap<String, Integer>();
        for( final String dc : more_than_once ) {
            final List<PhylogenyNode> nodes = new ArrayList<PhylogenyNode>();
            for( final PhylogenyNodeIterator it = local_phylogeny_l.iteratorExternalForward(); it.hasNext(); ) {
                final PhylogenyNode n = it.next();
                if ( n.getNodeData().getBinaryCharacters().getGainedCharacters().contains( dc ) ) {
                    nodes.add( n );
                }
            }
            // All unordered pairs of gaining nodes.
            for( int i = 0; i < ( nodes.size() - 1 ); ++i ) {
                for( int j = i + 1; j < nodes.size(); ++j ) {
                    final PhylogenyNode lca = PhylogenyMethods.calculateLCA( nodes.get( i ), nodes.get( j ) );
                    String rank = "unknown";
                    if ( lca.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getRank() ) ) {
                        rank = lca.getNodeData().getTaxonomy().getRank();
                    }
                    addToCountMap( lca_rank_counts, rank );
                    String lca_species;
                    if ( lca.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getScientificName() ) ) {
                        lca_species = lca.getNodeData().getTaxonomy().getScientificName();
                    }
                    else if ( lca.getNodeData().isHasTaxonomy()
                            && !ForesterUtil.isEmpty( lca.getNodeData().getTaxonomy().getCommonName() ) ) {
                        lca_species = lca.getNodeData().getTaxonomy().getCommonName();
                    }
                    else {
                        lca_species = lca.getName();
                    }
                    addToCountMap( lca_ancestor_species_counts, lca_species );
                }
            }
        }
        final BufferedWriter out_for_rank_counts = new BufferedWriter( new FileWriter( outfilename_for_rank_counts ) );
        final BufferedWriter out_for_ancestor_species_counts = new BufferedWriter( new FileWriter( outfilename_for_ancestor_species_counts ) );
        ForesterUtil.map2writer( out_for_rank_counts, lca_rank_counts, "\t", ForesterUtil.LINE_SEPARATOR );
        ForesterUtil.map2writer( out_for_ancestor_species_counts,
                                 lca_ancestor_species_counts,
                                 "\t",
                                 ForesterUtil.LINE_SEPARATOR );
        out_for_rank_counts.close();
        out_for_ancestor_species_counts.close();
        // Optional protein statistics report, only when a filename and at
        // least one statistics map were provided.
        if ( !ForesterUtil.isEmpty( outfilename_for_protein_stats )
                && ( ( domain_length_stats_by_domain != null ) || ( protein_length_stats_by_dc != null ) || ( domain_number_stats_by_dc != null ) ) ) {
            final BufferedWriter w = new BufferedWriter( new FileWriter( outfilename_for_protein_stats ) );
            w.write( "Domain Lengths: " );
            w.write( "\n" );
            if ( domain_length_stats_by_domain != null ) {
                for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_lengths_stats
                        .entrySet() ) {
                    w.write( entry.getKey().toString() );
                    w.write( "\t" + entry.getValue().arithmeticMean() );
                    w.write( "\t" + entry.getValue().median() );
                    w.write( "\n" );
                }
            }
            w.flush();
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Protein Lengths: " );
            w.write( "\n" );
            if ( protein_length_stats_by_dc != null ) {
                for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_protein_length_stats
                        .entrySet() ) {
                    w.write( entry.getKey().toString() );
                    w.write( "\t" + entry.getValue().arithmeticMean() );
                    w.write( "\t" + entry.getValue().median() );
                    w.write( "\n" );
                }
            }
            w.flush();
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Number of domains: " );
            w.write( "\n" );
            if ( domain_number_stats_by_dc != null ) {
                for( final Entry<Integer, DescriptiveStatistics> entry : dc_reapp_counts_to_domain_number_stats
                        .entrySet() ) {
                    w.write( entry.getKey().toString() );
                    w.write( "\t" + entry.getValue().arithmeticMean() );
                    w.write( "\t" + entry.getValue().median() );
                    w.write( "\n" );
                }
            }
            w.flush();
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Gained once, domain lengths:" );
            w.write( "\n" );
            w.write( "N: " + gained_once_domain_length_count );
            w.write( "\n" );
            w.write( "Avg: " + ( ( double ) gained_once_domain_length_sum / gained_once_domain_length_count ) );
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Gained multiple times, domain lengths:" );
            w.write( "\n" );
            w.write( "N: " + gained_multiple_times_domain_length_count );
            w.write( "\n" );
            w.write( "Avg: "
                    + ( ( double ) gained_multiple_times_domain_length_sum / gained_multiple_times_domain_length_count ) );
            w.write( "\n" );
            w.write( "\n" );
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Gained once, protein lengths:" );
            w.write( "\n" );
            w.write( gained_once_lengths_stats.toString() );
            // Nulled as a GC hint; the raw data sets can be large.
            gained_once_lengths_stats = null;
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Gained once, domain counts:" );
            w.write( "\n" );
            w.write( gained_once_domain_count_stats.toString() );
            gained_once_domain_count_stats = null;
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Gained multiple times, protein lengths:" );
            w.write( "\n" );
            w.write( gained_multiple_times_lengths_stats.toString() );
            gained_multiple_times_lengths_stats = null;
            w.write( "\n" );
            w.write( "\n" );
            w.write( "Gained multiple times, domain counts:" );
            w.write( "\n" );
            w.write( gained_multiple_times_domain_count_stats.toString() );
            w.flush();
            w.close();
        }
    }
    catch ( final IOException e ) {
        ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "Failure to write: " + e );
    }
    ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch counts to ["
            + outfilename_for_counts + "]" );
    ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote independent domain combination gains fitch lists to ["
            + outfilename_for_dc + "]" );
    ForesterUtil.programMessage( surfacing.PRG_NAME,
                                 "Wrote independent domain combination gains fitch lists to (for GO mapping) ["
                                         + outfilename_for_dc_for_go_mapping + "]" );
    ForesterUtil.programMessage( surfacing.PRG_NAME,
                                 "Wrote independent domain combination gains fitch lists to (for GO mapping, unique) ["
                                         + outfilename_for_dc_for_go_mapping_unique + "]" );
}
+
+ private static SortedSet<String> collectAllDomainsChangedOnSubtree( final PhylogenyNode subtree_root,
+ final boolean get_gains ) {
+ final SortedSet<String> domains = new TreeSet<String>();
+ for( final PhylogenyNode descendant : PhylogenyMethods.getAllDescendants( subtree_root ) ) {
+ final BinaryCharacters chars = descendant.getNodeData().getBinaryCharacters();
+ if ( get_gains ) {
+ domains.addAll( chars.getGainedCharacters() );
+ }
+ else {
+ domains.addAll( chars.getLostCharacters() );
+ }
+ }
+ return domains;
+ }
+
+ private static File createBaseDirForPerNodeDomainFiles( final String base_dir,
+ final boolean domain_combinations,
+ final CharacterStateMatrix.GainLossStates state,
+ final String outfile ) {
+ File per_node_go_mapped_domain_gain_loss_files_base_dir = new File( new File( outfile ).getParent()
+ + ForesterUtil.FILE_SEPARATOR + base_dir );
+ if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
+ per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
+ }
+ if ( domain_combinations ) {
+ per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
+ + ForesterUtil.FILE_SEPARATOR + "DC" );
+ }
+ else {
+ per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
+ + ForesterUtil.FILE_SEPARATOR + "DOMAINS" );
+ }
+ if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
+ per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
+ }
+ if ( state == GainLossStates.GAIN ) {
+ per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
+ + ForesterUtil.FILE_SEPARATOR + "GAINS" );
+ }
+ else if ( state == GainLossStates.LOSS ) {
+ per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
+ + ForesterUtil.FILE_SEPARATOR + "LOSSES" );
+ }
+ else {
+ per_node_go_mapped_domain_gain_loss_files_base_dir = new File( per_node_go_mapped_domain_gain_loss_files_base_dir
+ + ForesterUtil.FILE_SEPARATOR + "PRESENT" );
+ }
+ if ( !per_node_go_mapped_domain_gain_loss_files_base_dir.exists() ) {
+ per_node_go_mapped_domain_gain_loss_files_base_dir.mkdir();
+ }
+ return per_node_go_mapped_domain_gain_loss_files_base_dir;
+ }
+
+ private static SortedSet<BinaryDomainCombination> createSetOfAllBinaryDomainCombinationsPerGenome( final GenomeWideCombinableDomains gwcd ) {
+ final SortedMap<String, CombinableDomains> cds = gwcd.getAllCombinableDomainsIds();
+ final SortedSet<BinaryDomainCombination> binary_combinations = new TreeSet<BinaryDomainCombination>();
+ for( final String domain_id : cds.keySet() ) {
+ final CombinableDomains cd = cds.get( domain_id );
+ binary_combinations.addAll( cd.toBinaryDomainCombinations() );
+ }
+ return binary_combinations;
+ }
+
+ private static List<String> splitDomainCombination( final String dc ) {
+ final String[] s = dc.split( "=" );
+ if ( s.length != 2 ) {
+ ForesterUtil.printErrorMessage( surfacing.PRG_NAME, "Stringyfied domain combination has illegal format: "
+ + dc );
+ System.exit( -1 );
+ }
+ final List<String> l = new ArrayList<String>( 2 );
+ l.add( s[ 0 ] );
+ l.add( s[ 1 ] );
+ return l;
+ }
+
+ private static void writeAllEncounteredPfamsToFile( final Map<String, List<GoId>> domain_id_to_go_ids_map,
+ final Map<GoId, GoTerm> go_id_to_term_map,
+ final String outfile_name,
+ final SortedSet<String> all_pfams_encountered ) {
+ final File all_pfams_encountered_file = new File( outfile_name + surfacing.ALL_PFAMS_ENCOUNTERED_SUFFIX );
+ final File all_pfams_encountered_with_go_annotation_file = new File( outfile_name
+ + surfacing.ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX );
+ final File encountered_pfams_summary_file = new File( outfile_name + surfacing.ENCOUNTERED_PFAMS_SUMMARY_SUFFIX );
+ int biological_process_counter = 0;
+ int cellular_component_counter = 0;
+ int molecular_function_counter = 0;
+ int pfams_with_mappings_counter = 0;
+ int pfams_without_mappings_counter = 0;
+ int pfams_without_mappings_to_bp_or_mf_counter = 0;
+ int pfams_with_mappings_to_bp_or_mf_counter = 0;
+ try {
+ final Writer all_pfams_encountered_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_file ) );
+ final Writer all_pfams_encountered_with_go_annotation_writer = new BufferedWriter( new FileWriter( all_pfams_encountered_with_go_annotation_file ) );
+ final Writer summary_writer = new BufferedWriter( new FileWriter( encountered_pfams_summary_file ) );
+ summary_writer.write( "# Pfam to GO mapping summary" );
+ summary_writer.write( ForesterUtil.LINE_SEPARATOR );
+ summary_writer.write( "# Actual summary is at the end of this file." );
+ summary_writer.write( ForesterUtil.LINE_SEPARATOR );
+ summary_writer.write( "# Encountered Pfams without a GO mapping:" );
+ summary_writer.write( ForesterUtil.LINE_SEPARATOR );
+ for( final String pfam : all_pfams_encountered ) {
+ all_pfams_encountered_writer.write( pfam );
+ all_pfams_encountered_writer.write( ForesterUtil.LINE_SEPARATOR );
+ final String domain_id = new String( pfam );
+ if ( domain_id_to_go_ids_map.containsKey( domain_id ) ) {
+ ++pfams_with_mappings_counter;
+ all_pfams_encountered_with_go_annotation_writer.write( pfam );
+ all_pfams_encountered_with_go_annotation_writer.write( ForesterUtil.LINE_SEPARATOR );
+ final List<GoId> go_ids = domain_id_to_go_ids_map.get( domain_id );
+ boolean maps_to_bp = false;
+ boolean maps_to_cc = false;
+ boolean maps_to_mf = false;
+ for( final GoId go_id : go_ids ) {
+ final GoTerm go_term = go_id_to_term_map.get( go_id );
+ if ( go_term.getGoNameSpace().isBiologicalProcess() ) {
+ maps_to_bp = true;
+ }
+ else if ( go_term.getGoNameSpace().isCellularComponent() ) {
+ maps_to_cc = true;
+ }
+ else if ( go_term.getGoNameSpace().isMolecularFunction() ) {
+ maps_to_mf = true;
+ }
+ }
+ if ( maps_to_bp ) {
+ ++biological_process_counter;
+ }
+ if ( maps_to_cc ) {
+ ++cellular_component_counter;
+ }
+ if ( maps_to_mf ) {
+ ++molecular_function_counter;
+ }
+ if ( maps_to_bp || maps_to_mf ) {
+ ++pfams_with_mappings_to_bp_or_mf_counter;
+ }
+ else {
+ ++pfams_without_mappings_to_bp_or_mf_counter;
+ }
+ }
+ else {
+ ++pfams_without_mappings_to_bp_or_mf_counter;
+ ++pfams_without_mappings_counter;
+ summary_writer.write( pfam );
+ summary_writer.write( ForesterUtil.LINE_SEPARATOR );
+ }