in progress

diff --git a/forester/java/src/org/forester/application/surfacing.java b/forester/java/src/org/forester/application/surfacing.java
index 9d2c987..4218c4b 100644
--- a/forester/java/src/org/forester/application/surfacing.java
+++ b/forester/java/src/org/forester/application/surfacing.java
@@ -33,9 +33,11 @@ import java.io.IOException;
 import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.SortedSet;
@@ -54,6 +56,7 @@ import org.forester.go.PfamToGoMapping;
 import org.forester.go.PfamToGoParser;
 import org.forester.io.parsers.HmmscanPerDomainTableParser;
 import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
+import org.forester.io.parsers.util.ParserUtils;
 import org.forester.io.writers.PhylogenyWriter;
 import org.forester.phylogeny.Phylogeny;
 import org.forester.phylogeny.PhylogenyMethods;
@@ -86,6 +89,7 @@ import org.forester.surfacing.Protein;
 import org.forester.surfacing.ProteinCountsBasedPairwiseDomainSimilarityCalculator;
 import org.forester.surfacing.Species;
 import org.forester.surfacing.SurfacingUtil;
+import org.forester.util.BasicDescriptiveStatistics;
 import org.forester.util.BasicTable;
 import org.forester.util.BasicTableParser;
 import org.forester.util.CommandLineArguments;
@@ -95,6 +99,7 @@ import org.forester.util.ForesterUtil;
 
 public class surfacing {
 
+    private static final int                                  MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING                           = 1000;
     public final static String                                DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS                    = "graph_analysis_out";
     public final static String                                DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS                = "_dc.dot";
     public final static String                                PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_fitch_present_dc.dot";
@@ -229,8 +234,8 @@ public class surfacing {
     final static private String                               INPUT_SPECIES_TREE_OPTION                                              = "species_tree";
     final static private String                               SEQ_EXTRACT_OPTION                                                     = "prot_extract";
     final static private char                                 SEPARATOR_FOR_INPUT_VALUES                                             = '#';
-    final static private String                               PRG_VERSION                                                            = "2.100";
-    final static private String                               PRG_DATE                                                               = "2011.06.17";
+    final static private String                               PRG_VERSION                                                            = "2.210";
+    final static private String                               PRG_DATE                                                               = "2011.12.08";
     final static private String                               E_MAIL                                                                 = "czmasek@burnham.org";
     final static private String                               WWW                                                                    = "www.phylosoft.org/forester/applications/surfacing";
     final static private boolean                              IGNORE_DUFS_DEFAULT                                                    = true;
@@ -272,6 +277,7 @@ public class surfacing {
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX                   = "_indep_dc_gains_fitch_counts.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX                       = "_indep_dc_gains_fitch_lists.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX        = "_indep_dc_gains_fitch_lists_for_go_mapping.txt";
+    public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique.txt";
 
     private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
                                                                  final String[][] input_file_properties,
@@ -486,7 +492,7 @@ public class surfacing {
             }
             try {
                 final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
-                        .create( intree_file, ForesterUtil.createParserDependingOnFileType( intree_file, true ) );
+                        .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
                 if ( p_array.length < 1 ) {
                     ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
                             + "] does not contain any phylogeny in phyloXML format" );
@@ -1635,7 +1641,7 @@ public class surfacing {
             }
         } // if ( perform_pwc ) {
         System.out.println();
-        html_desc.append( "<tr><td>Command line:</td><td>" + cla.getCommandLineArgsAsString() + "</td></tr>" + nl );
+        html_desc.append( "<tr><td>Command line:</td><td>\n" + cla.getCommandLineArgsAsString() + "\n</td></tr>" + nl );
         System.out.println( "Command line                : " + cla.getCommandLineArgsAsString() );
         BufferedWriter[] query_domains_writer_ary = null;
         List<DomainId>[] query_domain_ids_array = null;
@@ -1718,6 +1724,35 @@ public class surfacing {
         catch ( final IOException e2 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
         }
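+        // Coverage and domains-per-protein statistics, accumulated over all genomes, and the writer for the per-genome stats table: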
+        final DescriptiveStatistics protein_coverage_stats = new BasicDescriptiveStatistics();
+        final DescriptiveStatistics all_genomes_domains_per_protein_stats = new BasicDescriptiveStatistics();
+        final SortedMap<Integer, Integer> all_genomes_domains_per_protein_histo = new TreeMap<Integer, Integer>();
+        final SortedSet<String> domains_which_are_always_single = new TreeSet<String>();
+        final SortedSet<String> domains_which_are_sometimes_single_sometimes_not = new TreeSet<String>();
+        final SortedSet<String> domains_which_never_single = new TreeSet<String>();
+        BufferedWriter domains_per_protein_stats_writer = null;
+        try {
+            domains_per_protein_stats_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
+                    + output_file + "__domains_per_protein_stats.txt" ) );
+            domains_per_protein_stats_writer.write( "Genome" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( "Mean" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( "SD" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( "Median" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( "N" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( "Min" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( "Max" );
+            domains_per_protein_stats_writer.write( "\n" );
+        }
+        catch ( final IOException e3 ) {
+            ForesterUtil.fatalError( surfacing.PRG_NAME, e3.getMessage() );
+        }
+        // Main loop:
         for( int i = 0; i < number_of_genomes; ++i ) {
             System.out.println();
             System.out.println( ( i + 1 ) + "/" + number_of_genomes );
@@ -1745,12 +1780,14 @@ public class surfacing {
                                                           input_file_properties[ i ][ 1 ],
                                                           filter,
                                                           filter_type,
-                                                          ind_score_cutoff );
+                                                          ind_score_cutoff,
+                                                          true );
             }
             else {
                 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
                                                           input_file_properties[ i ][ 1 ],
-                                                          ind_score_cutoff );
+                                                          ind_score_cutoff,
+                                                          true );
             }
             if ( e_value_max >= 0.0 ) {
                 parser.setEValueMaximum( e_value_max );
@@ -1781,10 +1818,16 @@ public class surfacing {
                 System.out.println( "Domains ignored due to virus like id: " );
                 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToVirusLikeIdCountsMap() );
             }
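+            // Coverage: fraction of proteins encountered in the hmmscan output for which at least one domain was stored after filtering.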
+            final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
+            protein_coverage_stats.addValue( coverage );
             System.out.println( "Number of proteins encountered                 : " + parser.getProteinsEncountered() );
             log( "Number of proteins encountered                 : " + parser.getProteinsEncountered(), log_writer );
             System.out.println( "Number of proteins stored                      : " + protein_list.size() );
             log( "Number of proteins stored                      : " + protein_list.size(), log_writer );
+            System.out.println( "Coverage                                       : "
+                    + ForesterUtil.roundToInt( 100.0 * coverage ) + "%" );
+            log( "Coverage                                       : " + ForesterUtil.roundToInt( 100.0 * coverage )
+                    + "%", log_writer );
             System.out.println( "Domains encountered                            : " + parser.getDomainsEncountered() );
             log( "Domains encountered                            : " + parser.getDomainsEncountered(), log_writer );
             System.out.println( "Domains stored                                 : " + parser.getDomainsStored() );
@@ -1864,6 +1907,14 @@ public class surfacing {
             catch ( final IOException e ) {
                 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
             }
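+            // Collect per-genome domains-per-protein statistics and record which domains occur only singly, only in combination, or both: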
+            SurfacingUtil.domainsPerProteinsStatistics( input_file_properties[ i ][ 1 ],
+                                                        protein_list,
+                                                        all_genomes_domains_per_protein_stats,
+                                                        all_genomes_domains_per_protein_histo,
+                                                        domains_which_are_always_single,
+                                                        domains_which_are_sometimes_single_sometimes_not,
+                                                        domains_which_never_single,
+                                                        domains_per_protein_stats_writer );
             gwcd_list.add( BasicGenomeWideCombinableDomains
                     .createInstance( protein_list,
                                      ignore_combination_with_same,
@@ -1914,19 +1965,48 @@ public class surfacing {
             }
             System.gc();
         } // for( int i = 0; i < number_of_genomes; ++i ) {
+        ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
+                + per_genome_domain_promiscuity_statistics_file );
+        //
         try {
-            per_genome_domain_promiscuity_statistics_writer.flush();
-            per_genome_domain_promiscuity_statistics_writer.close();
-            dc_data_writer.flush();
-            dc_data_writer.close();
-            log_writer.flush();
-            log_writer.close();
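+            // Append the genome-spanning ("ALL") summary row, then write the domains-per-protein histogram and the single/combined domain lists: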
+            domains_per_protein_stats_writer.write( "ALL" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( all_genomes_domains_per_protein_stats.arithmeticMean() + "" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( all_genomes_domains_per_protein_stats.sampleStandardDeviation() + "" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( all_genomes_domains_per_protein_stats.median() + "" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( all_genomes_domains_per_protein_stats.getN() + "" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( all_genomes_domains_per_protein_stats.getMin() + "" );
+            domains_per_protein_stats_writer.write( "\t" );
+            domains_per_protein_stats_writer.write( all_genomes_domains_per_protein_stats.getMax() + "" );
+            domains_per_protein_stats_writer.write( "\n" );
+            domains_per_protein_stats_writer.close();
+            printOutPercentageOfMultidomainProteins( all_genomes_domains_per_protein_histo, log_writer );
+            ForesterUtil.map2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
+                    + "__all_genomes_domains_per_protein_histo.txt" ), all_genomes_domains_per_protein_histo, "\t", "\n" );
+            ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
+                    + "__domains_always_single.txt" ), domains_which_are_always_single, "\n" );
+            ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
+                    + "__domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
+            ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
+                    + "__domains_always_combined.txt" ), domains_which_never_single, "\n" );
+            ForesterUtil.programMessage( PRG_NAME,
+                                         "Average of proteins with at least one domain assigned: "
+                                                 + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
+                                                 + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
+            ForesterUtil.programMessage( PRG_NAME, "Range of proteins with at least one domain assigned: " + 100
+                    * protein_coverage_stats.getMin() + "%-" + 100 * protein_coverage_stats.getMax() + "%" );
+            log( "Average of prot with at least one dom assigned : " + ( 100 * protein_coverage_stats.arithmeticMean() )
+                    + "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
+            log( "Range of prot with at least one dom assigned   : " + 100 * protein_coverage_stats.getMin() + "%-"
+                    + 100 * protein_coverage_stats.getMax() + "%", log_writer );
         }
         catch ( final IOException e2 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
         }
-        ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
-                + per_genome_domain_promiscuity_statistics_file );
         if ( query_domains_writer_ary != null ) {
             for( int j = 0; j < query_domain_ids_array.length; j++ ) {
                 try {
@@ -1937,6 +2017,14 @@ public class surfacing {
                 }
             }
         }
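+        // Close the remaining per-run writers (closing a BufferedWriter also flushes it):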
+        try {
+            per_genome_domain_promiscuity_statistics_writer.close();
+            dc_data_writer.close();
+            log_writer.close();
+        }
+        catch ( final IOException e2 ) {
+            ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
+        }
         if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
             try {
                 SurfacingUtil.executeDomainLengthAnalysis( input_file_properties,
@@ -1987,11 +2075,19 @@ public class surfacing {
         DescriptiveStatistics pw_stats = null;
         try {
             String my_outfile = output_file.toString();
-            if ( !my_outfile.endsWith( ".html" ) ) {
+            Map<Character, Writer> split_writers = null;
+            Writer writer = null;
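+            // For large similarity lists, split the HTML output into one file per starting character (see createSplitWriters()); otherwise write a single HTML file: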
+            if ( similarities.size() > MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING ) {
+                if ( my_outfile.endsWith( ".html" ) ) {
+                    my_outfile = my_outfile.substring( 0, my_outfile.length() - 5 );
+                }
+                split_writers = new HashMap<Character, Writer>();
+                createSplitWriters( out_dir, my_outfile, split_writers );
+            }
-                my_outfile += ".html";
-            }
-            final Writer writer = new BufferedWriter( new FileWriter( out_dir == null ? my_outfile : out_dir
-                    + ForesterUtil.FILE_SEPARATOR + my_outfile ) );
+            else {
+                if ( !my_outfile.endsWith( ".html" ) ) {
+                    my_outfile += ".html";
+                }
+                writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) );
+            }
             List<Species> species_order = null;
             if ( species_matrix ) {
                 species_order = new ArrayList<Species>();
@@ -2011,6 +2107,7 @@ public class surfacing {
                     .writeDomainSimilaritiesToFile( html_desc,
                                                     new StringBuilder( number_of_genomes + " genomes" ),
                                                     writer,
+                                                    split_writers,
                                                     similarities,
                                                     number_of_genomes == 2,
                                                     species_order,
@@ -2277,6 +2374,76 @@ public class surfacing {
         System.out.println();
     }
 
+    private static void createSplitWriters( final File out_dir,
+                                            final String my_outfile,
+                                            final Map<Character, Writer> split_writers ) throws IOException {
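+        // One writer per lower-case starting character, with '0' as the catch-all bucket; the bucket a similarity ends up in is presumably decided in SurfacingUtil.writeDomainSimilaritiesToFile().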
+        split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_A.html" ) ) );
+        split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_B.html" ) ) );
+        split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_C.html" ) ) );
+        split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_D.html" ) ) );
+        split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_E.html" ) ) );
+        split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_F.html" ) ) );
+        split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_G.html" ) ) );
+        split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_H.html" ) ) );
+        split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_I.html" ) ) );
+        split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_J.html" ) ) );
+        split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_K.html" ) ) );
+        split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_L.html" ) ) );
+        split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_M.html" ) ) );
+        split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_N.html" ) ) );
+        split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_O.html" ) ) );
+        split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_P.html" ) ) );
+        split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_Q.html" ) ) );
+        split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_R.html" ) ) );
+        split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_S.html" ) ) );
+        split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_T.html" ) ) );
+        split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_U.html" ) ) );
+        split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_V.html" ) ) );
+        split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_W.html" ) ) );
+        split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_X.html" ) ) );
+        split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_Y.html" ) ) );
+        split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_Z.html" ) ) );
+        split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
+                + "_domains_0.html" ) ) );
+    }
+
+    private static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_protein_histo,
+                                                                 final Writer log_writer ) {
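+        // The histogram maps (domains per protein) -> protein count, so the value for key 1 is the number of single-domain proteins.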
+        int sum = 0;
+        for( final Entry<Integer, Integer> entry : all_genomes_domains_per_protein_histo.entrySet() ) {
+            sum += entry.getValue();
+        }
+        final Integer single_domain_count = all_genomes_domains_per_protein_histo.get( 1 );
+        final double percentage = 100.0 * ( sum - ( single_domain_count == null ? 0 : single_domain_count ) ) / sum;
+        ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
+        log( "Percentage of multidomain proteins             : " + percentage + "%", log_writer );
+    }
+
     private static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
                                                               final String[][] input_file_properties ) {
         final String[] genomes = new String[ input_file_properties.length ];
@@ -2293,12 +2460,21 @@ public class surfacing {
             final PhylogenyNode n = it.next();
             if ( ForesterUtil.isEmpty( n.getName() ) ) {
                 if ( n.getNodeData().isHasTaxonomy()
+                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
+                    n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
+                }
+                else if ( n.getNodeData().isHasTaxonomy()
                         && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
                     n.setName( n.getNodeData().getTaxonomy().getScientificName() );
                 }
+                else if ( n.getNodeData().isHasTaxonomy()
+                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
+                    n.setName( n.getNodeData().getTaxonomy().getCommonName() );
+                }
                 else {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                             "node without both name and scientific taxonomy name found" );
+                    ForesterUtil
+                            .fatalError( surfacing.PRG_NAME,
+                                         "node with no name, scientific name, common name, or taxonomy code present" );
                 }
             }
         }