diff --git a/forester/java/src/org/forester/application/surfacing.java b/forester/java/src/org/forester/application/surfacing.java
index b7a44d7..fc98702 100644
--- a/forester/java/src/org/forester/application/surfacing.java
+++ b/forester/java/src/org/forester/application/surfacing.java
@@ -22,7 +22,7 @@
 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 //
 // Contact: phylosoft @ gmail . com
-// WWW: www.phylosoft.org/forester
+// WWW: https://sites.google.com/site/cmzmasek/home/software/forester
 
 package org.forester.application;
 
@@ -44,9 +44,7 @@ import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import org.forester.evoinference.distance.NeighborJoining;
 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
-import org.forester.evoinference.matrix.distance.DistanceMatrix;
 import org.forester.go.GoId;
 import org.forester.go.GoNameSpace;
 import org.forester.go.GoTerm;
@@ -56,8 +54,8 @@ import org.forester.go.PfamToGoMapping;
 import org.forester.go.PfamToGoParser;
 import org.forester.io.parsers.HmmscanPerDomainTableParser;
 import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
+import org.forester.io.parsers.phyloxml.PhyloXmlUtil;
 import org.forester.io.parsers.util.ParserUtils;
-import org.forester.io.writers.PhylogenyWriter;
 import org.forester.phylogeny.Phylogeny;
 import org.forester.phylogeny.PhylogenyMethods;
 import org.forester.phylogeny.PhylogenyNode;
@@ -65,7 +63,6 @@ import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
 import org.forester.protein.BinaryDomainCombination;
 import org.forester.protein.Domain;
-import org.forester.protein.DomainId;
 import org.forester.protein.Protein;
 import org.forester.species.BasicSpecies;
 import org.forester.species.Species;
@@ -190,6 +187,9 @@ public class surfacing {
     final static private String                               MAX_ALLOWED_OVERLAP_OPTION                                                    = "mo";
     final static private String                               NO_ENGULFING_OVERLAP_OPTION                                                   = "no_eo";
     final static private String                               IGNORE_COMBINATION_WITH_SAME_OPTION                                           = "ignore_self_comb";
+    final static private String                               PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION                                       = "dc_regain_stats";
+    final static private String                               DA_ANALYSIS_OPTION                                                            = "DA_analyis";
+    final static private String                               USE_LAST_IN_FITCH_OPTION                                                      = "last";
     final static private String                               PAIRWISE_DOMAIN_COMPARISONS_PREFIX                                            = "pwc_";
     final static private String                               PAIRWISE_DOMAIN_COMPARISONS_OPTION                                            = "pwc";
     final static private String                               OUTPUT_FILE_OPTION                                                            = "o";
@@ -219,24 +219,14 @@ public class surfacing {
                                                                                                                                                     + ForesterConstants.PHYLO_XML_SUFFIX;
     final static private String                               NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX                  = "_bin_combinations_NJ"
                                                                                                                                                     + ForesterConstants.PHYLO_XML_SUFFIX;
-    final static private String                               JACKNIFE_OPTION                                                               = "jack";
-    final static private String                               JACKNIFE_RANDOM_SEED_OPTION                                                   = "seed";
-    final static private String                               JACKNIFE_RATIO_OPTION                                                         = "jack_ratio";
-    private static final int                                  JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT                                        = 100;
-    final static private long                                 JACKNIFE_RANDOM_SEED_DEFAULT                                                  = 19;
-    final static private double                               JACKNIFE_RATIO_DEFAULT                                                        = 0.5;
-    //final static private String  INFER_SPECIES_TREES_OPTION                                             = "species_tree_inference";
-    final static private String                               INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX                                      = "_sd_nj.nh";
-    final static private String                               INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX                                     = "_sbc_nj.nh";
     final static private String                               FILTER_POSITIVE_OPTION                                                        = "pos_filter";
     final static private String                               FILTER_NEGATIVE_OPTION                                                        = "neg_filter";
     final static private String                               FILTER_NEGATIVE_DOMAINS_OPTION                                                = "neg_dom_filter";
-    final static private String                               INPUT_FILES_FROM_FILE_OPTION                                                  = "input";
+    final static private String                               INPUT_GENOMES_FILE_OPTION                                                     = "genomes";
     final static private String                               INPUT_SPECIES_TREE_OPTION                                                     = "species_tree";
     final static private String                               SEQ_EXTRACT_OPTION                                                            = "prot_extract";
-    final static private char                                 SEPARATOR_FOR_INPUT_VALUES                                                    = '#';
-    final static private String                               PRG_VERSION                                                                   = "2.250";
-    final static private String                               PRG_DATE                                                                      = "2012.05.07";
+    final static private String                               PRG_VERSION                                                                   = "2.280";
+    final static private String                               PRG_DATE                                                                      = "130701";
     final static private String                               E_MAIL                                                                        = "czmasek@burnham.org";
     final static private String                               WWW                                                                           = "www.phylosoft.org/forester/applications/surfacing";
     final static private boolean                              IGNORE_DUFS_DEFAULT                                                           = true;
@@ -256,6 +246,7 @@ public class surfacing {
     private static final String                               PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX                                              = "_plus_minus_go_ids_all.txt";
     private static final String                               PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX                                          = "_plus_minus_go_ids_passing.txt";
     private static final String                               OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS                                           = "all_prot";
+    final static private String                               OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION                         = "all_prot_e";
     private static final boolean                              VERBOSE                                                                       = false;
     private static final String                               OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX              = "_fitch_dc_gains_counts";
     private static final String                               OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX                = "_fitch_dc_losses_counts";
@@ -274,7 +265,8 @@ public class surfacing {
     private static final String                               LOG_FILE_SUFFIX                                                               = "_log.txt";
     private static final String                               DATA_FILE_SUFFIX                                                              = "_domain_combination_data.txt";
     private static final String                               DATA_FILE_DESC                                                                = "#SPECIES\tPRTEIN_ID\tN_TERM_DOMAIN\tC_TERM_DOMAIN\tN_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tC_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tN_TERM_DOMAIN_COUNTS_PER_PROTEIN\tC_TERM_DOMAIN_COUNTS_PER_PROTEIN";
-    private static final INDIVIDUAL_SCORE_CUTOFF              INDIVIDUAL_SCORE_CUTOFF_DEFAULT                                               = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE;
+    private static final String                               WRITE_TO_NEXUS_OPTION                                                         = "nexus";
+    private static final INDIVIDUAL_SCORE_CUTOFF              INDIVIDUAL_SCORE_CUTOFF_DEFAULT                                               = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE;                                                                                                                                                      //TODO look at me! change?
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX                          = "_indep_dc_gains_fitch_counts.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX                              = "_indep_dc_gains_fitch_lists.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX               = "_indep_dc_gains_fitch_lists_for_go_mapping.txt";
@@ -286,7 +278,6 @@ public class surfacing {
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX                       = "_indep_dc_gains_fitch_lists_MAPPED.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX        = "_indep_dc_gains_fitch_lists_for_go_mapping_MAPPED.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique_MAPPED.txt";
-    private static final boolean                              PERFORM_DC_REGAIN_PROTEINS_STATS                                              = true;
 
     private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
                                                                  final String[][] input_file_properties,
@@ -372,8 +363,8 @@ public class surfacing {
         final Writer out = ForesterUtil.createBufferedWriter( output_file );
         final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
                 .listToSortedCountsMap( all_bin_domain_combinations_changed );
-        final SortedSet<DomainId> all_domains_in_combination_changed_more_than_once = new TreeSet<DomainId>();
-        final SortedSet<DomainId> all_domains_in_combination_changed_only_once = new TreeSet<DomainId>();
+        final SortedSet<String> all_domains_in_combination_changed_more_than_once = new TreeSet<String>();
+        final SortedSet<String> all_domains_in_combination_changed_only_once = new TreeSet<String>();
         int above_one = 0;
         int one = 0;
         for( final Object bdc_object : bdc_to_counts.keySet() ) {
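Note: this hunk is the first of several below in which domain identifiers change from the DomainId wrapper to plain String (the same substitution appears later for Map keys, List elements and the secondary-features maps). A minimal sketch of the consequence for the sorted collections, with illustrative Pfam ids:

    // Plain String ids rely on String's natural (lexicographic) ordering
    // in TreeSet/TreeMap; the ids below are illustrative only.
    final SortedSet<String> example_ids = new TreeSet<String>();
    example_ids.add( "PF00069" );
    example_ids.add( "PF00018" );
    // iteration order: PF00018, then PF00069
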
@@ -441,7 +432,7 @@ public class surfacing {
                                                   final List<String> plus_minus_analysis_low_copy,
                                                   final List<GenomeWideCombinableDomains> gwcd_list,
                                                   final SortedMap<Species, List<Protein>> protein_lists_per_species,
-                                                  final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
+                                                  final Map<String, List<GoId>> domain_id_to_go_ids_map,
                                                   final Map<GoId, GoTerm> go_id_to_term_map,
                                                   final List<Object> plus_minus_analysis_numbers ) {
         final Set<String> all_spec = new HashSet<String>();
@@ -546,19 +537,6 @@ public class surfacing {
         return intrees;
     }
 
-    private static List<Phylogeny> inferSpeciesTrees( final File outfile, final List<DistanceMatrix> distances_list ) {
-        final NeighborJoining nj = NeighborJoining.createInstance();
-        final List<Phylogeny> phylogenies = nj.execute( distances_list );
-        final PhylogenyWriter w = new PhylogenyWriter();
-        try {
-            w.toNewHampshire( phylogenies, true, true, outfile, ";" );
-        }
-        catch ( final IOException e ) {
-            ForesterUtil.fatalError( PRG_NAME, "failed to write to outfile [" + outfile + "]: " + e.getMessage() );
-        }
-        return phylogenies;
-    }
-
     private static void log( final String msg, final Writer w ) {
         try {
             w.write( msg );
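Note: deleting inferSpeciesTrees is what allows the NeighborJoining, DistanceMatrix and PhylogenyWriter imports to go at the top of this diff. For reference, the equivalent standalone calls (taken directly from the deleted lines, with distances_list and outfile as placeholders) are:

    // Same calls the removed helper made: NJ trees from distance matrices,
    // written in New Hampshire format.
    final NeighborJoining nj = NeighborJoining.createInstance();
    final List<Phylogeny> phylogenies = nj.execute( distances_list );
    try {
        new PhylogenyWriter().toNewHampshire( phylogenies, true, true, outfile, ";" );
    }
    catch ( final IOException e ) {
        ForesterUtil.fatalError( PRG_NAME, "failed to write to outfile [" + outfile + "]: " + e.getMessage() );
    }
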
@@ -622,23 +600,27 @@ public class surfacing {
         allowed_options.add( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
         allowed_options.add( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION );
         allowed_options.add( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS );
-        allowed_options.add( JACKNIFE_OPTION );
-        allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
-        allowed_options.add( JACKNIFE_RATIO_OPTION );
+        //allowed_options.add( JACKNIFE_OPTION );
+        // allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
+        // allowed_options.add( JACKNIFE_RATIO_OPTION );
         allowed_options.add( INPUT_SPECIES_TREE_OPTION );
-        //allowed_options.add( INFER_SPECIES_TREES_OPTION );
         allowed_options.add( FILTER_POSITIVE_OPTION );
         allowed_options.add( FILTER_NEGATIVE_OPTION );
-        allowed_options.add( INPUT_FILES_FROM_FILE_OPTION );
+        allowed_options.add( INPUT_GENOMES_FILE_OPTION );
         allowed_options.add( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
         allowed_options.add( FILTER_NEGATIVE_DOMAINS_OPTION );
         allowed_options.add( IGNORE_VIRAL_IDS );
         allowed_options.add( SEQ_EXTRACT_OPTION );
+        allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
         allowed_options.add( SECONDARY_FEATURES_PARSIMONY_MAP_FILE );
         allowed_options.add( PLUS_MINUS_ANALYSIS_OPTION );
         allowed_options.add( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS );
         allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS );
         allowed_options.add( CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY );
+        allowed_options.add( WRITE_TO_NEXUS_OPTION );
+        allowed_options.add( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION );
+        allowed_options.add( DA_ANALYSIS_OPTION );
+        allowed_options.add( USE_LAST_IN_FITCH_OPTION );
         boolean ignore_dufs = surfacing.IGNORE_DUFS_DEFAULT;
         boolean ignore_combination_with_same = surfacing.IGNORE_COMBINATION_WITH_SAME_DEFAULLT;
         double e_value_max = surfacing.MAX_E_VALUE_DEFAULT;
@@ -647,6 +629,22 @@ public class surfacing {
         if ( dissallowed_options.length() > 0 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown option(s): " + dissallowed_options );
         }
+        boolean use_last_in_fitch_parsimony = false;
+        if ( cla.isOptionSet( USE_LAST_IN_FITCH_OPTION ) ) {
+            use_last_in_fitch_parsimony = true;
+        }
+        boolean write_to_nexus = false;
+        if ( cla.isOptionSet( WRITE_TO_NEXUS_OPTION ) ) {
+            write_to_nexus = true;
+        }
+        boolean perform_dc_regain_proteins_stats = false;
+        if ( cla.isOptionSet( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION ) ) {
+            perform_dc_regain_proteins_stats = true;
+        }
+        boolean da_analysis = false;
+        if ( cla.isOptionSet( DA_ANALYSIS_OPTION ) ) {
+            da_analysis = true;
+        }
         boolean output_binary_domain_combinationsfor_graph_analysis = false;
         if ( cla.isOptionSet( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS ) ) {
             output_binary_domain_combinationsfor_graph_analysis = true;
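Note: the four new switches are presence-only flags, so each if-block above reads as a direct boolean assignment; a compact equivalent sketch (isOptionSet already yields the needed boolean, as its use in the conditions shows):

    final boolean use_last_in_fitch_parsimony = cla.isOptionSet( USE_LAST_IN_FITCH_OPTION );
    final boolean write_to_nexus = cla.isOptionSet( WRITE_TO_NEXUS_OPTION );
    final boolean perform_dc_regain_proteins_stats = cla.isOptionSet( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION );
    final boolean da_analysis = cla.isOptionSet( DA_ANALYSIS_OPTION );
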
@@ -713,7 +711,7 @@ public class surfacing {
                         + error );
             }
             try {
-                final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, " " );
+                final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, ' ' );
                 individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
             }
             catch ( final IOException e ) {
@@ -805,30 +803,22 @@ public class surfacing {
                                         plus_minus_analysis_high_copy_target_species,
                                         plus_minus_analysis_high_low_copy_species,
                                         plus_minus_analysis_numbers );
-        File input_files_file = null;
-        String[] input_file_names_from_file = null;
-        if ( cla.isOptionSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
-            if ( !cla.isOptionValueSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input files file: -"
-                        + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
-            }
-            input_files_file = new File( cla.getOptionValue( surfacing.INPUT_FILES_FROM_FILE_OPTION ) );
-            final String msg = ForesterUtil.isReadableFile( input_files_file );
+        File input_genomes_file = null;
+        if ( cla.isOptionSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
+            if ( !cla.isOptionValueSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
+                ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input genomes file: -"
+                        + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
+            }
+            input_genomes_file = new File( cla.getOptionValue( surfacing.INPUT_GENOMES_FILE_OPTION ) );
+            final String msg = ForesterUtil.isReadableFile( input_genomes_file );
             if ( !ForesterUtil.isEmpty( msg ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + input_files_file + "\": " + msg );
-            }
-            try {
-                input_file_names_from_file = ForesterUtil.file2array( input_files_file );
-            }
-            catch ( final IOException e ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read from \"" + input_files_file + "\": " + e );
+                ForesterUtil
+                        .fatalError( surfacing.PRG_NAME, "can not read from \"" + input_genomes_file + "\": " + msg );
             }
         }
-        if ( ( cla.getNumberOfNames() < 1 )
-                && ( ( input_file_names_from_file == null ) || ( input_file_names_from_file.length < 1 ) ) ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                     "No hmmpfam output file indicated is input: use comand line directly or "
-                                             + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
+        else {
+            ForesterUtil.fatalError( surfacing.PRG_NAME, "no input genomes file given: "
+                    + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
         }
         DomainSimilarity.DomainSimilarityScoring scoring = SCORING_DEFAULT;
         if ( cla.isOptionSet( surfacing.SCORING_OPTION ) ) {
@@ -866,8 +856,18 @@ public class surfacing {
             species_matrix = true;
         }
         boolean output_protein_lists_for_all_domains = false;
+        double output_list_of_all_proteins_per_domain_e_value_max = -1;
         if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS ) ) {
             output_protein_lists_for_all_domains = true;
+            if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION ) ) {
+                try {
+                    output_list_of_all_proteins_per_domain_e_value_max = cla
+                            .getOptionValueAsDouble( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
+                }
+                catch ( final Exception e ) {
+                    ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for per domain E-value maximum" );
+                }
+            }
         }
         Detailedness detailedness = DETAILEDNESS_DEFAULT;
         if ( cla.isOptionSet( surfacing.DETAILEDNESS_OPTION ) ) {
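Note: the new block never range-checks the parsed per-domain cutoff, so a negative value passes through silently (-1 is only the "not set" sentinel). A possible guard, placed right after the getOptionValueAsDouble call inside the inner if (sketch only, not part of the patch):

    if ( output_list_of_all_proteins_per_domain_e_value_max < 0 ) {
        ForesterUtil.fatalError( surfacing.PRG_NAME,
                                 "negative per domain E-value maximum: "
                                         + output_list_of_all_proteins_per_domain_e_value_max );
    }
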
@@ -992,8 +992,6 @@ public class surfacing {
                 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.HTML;
             }
             else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML ) ) {
-                // domain_similarity_print_option =
-                // DomainSimilarity.PRINT_OPTION.SIMPLE_HTML;
                 ForesterUtil.fatalError( surfacing.PRG_NAME, "simple HTML output not implemented yet :(" );
             }
             else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED ) ) {
@@ -1036,13 +1034,7 @@ public class surfacing {
                         + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
             }
         }
-        String[][] input_file_properties = null;
-        if ( input_file_names_from_file != null ) {
-            input_file_properties = surfacing.processInputFileNames( input_file_names_from_file );
-        }
-        else {
-            input_file_properties = surfacing.processInputFileNames( cla.getNames() );
-        }
+        final String[][] input_file_properties = processInputGenomesFile( input_genomes_file );
         final int number_of_genomes = input_file_properties.length;
         if ( number_of_genomes < 2 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot analyze less than two files" );
@@ -1057,7 +1049,7 @@ public class surfacing {
                                                  automated_pairwise_comparison_suffix,
                                                  out_dir );
         for( int i = 0; i < number_of_genomes; i++ ) {
-            File dcc_outfile = new File( input_file_properties[ i ][ 0 ]
+            File dcc_outfile = new File( input_file_properties[ i ][ 1 ]
                     + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
             if ( out_dir != null ) {
                 dcc_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + dcc_outfile );
@@ -1065,7 +1057,7 @@ public class surfacing {
             SurfacingUtil.checkForOutputFileWriteability( dcc_outfile );
         }
         File pfam_to_go_file = null;
-        Map<DomainId, List<GoId>> domain_id_to_go_ids_map = null;
+        Map<String, List<GoId>> domain_id_to_go_ids_map = null;
         int domain_id_to_go_ids_count = 0;
         if ( cla.isOptionSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
             if ( !cla.isOptionValueSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
@@ -1163,67 +1155,9 @@ public class surfacing {
                 && ( number_of_genomes > 2 ) ) {
             domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
         }
-        boolean jacknifed_distances = false;
-        int jacknife_resamplings = JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT;
-        double jacknife_ratio = JACKNIFE_RATIO_DEFAULT;
-        long random_seed = JACKNIFE_RANDOM_SEED_DEFAULT;
-        if ( cla.isOptionSet( surfacing.JACKNIFE_OPTION ) ) {
-            if ( ( number_of_genomes < 3 ) || !perform_pwc ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use jacknife resampling analysis (-"
-                        + surfacing.JACKNIFE_OPTION + "[=<number of resamplings>]) without pairwise analyses ("
-                        + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
-                        + "=<suffix for pairwise comparison output files>)" );
-            }
-            jacknifed_distances = true;
-            if ( cla.isOptionHasAValue( surfacing.JACKNIFE_OPTION ) ) {
-                try {
-                    jacknife_resamplings = cla.getOptionValueAsInt( surfacing.JACKNIFE_OPTION );
-                }
-                catch ( final IOException e ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for number of resamplings" );
-                }
-                if ( jacknife_resamplings < 2 ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use less than 2 resamplings" );
-                }
-            }
-            if ( cla.isOptionSet( surfacing.JACKNIFE_RATIO_OPTION )
-                    && cla.isOptionHasAValue( surfacing.JACKNIFE_RATIO_OPTION ) ) {
-                try {
-                    jacknife_ratio = cla.getOptionValueAsDouble( surfacing.JACKNIFE_RATIO_OPTION );
-                }
-                catch ( final IOException e ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for jacknife ratio" );
-                }
-                if ( ( jacknife_ratio <= 0.0 ) || ( jacknife_ratio >= 1.0 ) ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use illegal value for jacknife ratio: "
-                            + jacknife_ratio );
-                }
-            }
-            if ( cla.isOptionSet( surfacing.JACKNIFE_RANDOM_SEED_OPTION )
-                    && cla.isOptionHasAValue( surfacing.JACKNIFE_RANDOM_SEED_OPTION ) ) {
-                try {
-                    random_seed = cla.getOptionValueAsLong( surfacing.JACKNIFE_RANDOM_SEED_OPTION );
-                }
-                catch ( final IOException e ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for random generator seed" );
-                }
-            }
-        }
-        //        boolean infer_species_trees = false;
-        //        if ( cla.isOptionSet( surfacing.INFER_SPECIES_TREES_OPTION ) ) {
-        //            if ( ( output_file == null ) || ( number_of_genomes < 3 )
-        //                    || ForesterUtil.isEmpty( automated_pairwise_comparison_suffix ) ) {
-        //                ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer species trees (-"
-        //                        + surfacing.INFER_SPECIES_TREES_OPTION + " without pairwise analyses ("
-        //                        + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
-        //                        + "=<suffix for pairwise comparison output files>)" );
-        //            }
-        //            infer_species_trees = true;
-        //        }
         File[] intree_files = null;
         Phylogeny[] intrees = null;
         if ( cla.isOptionSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
-            // TODO FIXME if jacknife.... maybe not
             if ( number_of_genomes < 3 ) {
                 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer gains and losses on input species trees (-"
                         + surfacing.INPUT_SPECIES_TREE_OPTION + " without pairwise analyses ("
@@ -1265,10 +1199,10 @@ public class surfacing {
             }
             radomize_fitch_parsimony = true;
         }
-        SortedSet<DomainId> filter = null;
+        SortedSet<String> filter = null;
         if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
                 || ( negative_domains_filter_file != null ) ) {
-            filter = new TreeSet<DomainId>();
+            filter = new TreeSet<String>();
             if ( positive_filter_file != null ) {
                 processFilter( positive_filter_file, filter );
             }
@@ -1279,7 +1213,7 @@ public class surfacing {
                 processFilter( negative_domains_filter_file, filter );
             }
         }
-        Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps = null;
+        Map<String, Set<String>>[] domain_id_to_secondary_features_maps = null;
         File[] secondary_features_map_files = null;
         final File domain_lengths_analysis_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
                 + DOMAIN_LENGTHS_ANALYSIS_SUFFIX );
@@ -1336,12 +1270,8 @@ public class surfacing {
                                              + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
         }
         System.out.println( "Output directory            : " + out_dir );
-        if ( input_file_names_from_file != null ) {
-            System.out.println( "Input files names from      : " + input_files_file + " ["
-                    + input_file_names_from_file.length + " input files]" );
-            html_desc.append( "<tr><td>Input files names from:</td><td>" + input_files_file + " ["
-                    + input_file_names_from_file.length + " input files]</td></tr>" + nl );
-        }
+        System.out.println( "Input genomes from          : " + input_genomes_file );
+        html_desc.append( "<tr><td>Input genomes from:</td><td>" + input_genomes_file + "</td></tr>" + nl );
         if ( positive_filter_file != null ) {
             final int filter_size = filter.size();
             System.out.println( "Positive protein filter     : " + positive_filter_file + " [" + filter_size
@@ -1388,6 +1318,11 @@ public class surfacing {
             System.out.println( "E-value maximum (inclusive) : " + e_value_max );
             html_desc.append( "<tr><td>E-value maximum (inclusive):</td><td>" + e_value_max + "</td></tr>" + nl );
         }
+        if ( output_protein_lists_for_all_domains ) {
+            System.out.println( "Domain E-value max          : " + output_list_of_all_proteins_per_domain_e_value_max );
+            html_desc.append( "<tr><td>Protein lists: E-value maximum per domain (inclusive):</td><td>"
+                    + output_list_of_all_proteins_per_domain_e_value_max + "</td></tr>" + nl );
+        }
         System.out.println( "Ignore DUFs                 : " + ignore_dufs );
         if ( ignore_virus_like_ids ) {
             System.out.println( "Ignore virus like ids       : " + ignore_virus_like_ids );
@@ -1426,19 +1361,34 @@ public class surfacing {
                     + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) + "</td></tr>"
                     + nl );
         }
+        System.out.println( "Use last in Fitch parimony  : " + use_last_in_fitch_parsimony );
+        html_desc.append( "<tr><td>Use last in Fitch parimon:</td><td>" + use_last_in_fitch_parsimony + "</td></tr>"
+                + nl );
+        System.out.println( "Write to Nexus files        : " + write_to_nexus );
+        html_desc.append( "<tr><td>Write to Nexus files:</td><td>" + write_to_nexus + "</td></tr>" + nl );
+        System.out.println( "DC regain prot stats        : " + perform_dc_regain_proteins_stats );
+        html_desc.append( "<tr><td>DC regain prot stats:</td><td>" + perform_dc_regain_proteins_stats + "</td></tr>"
+                + nl );
+        System.out.println( "DA analysis                 : " + da_analysis );
+        html_desc.append( "<tr><td>DA analysis :</td><td>" + da_analysis + "</td></tr>" + nl );
         System.out.print( "Domain counts sort order    : " );
+        html_desc.append( "<tr><td>Domain counts sort order:</td><td>" );
         switch ( dc_sort_order ) {
             case ALPHABETICAL_KEY_ID:
                 System.out.println( "alphabetical" );
+                html_desc.append( "alphabetical" + "</td></tr>" + nl );
                 break;
             case KEY_DOMAIN_COUNT:
                 System.out.println( "domain count" );
+                html_desc.append( "domain count" + "</td></tr>" + nl );
                 break;
             case KEY_DOMAIN_PROTEINS_COUNT:
                 System.out.println( "domain proteins count" );
+                html_desc.append( "domain proteins count" + "</td></tr>" + nl );
                 break;
             case COMBINATIONS_COUNT:
                 System.out.println( "domain combinations count" );
+                html_desc.append( "domain combinations count" + "</td></tr>" + nl );
                 break;
             default:
                 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for dc sort order" );
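Note: the switch now mirrors every console label into html_desc case by case; a more compact equivalent (sketch only, using the same enum constants and helpers shown above) would derive the label once:

    String dc_sort_label = "";
    switch ( dc_sort_order ) {
        case ALPHABETICAL_KEY_ID:
            dc_sort_label = "alphabetical";
            break;
        case KEY_DOMAIN_COUNT:
            dc_sort_label = "domain count";
            break;
        case KEY_DOMAIN_PROTEINS_COUNT:
            dc_sort_label = "domain proteins count";
            break;
        case COMBINATIONS_COUNT:
            dc_sort_label = "domain combinations count";
            break;
        default:
            ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for dc sort order" );
    }
    System.out.println( dc_sort_label );
    html_desc.append( dc_sort_label + "</td></tr>" + nl );
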
@@ -1602,19 +1552,6 @@ public class surfacing {
             }
             System.out.println();
             html_desc.append( "</td></tr>" + nl );
-            if ( jacknifed_distances ) {
-                html_desc.append( "<tr><td>Jacknife:</td><td>" + jacknife_resamplings + " resamplings</td></tr>" + nl );
-                html_desc.append( "<tr><td>Jacknife ratio:</td><td>" + ForesterUtil.round( jacknife_ratio, 2 )
-                        + "</td></tr>" + nl );
-                html_desc.append( "<tr><td>Jacknife random number seed:</td><td>" + random_seed + "</td></tr>" + nl );
-                System.out.println( "  Jacknife                  : " + jacknife_resamplings + " resamplings" );
-                System.out.println( "    Ratio                   : " + ForesterUtil.round( jacknife_ratio, 2 ) );
-                System.out.println( "    Random number seed      : " + random_seed );
-            }
-            //                if ( infer_species_trees ) {
-            //                    html_desc.append( "<tr><td>Infer species trees:</td><td>true</td></tr>" + nl );
-            //                    System.out.println( "  Infer species trees       : true" );
-            //                }
             if ( ( intrees != null ) && ( intrees.length > 0 ) ) {
                 for( final File intree_file : intree_files ) {
                     html_desc.append( "<tr><td>Intree for gain/loss parsimony analysis:</td><td>" + intree_file
@@ -1636,8 +1573,8 @@ public class surfacing {
                     if ( VERBOSE ) {
                         System.out.println();
                         System.out.println( "Domain ids to secondary features map:" );
-                        for( final DomainId domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
-                            System.out.print( domain_id.getId() );
+                        for( final String domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
+                            System.out.print( domain_id );
                             System.out.print( " => " );
                             for( final String sec : domain_id_to_secondary_features_maps[ i ].get( domain_id ) ) {
                                 System.out.print( sec );
@@ -1653,7 +1590,7 @@ public class surfacing {
         html_desc.append( "<tr><td>Command line:</td><td>\n" + cla.getCommandLineArgsAsString() + "\n</td></tr>" + nl );
         System.out.println( "Command line                : " + cla.getCommandLineArgsAsString() );
         BufferedWriter[] query_domains_writer_ary = null;
-        List<DomainId>[] query_domain_ids_array = null;
+        List<String>[] query_domain_ids_array = null;
         if ( query_domain_ids != null ) {
             final String[] query_domain_ids_str_array = query_domain_ids.split( "#" );
             query_domain_ids_array = new ArrayList[ query_domain_ids_str_array.length ];
@@ -1661,9 +1598,9 @@ public class surfacing {
             for( int i = 0; i < query_domain_ids_str_array.length; i++ ) {
                 String query_domain_ids_str = query_domain_ids_str_array[ i ];
                 final String[] query_domain_ids_str_ary = query_domain_ids_str.split( "~" );
-                final List<DomainId> query = new ArrayList<DomainId>();
+                final List<String> query = new ArrayList<String>();
                 for( final String element : query_domain_ids_str_ary ) {
-                    query.add( new DomainId( element ) );
+                    query.add( element );
                 }
                 query_domain_ids_array[ i ] = query;
                 query_domain_ids_str = query_domain_ids_str.replace( '~', '_' );
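Note: to make the two-level split above concrete, '#' separates independent queries and '~' separates the domain ids required within one query. A small illustration with hypothetical ids, using the same ArrayList pattern as the changed lines:

    // A query string like "PF00001~PF00002#PF00069" yields two queries:
    // [PF00001, PF00002] and [PF00069].
    final String example = "PF00001~PF00002#PF00069";
    for( final String q : example.split( "#" ) ) {
        final List<String> query = new ArrayList<String>();
        for( final String element : q.split( "~" ) ) {
            query.add( element );
        }
        System.out.println( query );
    }
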
@@ -1688,8 +1625,8 @@ public class surfacing {
         if ( need_protein_lists_per_species ) {
             protein_lists_per_species = new TreeMap<Species, List<Protein>>();
         }
-        final List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
-        final SortedSet<DomainId> all_domains_encountered = new TreeSet<DomainId>();
+        List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
+        final SortedSet<String> all_domains_encountered = new TreeSet<String>();
         final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered = new TreeSet<BinaryDomainCombination>();
         List<BinaryDomainCombination> all_bin_domain_combinations_gained_fitch = null;
         List<BinaryDomainCombination> all_bin_domain_combinations_lost_fitch = null;
@@ -1742,7 +1679,7 @@ public class surfacing {
         BufferedWriter domains_per_potein_stats_writer = null;
         try {
             domains_per_potein_stats_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
-                    + output_file + "__domains_per_potein_stats.txt" ) );
+                    + output_file + "_domains_per_potein_stats.txt" ) );
             domains_per_potein_stats_writer.write( "Genome" );
             domains_per_potein_stats_writer.write( "\t" );
             domains_per_potein_stats_writer.write( "Mean" );
@@ -1764,17 +1701,21 @@ public class surfacing {
         Map<String, DescriptiveStatistics> protein_length_stats_by_dc = null;
         Map<String, DescriptiveStatistics> domain_number_stats_by_dc = null;
         final Map<String, DescriptiveStatistics> domain_length_stats_by_domain = new HashMap<String, DescriptiveStatistics>();
-        if ( PERFORM_DC_REGAIN_PROTEINS_STATS ) {
+        if ( perform_dc_regain_proteins_stats ) {
             protein_length_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
             domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
         }
         // Main loop:
+        final SortedMap<String, Set<String>> distinct_domain_architecutures_per_genome = new TreeMap<String, Set<String>>();
+        final SortedMap<String, Integer> distinct_domain_architecuture_counts = new TreeMap<String, Integer>();
         for( int i = 0; i < number_of_genomes; ++i ) {
             System.out.println();
             System.out.println( ( i + 1 ) + "/" + number_of_genomes );
             log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
-            System.out.println( "Processing                                     : " + input_file_properties[ i ][ 0 ] );
-            log( "Genome                                         : " + input_file_properties[ i ][ 0 ], log_writer );
+            System.out.println( "Processing                                     : " + input_file_properties[ i ][ 1 ]
+                    + " [" + input_file_properties[ i ][ 0 ] + "]" );
+            log( "Genome                                         : " + input_file_properties[ i ][ 1 ] + " ["
+                    + input_file_properties[ i ][ 0 ] + "]", log_writer );
             HmmscanPerDomainTableParser parser = null;
             INDIVIDUAL_SCORE_CUTOFF ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF.NONE;
             if ( individual_score_cutoffs != null ) {
@@ -1836,6 +1777,14 @@ public class surfacing {
             }
             final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
             protein_coverage_stats.addValue( coverage );
+            int distinct_das = -1;
+            if ( da_analysis ) {
+                final String genome = input_file_properties[ i ][ 0 ];
+                distinct_das = SurfacingUtil.storeDomainArchitectures( genome,
+                                                                       distinct_domain_architecutures_per_genome,
+                                                                       protein_list,
+                                                                       distinct_domain_architecuture_counts );
+            }
             System.out.println( "Number of proteins encountered                 : " + parser.getProteinsEncountered() );
             log( "Number of proteins encountered                 : " + parser.getProteinsEncountered(), log_writer );
             System.out.println( "Number of proteins stored                      : " + protein_list.size() );
@@ -1889,6 +1838,10 @@ public class surfacing {
                 log( "Proteins ignored due to positive filter        : " + parser.getProteinsIgnoredDueToFilter(),
                      log_writer );
             }
+            if ( da_analysis ) {
+                System.out.println( "Distinct domain architectures stored           : " + distinct_das );
+                log( "Distinct domain architectures stored           : " + distinct_das, log_writer );
+            }
             System.out.println( "Time for processing                            : " + parser.getTime() + "ms" );
             log( "", log_writer );
             html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
@@ -1908,10 +1861,6 @@ public class surfacing {
                         + parser.getProteinsIgnoredDueToFilter() );
             }
             html_desc.append( "</td></tr>" + nl );
-            // domain_partner_counts_array[ i ] =
-            // Methods.getDomainPartnerCounts( protein_domain_collections_array[
-            // i ],
-            // false, input_file_properties[ i ][ 1 ] );
             try {
                 int count = 0;
                 for( final Protein protein : protein_list ) {
@@ -1938,32 +1887,34 @@ public class surfacing {
                                                         domains_which_are_sometimes_single_sometimes_not,
                                                         domains_which_never_single,
                                                         domains_per_potein_stats_writer );
-            gwcd_list.add( BasicGenomeWideCombinableDomains
-                    .createInstance( protein_list,
-                                     ignore_combination_with_same,
-                                     new BasicSpecies( input_file_properties[ i ][ 1 ] ),
-                                     domain_id_to_go_ids_map,
-                                     dc_type,
-                                     protein_length_stats_by_dc,
-                                     domain_number_stats_by_dc ) );
             domain_lengths_table.addLengths( protein_list );
-            if ( gwcd_list.get( i ).getSize() > 0 ) {
-                SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
-                                                                 out_dir,
-                                                                 per_genome_domain_promiscuity_statistics_writer,
-                                                                 gwcd_list.get( i ),
-                                                                 i,
-                                                                 dc_sort_order );
-                if ( output_binary_domain_combinationsfor_graph_analysis ) {
-                    SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
-                                                                                     out_dir,
-                                                                                     gwcd_list.get( i ),
-                                                                                     i,
-                                                                                     dc_sort_order );
+            if ( !da_analysis ) {
+                gwcd_list.add( BasicGenomeWideCombinableDomains
+                        .createInstance( protein_list,
+                                         ignore_combination_with_same,
+                                         new BasicSpecies( input_file_properties[ i ][ 1 ] ),
+                                         domain_id_to_go_ids_map,
+                                         dc_type,
+                                         protein_length_stats_by_dc,
+                                         domain_number_stats_by_dc ) );
+                if ( gwcd_list.get( i ).getSize() > 0 ) {
+                    SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
+                                                                     out_dir,
+                                                                     per_genome_domain_promiscuity_statistics_writer,
+                                                                     gwcd_list.get( i ),
+                                                                     i,
+                                                                     dc_sort_order );
+                    if ( output_binary_domain_combinationsfor_graph_analysis ) {
+                        SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
+                                                                                         out_dir,
+                                                                                         gwcd_list.get( i ),
+                                                                                         i,
+                                                                                         dc_sort_order );
+                    }
+                    SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
+                    SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
+                                                                      all_bin_domain_combinations_encountered );
                 }
-                SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
-                SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
-                                                                  all_bin_domain_combinations_encountered );
             }
             if ( query_domains_writer_ary != null ) {
                 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
@@ -1994,6 +1945,18 @@ public class surfacing {
         ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
                 + per_genome_domain_promiscuity_statistics_file );
         //
+        if ( da_analysis ) {
+            SurfacingUtil.performDomainArchitectureAnalysis( distinct_domain_architecutures_per_genome,
+                                                             distinct_domain_architecuture_counts,
+                                                             10,
+                                                             new File( out_dir.toString() + "/" + output_file
+                                                                     + "_DA_counts.txt" ),
+                                                             new File( out_dir.toString() + "/" + output_file
+                                                                     + "_unique_DAs.txt" ) );
+            distinct_domain_architecutures_per_genome.clear();
+            distinct_domain_architecuture_counts.clear();
+            System.gc();
+        }
         try {
             domains_per_potein_stats_writer.write( "ALL" );
             domains_per_potein_stats_writer.write( "\t" );
@@ -2012,23 +1975,24 @@ public class surfacing {
             domains_per_potein_stats_writer.close();
             printOutPercentageOfMultidomainProteins( all_genomes_domains_per_potein_histo, log_writer );
             ForesterUtil.map2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
+                    + "_all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
             ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
+                    + "_domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
             ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
+                    + "_domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
             ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__domains_always_combined.txt" ), domains_which_never_single, "\n" );
+                    + "_domains_always_combined.txt" ), domains_which_never_single, "\n" );
             ForesterUtil.programMessage( PRG_NAME,
                                          "Average of proteins with a least one domain assigned: "
                                                  + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
                                                  + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
-            ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: " + 100
-                    * protein_coverage_stats.getMin() + "%-" + 100 * protein_coverage_stats.getMax() + "%" );
+            ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: "
+                    + ( 100 * protein_coverage_stats.getMin() ) + "%-" + ( 100 * protein_coverage_stats.getMax() )
+                    + "%" );
             log( "Average of prot with a least one dom assigned  : " + ( 100 * protein_coverage_stats.arithmeticMean() )
                     + "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
-            log( "Range of prot with a least one dom assigned    : " + 100 * protein_coverage_stats.getMin() + "%-"
-                    + 100 * protein_coverage_stats.getMax() + "%", log_writer );
+            log( "Range of prot with a least one dom assigned    : " + ( 100 * protein_coverage_stats.getMin() ) + "%-"
+                    + ( 100 * protein_coverage_stats.getMax() ) + "%", log_writer );
         }
         catch ( final IOException e2 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
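Note: the parentheses added around the percentage terms are cosmetic; multiplication already binds tighter than string concatenation, so the old and new lines print the same value. A two-line check:

    final double min = 0.25;
    System.out.println( "Range: " + 100 * min + "%" );     // prints Range: 25.0%
    System.out.println( "Range: " + ( 100 * min ) + "%" ); // prints Range: 25.0%
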
@@ -2098,7 +2062,7 @@ public class surfacing {
                                                            go_annotation_output,
                                                            go_id_to_term_map,
                                                            go_namespace_limit );
-        DescriptiveStatistics pw_stats = null;
+        final Map<String, Integer> tax_code_to_id_map = SurfacingUtil.createTaxCodeToIdMap( intrees[ 0 ] );
         try {
             String my_outfile = output_file.toString();
             Map<Character, Writer> split_writers = null;
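Note: createTaxCodeToIdMap is called on intrees[ 0 ] unconditionally, but intrees is only populated when -species_tree is given (it stays null otherwise, see the earlier hunk). A hedged guard sketch:

    // Sketch only: avoid dereferencing intrees when no species tree was supplied.
    final Map<String, Integer> tax_code_to_id_map =
            ( ( intrees != null ) && ( intrees.length > 0 ) )
                    ? SurfacingUtil.createTaxCodeToIdMap( intrees[ 0 ] )
                    : null;
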
@@ -2129,7 +2093,7 @@ public class surfacing {
                     + new java.text.SimpleDateFormat( "yyyy.MM.dd HH:mm:ss" ).format( new java.util.Date() )
                     + "</td></tr>" + nl );
             html_desc.append( "</table>" + nl );
-            pw_stats = SurfacingUtil
+            final DescriptiveStatistics pw_stats = SurfacingUtil
                     .writeDomainSimilaritiesToFile( html_desc,
                                                     new StringBuilder( number_of_genomes + " genomes" ),
                                                     writer,
@@ -2140,7 +2104,8 @@ public class surfacing {
                                                     domain_similarity_print_option,
                                                     domain_similarity_sort_field,
                                                     scoring,
-                                                    true );
+                                                    true,
+                                                    tax_code_to_id_map );
             ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote main output (includes domain similarities) to: \""
                     + ( out_dir == null ? my_outfile : out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) + "\"" );
         }
@@ -2149,7 +2114,6 @@ public class surfacing {
                     + e.getMessage() + "]" );
         }
         System.out.println();
-        // values_for_all_scores_histogram = pw_stats.getDataAsDoubleArray();
         final Species[] species = new Species[ number_of_genomes ];
         for( int i = 0; i < number_of_genomes; ++i ) {
             species[ i ] = new BasicSpecies( input_file_properties[ i ][ 1 ] );
@@ -2177,7 +2141,8 @@ public class surfacing {
                                              surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX,
                                              surfacing.PRG_NAME,
                                              out_dir,
-                                             write_pwc_files );
+                                             write_pwc_files,
+                                             tax_code_to_id_map );
             String matrix_output_file = new String( output_file.toString() );
             if ( matrix_output_file.indexOf( '.' ) > 1 ) {
                 matrix_output_file = matrix_output_file.substring( 0, matrix_output_file.indexOf( '.' ) );
@@ -2208,40 +2173,13 @@ public class surfacing {
             inferred_trees.add( nj_gd );
             inferred_trees.add( nj_bc );
             inferred_trees.add( nj_d );
-            if ( jacknifed_distances ) {
-                pwgc.performPairwiseComparisonsJacknifed( species,
-                                                          number_of_genomes,
-                                                          gwcd_list,
-                                                          true,
-                                                          jacknife_resamplings,
-                                                          jacknife_ratio,
-                                                          random_seed );
-                SurfacingUtil
-                        .writeMatrixToFile( new File( matrix_output_file
-                                                    + "_"
-                                                    + ForesterUtil.round( jacknife_ratio, 2 )
-                                                    + "_"
-                                                    + jacknife_resamplings
-                                                    + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
-                                            pwgc.getSharedBinaryCombinationsBasedDistances() );
-                SurfacingUtil
-                        .writeMatrixToFile( new File( matrix_output_file + "_" + ForesterUtil.round( jacknife_ratio, 2 )
-                                                    + "_" + jacknife_resamplings
-                                                    + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
-                                            pwgc.getSharedDomainsBasedDistances() );
-                //                if ( infer_species_trees ) {
-                //                    inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
-                //                            + INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc
-                //                            .getSharedBinaryCombinationsBasedDistances() );
-                //                    inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
-                //                            + INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc.getSharedDomainsBasedDistances() );
-                //                }
-            }
         } // if ( ( output_file != null ) && ( number_of_genomes > 2 ) && !isEmpty( automated_pairwise_comparison_suffix ) )
         if ( ( out_dir != null ) && ( !perform_pwc ) ) {
             output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
         }
-        writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
+        if ( write_to_nexus ) {
+            writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
+        }
         if ( ( ( intrees != null ) && ( intrees.length > 0 ) ) && ( number_of_genomes > 2 ) ) {
             final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
                                                                           e_value_max,
@@ -2276,12 +2214,15 @@ public class surfacing {
                                                         dc_type,
                                                         protein_length_stats_by_dc,
                                                         domain_number_stats_by_dc,
-                                                        domain_length_stats_by_domain );
+                                                        domain_length_stats_by_domain,
+                                                        tax_code_to_id_map,
+                                                        write_to_nexus,
+                                                        use_last_in_fitch_parsimony );
                 // Listing of all domain combinations gained is only done if only one input tree is used. 
                 if ( ( domain_id_to_secondary_features_maps != null )
                         && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
                     int j = 0;
-                    for( final Map<DomainId, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
+                    for( final Map<String, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
                         final Map<Species, MappingResults> mapping_results_map = new TreeMap<Species, MappingResults>();
                         final DomainParsimonyCalculator secondary_features_parsimony = DomainParsimonyCalculator
                                 .createInstance( intree, gwcd_list, domain_id_to_secondary_features_map );
@@ -2292,7 +2233,8 @@ public class surfacing {
                                                                                secondary_features_parsimony,
                                                                                intree,
                                                                                parameters_sb.toString(),
-                                                                               mapping_results_map );
+                                                                               mapping_results_map,
+                                                                               use_last_in_fitch_parsimony );
                         if ( i == 0 ) {
                             System.out.println();
                             System.out.println( "Mapping to secondary features:" );
@@ -2305,7 +2247,7 @@ public class surfacing {
                                 System.out.print( ", not mapped domains = " + mapping_results.getSumOfFailures() );
                                 if ( total_domains > 0 ) {
                                     System.out.println( ", mapped ratio = "
-                                            + ( 100 * mapping_results.getSumOfSuccesses() / total_domains ) + "%" );
+                                            + ( ( 100 * mapping_results.getSumOfSuccesses() ) / total_domains ) + "%" );
                                 }
                                 else {
                                     System.out.println( ", mapped ratio = n/a (total domains = 0 )" );
@@ -2329,39 +2271,12 @@ public class surfacing {
                                       plus_minus_analysis_numbers );
         }
         if ( output_protein_lists_for_all_domains ) {
-            writeProteinListsForAllSpecies( out_dir, protein_lists_per_species, gwcd_list );
-        }
-        //        if ( ( intrees != null ) && ( intrees.length > 0 ) && ( inferred_trees != null ) && ( inferred_trees.size() > 0 ) ) {
-        //            final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
-        //                                                                          e_value_max,
-        //                                                                          max_allowed_overlap,
-        //                                                                          no_engulfing_overlaps,
-        //                                                                          cutoff_scores_file );
-        //            String s = "_";
-        //            if ( radomize_fitch_parsimony ) {
-        //                s += random_number_seed_for_fitch_parsimony + "_";
-        //            }
-        //            int i = 0;
-        //            for( final Phylogeny inferred_tree : inferred_trees ) {
-        //                if ( !inferred_tree.isRooted() ) { 
-        //                    intrees[ 0 ].getRoot().getName();
-        //                    inferred_tree.r
-        //                }
-        //                final String outfile_name = ForesterUtil.removeSuffix( inferred_tree.getName() ) + s;
-        //                final DomainParsimonyCalculator domain_parsimony = DomainParsimonyCalculator
-        //                        .createInstance( inferred_tree, gwcd_list );
-        //                SurfacingUtil.executeParsimonyAnalysis( random_number_seed_for_fitch_parsimony,
-        //                                                        radomize_fitch_parsimony,
-        //                                                        outfile_name,
-        //                                                        domain_parsimony,
-        //                                                        inferred_tree,
-        //                                                        domain_id_to_go_ids_map,
-        //                                                        go_id_to_term_map,
-        //                                                        go_namespace_limit,
-        //                                                        parameters_sb.toString() );
-        //                i++;
-        //            }
-        //        }
+            writeProteinListsForAllSpecies( out_dir,
+                                            protein_lists_per_species,
+                                            gwcd_list,
+                                            output_list_of_all_proteins_per_domain_e_value_max );
+        }
+        gwcd_list = null;
         if ( all_bin_domain_combinations_gained_fitch != null ) {
             try {
                 executeFitchGainsAnalysis( new File( output_file
@@ -2467,7 +2382,7 @@ public class surfacing {
         for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
             sum += entry.getValue();
         }
-        final double percentage = 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) / sum;
+        final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
         ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
         log( "Percentage of multidomain proteins:            : " + percentage + "%", log_writer );
     }
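// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the diff above or below.
// It reproduces the multidomain-percentage arithmetic from the hunk above on a
// tiny, made-up domains-per-protein histogram (domain count -> protein count);
// the class and variable names here are hypothetical.
// ---------------------------------------------------------------------------
import java.util.SortedMap;
import java.util.TreeMap;

public class MultidomainPercentageSketch {

    public static void main( final String[] args ) {
        final SortedMap<Integer, Integer> histo = new TreeMap<Integer, Integer>();
        histo.put( 1, 70 ); // 70 proteins with exactly one domain
        histo.put( 2, 20 ); // 20 proteins with two domains
        histo.put( 3, 10 ); // 10 proteins with three domains
        int sum = 0;
        for( final int count : histo.values() ) {
            sum += count;
        }
        // proteins with more than one domain, as a percentage of all proteins:
        final double percentage = ( 100.0 * ( sum - histo.get( 1 ) ) ) / sum;
        System.out.println( "Percentage of multidomain proteins: " + percentage + "%" ); // prints 30.0%
    }
}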
@@ -2515,34 +2430,17 @@ public class surfacing {
             }
             System.out.println( "--" );
         }
-        for( int i = 0; i < input_file_properties.length; ++i ) {
+        for( final String[] input_file_propertie : input_file_properties ) {
             try {
-                intree.getNode( input_file_properties[ i ][ 1 ] );
+                intree.getNode( input_file_propertie[ 1 ] );
             }
             catch ( final IllegalArgumentException e ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
+                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
                         + "] not present/not unique in input tree" );
             }
         }
     }
 
-    // public static StringBuffer stringCombinableDomainsMapToStringBuffer(
-    // final SortedMap<String, CombinableDomains> map ) {
-    // final StringBuffer sb = new StringBuffer();
-    // for( final Iterator<String> iter = map.keySet().iterator();
-    // iter.hasNext(); ) {
-    // final Object key = iter.next();
-    // sb.append( ForesterUtil.pad( new StringBuffer( key.toString() ), 18, ' ',
-    // false ) );
-    // final CombinableDomains domain_combination = map.get( key );
-    // sb.append( ForesterUtil.pad( new StringBuffer( "" +
-    // domain_combination.getNumberOfCombiningDomains() ), 8,
-    // ' ', false ) );
-    // sb.append( domain_combination.toStringBuffer() );
-    // sb.append( ForesterUtil.getLineSeparator() );
-    // }
-    // return sb;
-    // }
     private static void printHelp() {
         System.out.println();
         System.out.println( "Usage:" );
@@ -2594,17 +2492,6 @@ public class surfacing {
         System.out.println( surfacing.INPUT_SPECIES_TREE_OPTION
                 + ": species tree, to perform (Dollo, Fitch) parismony analyses" );
         System.out
-                .println( JACKNIFE_OPTION
-                        + ": perform jacknife resampling for domain and binary domain combination based distance matrices [default resamplings: "
-                        + JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT + "]" );
-        System.out.println( JACKNIFE_RATIO_OPTION + ": ratio for jacknife resampling [default: "
-                + JACKNIFE_RATIO_DEFAULT + "]" );
-        System.out.println( JACKNIFE_RANDOM_SEED_OPTION
-                + ": seed for random number generator for jacknife resampling [default: "
-                + JACKNIFE_RANDOM_SEED_DEFAULT + "]" );
-        //        System.out.println( surfacing.INFER_SPECIES_TREES_OPTION
-        //                + ": to infer NJ species trees based on shared domains/binary domain combinations" );
-        System.out
                 .println( surfacing.INPUT_SPECIES_TREE_OPTION
                         + "=<treefiles in phyloXML format, separated by #>: to infer domain/binary domain combination gains/losses on given species trees" );
         System.out.println( surfacing.FILTER_POSITIVE_OPTION
@@ -2613,7 +2500,7 @@ public class surfacing {
                 + "=<file>: to filter out proteins containing at least one domain listed in <file>" );
         System.out.println( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION
                 + "=<file>: to filter out (ignore) domains listed in <file>" );
-        System.out.println( surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>: to read input files from <file>" );
+        System.out.println( surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>: to read input files from <file>" );
         System.out
                 .println( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION
                         + "=<seed>: seed for random number generator for Fitch Parsimony analysis (type: long, default: no randomization - given a choice, prefer absence" );
@@ -2630,18 +2517,27 @@ public class surfacing {
         System.out.println( surfacing.DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS
                 + ": to output binary domain combinations for (downstream) graph analysis" );
         System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS + ": to output all proteins per domain" );
+        System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION
+                + ": e value max per domain for output of all proteins per domain" );
+        System.out.println( surfacing.USE_LAST_IN_FITCH_OPTION + ": to use last in Fitch parsimony" );
+        System.out.println( surfacing.WRITE_TO_NEXUS_OPTION + ": to output in Nexus format" );
+        System.out.println( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION + ": to perform DC (domain combination) regain protein statistics" );
+        System.out.println( DA_ANALYSIS_OPTION + ": to perform DA (domain architecture) analysis" );
         System.out.println();
+        System.out.println( "Example 1: java -Xms128m -Xmx512m -cp path/to/forester.jar"
+                + " org.forester.application.surfacing p2g=pfam2go_2012_02_07.txt -dufs -cos=Pfam_260_NC1"
+                + " -no_eo -mo=0 -genomes=eukaryotes.txt -out_dir=out -o=o "
+                + " -species_tree=tol.xml -obo=gene_ontology_2012_02_07.obo -pos_filter=f.txt -all_prot" );
         System.out.println();
-        System.out.println( "Example: java -Xms128m -Xmx512m -cp path/to/forester.jar"
+        System.out.println( "Example 2: java -Xms128m -Xmx512m -cp path/to/forester.jar"
                 + " org.forester.application.surfacing -detail=punctilious -o=TEST.html -pwc=TEST"
                 + " -cos=Pfam_ls_22_TC2 -p2g=pfam2go -obo=gene_ontology_edit.obo "
-                + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo "
-                + "-ds_output=detailed_html -scoring=domains -sort=alpha -" + JACKNIFE_OPTION
-                + "=50 human mouse brafl strpu" );
+                + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo -genomes=eukaryotes.txt "
+                + "-ds_output=detailed_html -scoring=domains -sort=alpha " );
         System.out.println();
     }
 
-    private static void processFilter( final File filter_file, final SortedSet<DomainId> filter ) {
+    private static void processFilter( final File filter_file, final SortedSet<String> filter ) {
         SortedSet<String> filter_str = null;
         try {
             filter_str = ForesterUtil.file2set( filter_file );
@@ -2651,35 +2547,44 @@ public class surfacing {
         }
         if ( filter_str != null ) {
             for( final String string : filter_str ) {
-                filter.add( new DomainId( string ) );
+                filter.add( string );
             }
         }
         if ( VERBOSE ) {
             System.out.println( "Filter:" );
-            for( final DomainId domainId : filter ) {
-                System.out.println( domainId.getId() );
+            for( final String domainId : filter ) {
+                System.out.println( domainId );
             }
         }
     }
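// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the diff above or below.
// It shows how a domain filter file (one Pfam domain name per line) can be read
// into a sorted set of plain Strings, mirroring the DomainId -> String change
// in processFilter above; the file name and domain names are hypothetical.
// ---------------------------------------------------------------------------
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.SortedSet;
import java.util.TreeSet;

public class DomainFilterSketch {

    public static void main( final String[] args ) throws IOException {
        final SortedSet<String> filter = new TreeSet<String>();
        final BufferedReader reader = new BufferedReader( new FileReader( "positive_filter.txt" ) );
        String line;
        while ( ( line = reader.readLine() ) != null ) {
            final String domain = line.trim();
            if ( !domain.isEmpty() ) {
                filter.add( domain ); // e.g. "Pkinase", "Pkinase_Tyr"
            }
        }
        reader.close();
        System.out.println( "Filter:" );
        for( final String domain_id : filter ) {
            System.out.println( domain_id );
        }
    }
}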
 
-    private static String[][] processInputFileNames( final String[] names ) {
-        final String[][] input_file_properties = new String[ names.length ][];
-        for( int i = 0; i < names.length; ++i ) {
-            if ( names[ i ].indexOf( SEPARATOR_FOR_INPUT_VALUES ) < 0 ) {
-                input_file_properties[ i ] = new String[ 2 ];
-                input_file_properties[ i ][ 0 ] = names[ i ];
-                input_file_properties[ i ][ 1 ] = names[ i ];
+    private static String[][] processInputGenomesFile( final File input_genomes ) {
+        String[][] input_file_properties = null;
+        try {
+            input_file_properties = ForesterUtil.file22dArray( input_genomes );
+        }
+        catch ( final IOException e ) {
+            ForesterUtil.fatalError( surfacing.PRG_NAME,
+                                     "genomes file is expected to be in the following format \"<hmmpfam output file> <species>\": "
+                                             + e.getLocalizedMessage() );
+        }
+        final Set<String> specs = new HashSet<String>();
+        final Set<String> paths = new HashSet<String>();
+        for( int i = 0; i < input_file_properties.length; ++i ) {
+            if ( !PhyloXmlUtil.TAXOMONY_CODE_PATTERN.matcher( input_file_properties[ i ][ 1 ] ).matches() ) {
+                ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for species code: "
+                        + input_file_properties[ i ][ 1 ] );
             }
-            else {
-                input_file_properties[ i ] = names[ i ].split( surfacing.SEPARATOR_FOR_INPUT_VALUES + "" );
-                if ( input_file_properties[ i ].length != 3 ) {
-                    ForesterUtil
-                            .fatalError( surfacing.PRG_NAME,
-                                         "properties for the input files (hmmpfam output) are expected "
-                                                 + "to be in the following format \"<hmmpfam output file>#<species>\" (or just one word, which is both the filename and the species id), instead received \""
-                                                 + names[ i ] + "\"" );
-                }
+            if ( specs.contains( input_file_properties[ i ][ 1 ] ) ) {
+                ForesterUtil.fatalError( surfacing.PRG_NAME, "species code " + input_file_properties[ i ][ 1 ]
+                        + " is not unique" );
+            }
+            specs.add( input_file_properties[ i ][ 1 ] );
+            if ( paths.contains( input_file_properties[ i ][ 0 ] ) ) {
+                ForesterUtil.fatalError( surfacing.PRG_NAME, "path " + input_file_properties[ i ][ 0 ]
+                        + " is not unique" );
             }
+            paths.add( input_file_properties[ i ][ 0 ] );
             final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
             if ( !ForesterUtil.isEmpty( error ) ) {
                 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
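// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the diff above or below.
// It parses a "-genomes=" input file of the form
//   <hmmscan per-domain table output file> <species code>
// per line and checks that species codes and file paths are unique, as in
// processInputGenomesFile above. The species-code pattern below is a simplified
// stand-in for PhyloXmlUtil.TAXOMONY_CODE_PATTERN; a hypothetical
// "eukaryotes.txt" might contain lines such as:
//   human_hmmscan_260.txt   HUMAN
//   mouse_hmmscan_260.txt   MOUSE
// Usage: java GenomesFileSketch eukaryotes.txt
// ---------------------------------------------------------------------------
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

public class GenomesFileSketch {

    // simplified assumption: 3-5 uppercase letters/digits
    private static final Pattern SPECIES_CODE = Pattern.compile( "[A-Z0-9]{3,5}" );

    public static void main( final String[] args ) throws IOException {
        final Set<String> species = new HashSet<String>();
        final Set<String> paths = new HashSet<String>();
        final BufferedReader reader = new BufferedReader( new FileReader( args[ 0 ] ) );
        String line;
        while ( ( line = reader.readLine() ) != null ) {
            if ( line.trim().isEmpty() ) {
                continue;
            }
            final String[] fields = line.trim().split( "\\s+" );
            if ( fields.length < 2 ) {
                throw new IOException( "expected \"<hmmscan output file> <species>\", got: " + line );
            }
            if ( !SPECIES_CODE.matcher( fields[ 1 ] ).matches() ) {
                throw new IOException( "illegal format for species code: " + fields[ 1 ] );
            }
            if ( !species.add( fields[ 1 ] ) ) {
                throw new IOException( "species code " + fields[ 1 ] + " is not unique" );
            }
            if ( !paths.add( fields[ 0 ] ) ) {
                throw new IOException( "path " + fields[ 0 ] + " is not unique" );
            }
        }
        reader.close();
        System.out.println( "read " + species.size() + " genome entries" );
    }
}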
@@ -2788,7 +2693,7 @@ public class surfacing {
 
     private static void writePresentToNexus( final File output_file,
                                              final File positive_filter_file,
-                                             final SortedSet<DomainId> filter,
+                                             final SortedSet<String> filter,
                                              final List<GenomeWideCombinableDomains> gwcd_list ) {
         try {
             SurfacingUtil
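// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the diff above or below.
// It writes a domain presence/absence matrix in a minimal NEXUS form, roughly
// the kind of output writePresentToNexus delegates to SurfacingUtil for (the
// exact blocks and symbols written there may differ). The species codes and
// the tiny 0/1 matrix below are hypothetical.
// ---------------------------------------------------------------------------
import java.io.IOException;
import java.io.PrintWriter;
import java.util.LinkedHashMap;
import java.util.Map;

public class NexusPresenceMatrixSketch {

    public static void main( final String[] args ) throws IOException {
        // species -> presence (1) / absence (0) string over the same ordered set of domains
        final Map<String, String> matrix = new LinkedHashMap<String, String>();
        matrix.put( "HUMAN", "1011" );
        matrix.put( "MOUSE", "1001" );
        matrix.put( "BRAFL", "0111" );
        final PrintWriter out = new PrintWriter( "presence_absence.nex" );
        out.println( "#NEXUS" );
        out.println( "BEGIN TAXA;" );
        out.println( " DIMENSIONS NTAX=" + matrix.size() + ";" );
        out.print( " TAXLABELS" );
        for( final String taxon : matrix.keySet() ) {
            out.print( " " + taxon );
        }
        out.println( ";" );
        out.println( "END;" );
        out.println( "BEGIN CHARACTERS;" );
        out.println( " DIMENSIONS NCHAR=" + matrix.values().iterator().next().length() + ";" );
        out.println( " FORMAT DATATYPE=STANDARD SYMBOLS=\"01\";" );
        out.println( " MATRIX" );
        for( final Map.Entry<String, String> entry : matrix.entrySet() ) {
            out.println( "  " + entry.getKey() + " " + entry.getValue() );
        }
        out.println( " ;" );
        out.println( "END;" );
        out.close();
    }
}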
@@ -2806,12 +2711,13 @@ public class surfacing {
 
     private static void writeProteinListsForAllSpecies( final File output_dir,
                                                         final SortedMap<Species, List<Protein>> protein_lists_per_species,
-                                                        final List<GenomeWideCombinableDomains> gwcd_list ) {
-        final SortedSet<DomainId> all_domains = new TreeSet<DomainId>();
+                                                        final List<GenomeWideCombinableDomains> gwcd_list,
+                                                        final double domain_e_cutoff ) {
+        final SortedSet<String> all_domains = new TreeSet<String>();
         for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
             all_domains.addAll( gwcd.getAllDomainIds() );
         }
-        for( final DomainId domain : all_domains ) {
+        for( final String domain : all_domains ) {
             final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + SEQ_EXTRACT_SUFFIX );
             SurfacingUtil.checkForOutputFileWriteability( out );
             try {
@@ -2820,7 +2726,8 @@ public class surfacing {
                                                    domain,
                                                    proteins_file_writer,
                                                    "\t",
-                                                   LIMIT_SPEC_FOR_PROT_EX );
+                                                   LIMIT_SPEC_FOR_PROT_EX,
+                                                   domain_e_cutoff );
                 proteins_file_writer.close();
             }
             catch ( final IOException e ) {