diff --git a/forester/java/src/org/forester/application/surfacing.java b/forester/java/src/org/forester/application/surfacing.java
index 2218f40..ec4878c 100644
--- a/forester/java/src/org/forester/application/surfacing.java
+++ b/forester/java/src/org/forester/application/surfacing.java
@@ -22,7 +22,7 @@
 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 //
 // Contact: phylosoft @ gmail . com
-// WWW: www.phylosoft.org/forester
+// WWW: https://sites.google.com/site/cmzmasek/home/software/forester
 
 package org.forester.application;
 
@@ -34,17 +34,14 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
 import org.forester.go.GoId;
 import org.forester.go.GoNameSpace;
 import org.forester.go.GoTerm;
@@ -54,15 +51,9 @@ import org.forester.go.PfamToGoMapping;
 import org.forester.go.PfamToGoParser;
 import org.forester.io.parsers.HmmscanPerDomainTableParser;
 import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
-import org.forester.io.parsers.util.ParserUtils;
 import org.forester.phylogeny.Phylogeny;
-import org.forester.phylogeny.PhylogenyMethods;
-import org.forester.phylogeny.PhylogenyNode;
-import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
-import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
 import org.forester.protein.BinaryDomainCombination;
 import org.forester.protein.Domain;
-import org.forester.protein.DomainId;
 import org.forester.protein.Protein;
 import org.forester.species.BasicSpecies;
 import org.forester.species.Species;
@@ -70,7 +61,6 @@ import org.forester.surfacing.BasicDomainSimilarityCalculator;
 import org.forester.surfacing.BasicGenomeWideCombinableDomains;
 import org.forester.surfacing.CombinationsBasedPairwiseDomainSimilarityCalculator;
 import org.forester.surfacing.DomainCountsBasedPairwiseSimilarityCalculator;
-import org.forester.surfacing.DomainCountsDifferenceUtil;
 import org.forester.surfacing.DomainLengthsTable;
 import org.forester.surfacing.DomainParsimonyCalculator;
 import org.forester.surfacing.DomainSimilarity;
@@ -187,7 +177,10 @@ public class surfacing {
     final static private String                               MAX_ALLOWED_OVERLAP_OPTION                                                    = "mo";
     final static private String                               NO_ENGULFING_OVERLAP_OPTION                                                   = "no_eo";
     final static private String                               IGNORE_COMBINATION_WITH_SAME_OPTION                                           = "ignore_self_comb";
-    final static private String                               PAIRWISE_DOMAIN_COMPARISONS_PREFIX                                            = "pwc_";
+    final static private String                               PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION                                       = "dc_regain_stats";
+    final static private String                               DA_ANALYSIS_OPTION                                                            = "DA_analyis";
+    final static private String                               USE_LAST_IN_FITCH_OPTION                                                      = "last";
+    public final static String                                PAIRWISE_DOMAIN_COMPARISONS_PREFIX                                            = "pwc_";
     final static private String                               PAIRWISE_DOMAIN_COMPARISONS_OPTION                                            = "pwc";
     final static private String                               OUTPUT_FILE_OPTION                                                            = "o";
     final static private String                               PFAM_TO_GO_FILE_USE_OPTION                                                    = "p2g";
@@ -216,43 +209,35 @@ public class surfacing {
                                                                                                                                                     + ForesterConstants.PHYLO_XML_SUFFIX;
     final static private String                               NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX                  = "_bin_combinations_NJ"
                                                                                                                                                     + ForesterConstants.PHYLO_XML_SUFFIX;
-    final static private String                               JACKNIFE_OPTION                                                               = "jack";
-    final static private String                               JACKNIFE_RANDOM_SEED_OPTION                                                   = "seed";
-    final static private String                               JACKNIFE_RATIO_OPTION                                                         = "jack_ratio";
-    private static final int                                  JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT                                        = 100;
-    final static private long                                 JACKNIFE_RANDOM_SEED_DEFAULT                                                  = 19;
-    final static private double                               JACKNIFE_RATIO_DEFAULT                                                        = 0.5;
-    //final static private String  INFER_SPECIES_TREES_OPTION                                             = "species_tree_inference";
     final static private String                               FILTER_POSITIVE_OPTION                                                        = "pos_filter";
     final static private String                               FILTER_NEGATIVE_OPTION                                                        = "neg_filter";
     final static private String                               FILTER_NEGATIVE_DOMAINS_OPTION                                                = "neg_dom_filter";
-    final static private String                               INPUT_FILES_FROM_FILE_OPTION                                                  = "input";
+    final static private String                               INPUT_GENOMES_FILE_OPTION                                                     = "genomes";
     final static private String                               INPUT_SPECIES_TREE_OPTION                                                     = "species_tree";
     final static private String                               SEQ_EXTRACT_OPTION                                                            = "prot_extract";
-    final static private char                                 SEPARATOR_FOR_INPUT_VALUES                                                    = '#';
-    final static private String                               PRG_VERSION                                                                   = "2.252";
-    final static private String                               PRG_DATE                                                                      = "2012.08.01";
+    final static private String                               PRG_VERSION                                                                   = "2.304";
+    final static private String                               PRG_DATE                                                                      = "131024";
     final static private String                               E_MAIL                                                                        = "czmasek@burnham.org";
-    final static private String                               WWW                                                                           = "www.phylosoft.org/forester/applications/surfacing";
+    final static private String                               WWW                                                                           = "https://sites.google.com/site/cmzmasek/home/software/forester/surfacing";
     final static private boolean                              IGNORE_DUFS_DEFAULT                                                           = true;
     final static private boolean                              IGNORE_COMBINATION_WITH_SAME_DEFAULLT                                         = false;
     final static private double                               MAX_E_VALUE_DEFAULT                                                           = -1;
-    final static private int                                  MAX_ALLOWED_OVERLAP_DEFAULT                                                   = -1;
+    public final static int                                   MAX_ALLOWED_OVERLAP_DEFAULT                                                   = -1;
     private static final String                               RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION                                        = "random_seed";
     private static final String                               CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS                                      = "consider_bdc_direction";
     private static final String                               CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY                        = "consider_bdc_adj";
-    private static final String                               SEQ_EXTRACT_SUFFIX                                                            = ".prot";
-    private static final String                               PLUS_MINUS_ANALYSIS_OPTION                                                    = "plus_minus";
-    private static final String                               PLUS_MINUS_DOM_SUFFIX                                                         = "_plus_minus_dom.txt";
-    private static final String                               PLUS_MINUS_DOM_SUFFIX_HTML                                                    = "_plus_minus_dom.html";
-    private static final String                               PLUS_MINUS_DC_SUFFIX_HTML                                                     = "_plus_minus_dc.html";
-    private static final int                                  PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT                                          = 0;
-    private static final double                               PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT                                            = 1.0;
-    private static final String                               PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX                                              = "_plus_minus_go_ids_all.txt";
-    private static final String                               PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX                                          = "_plus_minus_go_ids_passing.txt";
+    public static final String                                SEQ_EXTRACT_SUFFIX                                                            = ".prot";
+    public static final String                                PLUS_MINUS_ANALYSIS_OPTION                                                    = "plus_minus";
+    public static final String                                PLUS_MINUS_DOM_SUFFIX                                                         = "_plus_minus_dom.txt";
+    public static final String                                PLUS_MINUS_DOM_SUFFIX_HTML                                                    = "_plus_minus_dom.html";
+    public static final String                                PLUS_MINUS_DC_SUFFIX_HTML                                                     = "_plus_minus_dc.html";
+    public static final int                                   PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT                                          = 0;
+    public static final double                                PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT                                            = 1.0;
+    public static final String                                PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX                                              = "_plus_minus_go_ids_all.txt";
+    public static final String                                PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX                                          = "_plus_minus_go_ids_passing.txt";
     private static final String                               OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS                                           = "all_prot";
     final static private String                               OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION                         = "all_prot_e";
-    private static final boolean                              VERBOSE                                                                       = false;
+    public static final boolean                               VERBOSE                                                                       = false;
     private static final String                               OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX              = "_fitch_dc_gains_counts";
     private static final String                               OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX                = "_fitch_dc_losses_counts";
     private static final String                               DOMAIN_LENGTHS_ANALYSIS_SUFFIX                                                = "_domain_lengths_analysis";
@@ -270,6 +255,7 @@ public class surfacing {
     private static final String                               LOG_FILE_SUFFIX                                                               = "_log.txt";
     private static final String                               DATA_FILE_SUFFIX                                                              = "_domain_combination_data.txt";
     private static final String                               DATA_FILE_DESC                                                                = "#SPECIES\tPRTEIN_ID\tN_TERM_DOMAIN\tC_TERM_DOMAIN\tN_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tC_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tN_TERM_DOMAIN_COUNTS_PER_PROTEIN\tC_TERM_DOMAIN_COUNTS_PER_PROTEIN";
+    private static final String                               WRITE_TO_NEXUS_OPTION                                                         = "nexus";
     private static final INDIVIDUAL_SCORE_CUTOFF              INDIVIDUAL_SCORE_CUTOFF_DEFAULT                                               = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE;                                                                                                                                                      //TODO look at me! change?
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX                          = "_indep_dc_gains_fitch_counts.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX                              = "_indep_dc_gains_fitch_lists.txt";
@@ -282,275 +268,7 @@ public class surfacing {
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX                       = "_indep_dc_gains_fitch_lists_MAPPED.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX        = "_indep_dc_gains_fitch_lists_for_go_mapping_MAPPED.txt";
     public static final String                                INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique_MAPPED.txt";
-    private static final boolean                              PERFORM_DC_REGAIN_PROTEINS_STATS                                              = true;
-
-    private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
-                                                                 final String[][] input_file_properties,
-                                                                 final String automated_pairwise_comparison_suffix,
-                                                                 final File outdir ) {
-        for( int i = 0; i < input_file_properties.length; ++i ) {
-            for( int j = 0; j < i; ++j ) {
-                final String species_i = input_file_properties[ i ][ 1 ];
-                final String species_j = input_file_properties[ j ][ 1 ];
-                String pairwise_similarities_output_file_str = PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i + "_"
-                        + species_j + automated_pairwise_comparison_suffix;
-                switch ( domain_similarity_print_option ) {
-                    case HTML:
-                        if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
-                            pairwise_similarities_output_file_str += ".html";
-                        }
-                        break;
-                }
-                final String error = ForesterUtil
-                        .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str : outdir
-                                + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
-                if ( !ForesterUtil.isEmpty( error ) ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, error );
-                }
-            }
-        }
-    }
-
-    private static StringBuilder createParametersAsString( final boolean ignore_dufs,
-                                                           final double e_value_max,
-                                                           final int max_allowed_overlap,
-                                                           final boolean no_engulfing_overlaps,
-                                                           final File cutoff_scores_file,
-                                                           final BinaryDomainCombination.DomainCombinationType dc_type ) {
-        final StringBuilder parameters_sb = new StringBuilder();
-        parameters_sb.append( "E-value: " + e_value_max );
-        if ( cutoff_scores_file != null ) {
-            parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
-        }
-        else {
-            parameters_sb.append( ", Cutoff-scores-file: not-set" );
-        }
-        if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
-            parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
-        }
-        else {
-            parameters_sb.append( ", Max-overlap: not-set" );
-        }
-        if ( no_engulfing_overlaps ) {
-            parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
-        }
-        else {
-            parameters_sb.append( ", Engulfing-overlaps: allowed" );
-        }
-        if ( ignore_dufs ) {
-            parameters_sb.append( ", Ignore-dufs: true" );
-        }
-        else {
-            parameters_sb.append( ", Ignore-dufs: false" );
-        }
-        parameters_sb.append( ", DC type (if applicable): " + dc_type );
-        return parameters_sb;
-    }
-
-    /**
-     * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
-     * 
-     * 
-     * @param output_file
-     * @param all_bin_domain_combinations_changed
-     * @param sum_of_all_domains_encountered
-     * @param all_bin_domain_combinations_encountered
-     * @param is_gains_analysis
-     * @param protein_length_stats_by_dc 
-     * @throws IOException
-     */
-    private static void executeFitchGainsAnalysis( final File output_file,
-                                                   final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
-                                                   final int sum_of_all_domains_encountered,
-                                                   final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
-                                                   final boolean is_gains_analysis ) throws IOException {
-        SurfacingUtil.checkForOutputFileWriteability( output_file );
-        final Writer out = ForesterUtil.createBufferedWriter( output_file );
-        final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
-                .listToSortedCountsMap( all_bin_domain_combinations_changed );
-        final SortedSet<DomainId> all_domains_in_combination_changed_more_than_once = new TreeSet<DomainId>();
-        final SortedSet<DomainId> all_domains_in_combination_changed_only_once = new TreeSet<DomainId>();
-        int above_one = 0;
-        int one = 0;
-        for( final Object bdc_object : bdc_to_counts.keySet() ) {
-            final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
-            final int count = bdc_to_counts.get( bdc_object );
-            if ( count < 1 ) {
-                ForesterUtil.unexpectedFatalError( PRG_NAME, "count < 1 " );
-            }
-            out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
-            if ( count > 1 ) {
-                all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
-                all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
-                above_one++;
-            }
-            else if ( count == 1 ) {
-                all_domains_in_combination_changed_only_once.add( bdc.getId0() );
-                all_domains_in_combination_changed_only_once.add( bdc.getId1() );
-                one++;
-            }
-        }
-        final int all = all_bin_domain_combinations_encountered.size();
-        int never_lost = -1;
-        if ( !is_gains_analysis ) {
-            all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
-            never_lost = all_bin_domain_combinations_encountered.size();
-            for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
-                out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
-            }
-        }
-        if ( is_gains_analysis ) {
-            out.write( "Sum of all distinct domain combinations appearing once               : " + one
-                    + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domain combinations appearing more than once     : " + above_one
-                    + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domains in combinations apppearing only once     : "
-                    + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domains in combinations apppearing more than once: "
-                    + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
-        }
-        else {
-            out.write( "Sum of all distinct domain combinations never lost                   : " + never_lost
-                    + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domain combinations lost once                    : " + one
-                    + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domain combinations lost more than once          : " + above_one
-                    + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domains in combinations lost only once           : "
-                    + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
-            out.write( "Sum of all distinct domains in combinations lost more than once: "
-                    + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
-        }
-        out.write( "All binary combinations                                              : " + all
-                + ForesterUtil.LINE_SEPARATOR );
-        out.write( "All domains                                                          : "
-                + sum_of_all_domains_encountered );
-        out.close();
-        ForesterUtil.programMessage( surfacing.PRG_NAME,
-                                     "Wrote fitch domain combination dynamics counts analysis to \"" + output_file
-                                             + "\"" );
-    }
-
-    private static void executePlusMinusAnalysis( final File output_file,
-                                                  final List<String> plus_minus_analysis_high_copy_base,
-                                                  final List<String> plus_minus_analysis_high_copy_target,
-                                                  final List<String> plus_minus_analysis_low_copy,
-                                                  final List<GenomeWideCombinableDomains> gwcd_list,
-                                                  final SortedMap<Species, List<Protein>> protein_lists_per_species,
-                                                  final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
-                                                  final Map<GoId, GoTerm> go_id_to_term_map,
-                                                  final List<Object> plus_minus_analysis_numbers ) {
-        final Set<String> all_spec = new HashSet<String>();
-        for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
-            all_spec.add( gwcd.getSpecies().getSpeciesId() );
-        }
-        final File html_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX_HTML );
-        final File plain_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX );
-        final File html_out_dc = new File( output_file + PLUS_MINUS_DC_SUFFIX_HTML );
-        final File all_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
-        final File passing_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
-        final File proteins_file_base = new File( output_file + "" );
-        final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
-        final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
-        try {
-            DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
-                                                                       protein_lists_per_species,
-                                                                       plus_minus_analysis_high_copy_base,
-                                                                       plus_minus_analysis_high_copy_target,
-                                                                       plus_minus_analysis_low_copy,
-                                                                       min_diff,
-                                                                       factor,
-                                                                       plain_out_dom,
-                                                                       html_out_dom,
-                                                                       html_out_dc,
-                                                                       domain_id_to_go_ids_map,
-                                                                       go_id_to_term_map,
-                                                                       all_domains_go_ids_out_dom,
-                                                                       passing_domains_go_ids_out_dom,
-                                                                       proteins_file_base );
-        }
-        catch ( final IOException e ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
-        }
-        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
-                + html_out_dom + "\"" );
-        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
-                + plain_out_dom + "\"" );
-        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \"" + html_out_dc
-                + "\"" );
-        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based passing GO ids to \""
-                + passing_domains_go_ids_out_dom + "\"" );
-        ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based all GO ids to \""
-                + all_domains_go_ids_out_dom + "\"" );
-    }
-
-    private static Phylogeny[] getIntrees( final File[] intree_files,
-                                           final int number_of_genomes,
-                                           final String[][] input_file_properties ) {
-        final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
-        int i = 0;
-        for( final File intree_file : intree_files ) {
-            Phylogeny intree = null;
-            final String error = ForesterUtil.isReadableFile( intree_file );
-            if ( !ForesterUtil.isEmpty( error ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: "
-                        + error );
-            }
-            try {
-                final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
-                        .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
-                if ( p_array.length < 1 ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
-                            + "] does not contain any phylogeny in phyloXML format" );
-                }
-                else if ( p_array.length > 1 ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
-                            + "] contains more than one phylogeny in phyloXML format" );
-                }
-                intree = p_array[ 0 ];
-            }
-            catch ( final Exception e ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file
-                        + "]: " + error );
-            }
-            if ( ( intree == null ) || intree.isEmpty() ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
-            }
-            if ( !intree.isRooted() ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
-            }
-            if ( intree.getNumberOfExternalNodes() < number_of_genomes ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                         "number of external nodes [" + intree.getNumberOfExternalNodes()
-                                                 + "] of input tree [" + intree_file
-                                                 + "] is smaller than the number of genomes the be analyzed ["
-                                                 + number_of_genomes + "]" );
-            }
-            final StringBuilder parent_names = new StringBuilder();
-            final int nodes_lacking_name = SurfacingUtil.getNumberOfNodesLackingName( intree, parent_names );
-            if ( nodes_lacking_name > 0 ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] has "
-                        + nodes_lacking_name + " node(s) lacking a name [parent names:" + parent_names + "]" );
-            }
-            preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
-            if ( !intree.isCompletelyBinary() ) {
-                ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "input tree [" + intree_file
-                        + "] is not completely binary" );
-            }
-            intrees[ i++ ] = intree;
-        }
-        return intrees;
-    }
-
-    private static void log( final String msg, final Writer w ) {
-        try {
-            w.write( msg );
-            w.write( ForesterUtil.LINE_SEPARATOR );
-        }
-        catch ( final IOException e ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
-        }
-    }
+    private static final boolean                              CALC_SIMILARITY_SCORES                                                        = false;
 
     public static void main( final String args[] ) {
         final long start_time = new Date().getTime();
@@ -605,14 +323,10 @@ public class surfacing {
         allowed_options.add( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
         allowed_options.add( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION );
         allowed_options.add( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS );
-        allowed_options.add( JACKNIFE_OPTION );
-        allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
-        allowed_options.add( JACKNIFE_RATIO_OPTION );
         allowed_options.add( INPUT_SPECIES_TREE_OPTION );
-        //allowed_options.add( INFER_SPECIES_TREES_OPTION );
         allowed_options.add( FILTER_POSITIVE_OPTION );
         allowed_options.add( FILTER_NEGATIVE_OPTION );
-        allowed_options.add( INPUT_FILES_FROM_FILE_OPTION );
+        allowed_options.add( INPUT_GENOMES_FILE_OPTION );
         allowed_options.add( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
         allowed_options.add( FILTER_NEGATIVE_DOMAINS_OPTION );
         allowed_options.add( IGNORE_VIRAL_IDS );
@@ -623,6 +337,10 @@ public class surfacing {
         allowed_options.add( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS );
         allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS );
         allowed_options.add( CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY );
+        allowed_options.add( WRITE_TO_NEXUS_OPTION );
+        allowed_options.add( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION );
+        allowed_options.add( DA_ANALYSIS_OPTION );
+        allowed_options.add( USE_LAST_IN_FITCH_OPTION );
         boolean ignore_dufs = surfacing.IGNORE_DUFS_DEFAULT;
         boolean ignore_combination_with_same = surfacing.IGNORE_COMBINATION_WITH_SAME_DEFAULLT;
         double e_value_max = surfacing.MAX_E_VALUE_DEFAULT;
@@ -631,6 +349,22 @@ public class surfacing {
         if ( dissallowed_options.length() > 0 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown option(s): " + dissallowed_options );
         }
+        boolean use_last_in_fitch_parsimony = false;
+        if ( cla.isOptionSet( USE_LAST_IN_FITCH_OPTION ) ) {
+            use_last_in_fitch_parsimony = true;
+        }
+        boolean write_to_nexus = false;
+        if ( cla.isOptionSet( WRITE_TO_NEXUS_OPTION ) ) {
+            write_to_nexus = true;
+        }
+        boolean perform_dc_regain_proteins_stats = false;
+        if ( cla.isOptionSet( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION ) ) {
+            perform_dc_regain_proteins_stats = true;
+        }
+        boolean da_analysis = false;
+        if ( cla.isOptionSet( DA_ANALYSIS_OPTION ) ) {
+            da_analysis = true;
+        }
         boolean output_binary_domain_combinationsfor_graph_analysis = false;
         if ( cla.isOptionSet( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS ) ) {
             output_binary_domain_combinationsfor_graph_analysis = true;
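The four new presence-only flags above (last, nexus, dc_regain_stats, DA_analyis) all follow the same set-if-present pattern. A minimal equivalent sketch, assuming only the cla.isOptionSet(String) call already used in this hunk and the option constants declared earlier in the class:

    // Equivalent, more compact reading of the four flags; isOptionSet() already returns the
    // boolean that the if-blocks above assign by hand, so behaviour is unchanged.
    final boolean use_last_in_fitch_parsimony = cla.isOptionSet( USE_LAST_IN_FITCH_OPTION );
    final boolean write_to_nexus = cla.isOptionSet( WRITE_TO_NEXUS_OPTION );
    final boolean perform_dc_regain_proteins_stats = cla.isOptionSet( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION );
    final boolean da_analysis = cla.isOptionSet( DA_ANALYSIS_OPTION );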
@@ -673,6 +407,10 @@ public class surfacing {
         if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION ) ) {
             ignore_species_specific_domains = true;
         }
+        if ( !cla.isOptionValueSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
+            ForesterUtil.fatalError( surfacing.PRG_NAME, "no input species tree file given: "
+                    + surfacing.INPUT_SPECIES_TREE_OPTION + "=<file>" );
+        }
         File output_file = null;
         if ( cla.isOptionSet( surfacing.OUTPUT_FILE_OPTION ) ) {
             if ( !cla.isOptionValueSet( surfacing.OUTPUT_FILE_OPTION ) ) {
@@ -697,7 +435,7 @@ public class surfacing {
                         + error );
             }
             try {
-                final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, " " );
+                final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, ' ' );
                 individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
             }
             catch ( final IOException e ) {
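The separator change above (the String " " becomes the char ' ') only affects how BasicTableParser tokenizes the cutoff-scores file; getColumnsAsMapDouble( 0, 1 ) implies a two-column, space-separated layout keyed by column 0. A self-contained sketch of that assumed layout, using plain JDK calls rather than the forester parser (the domain ids and scores are made up):

    import java.util.HashMap;
    import java.util.Map;

    public class CutoffScoresSketch {
        public static void main( final String[] args ) {
            // hypothetical cutoff-scores lines: column 0 = domain id, column 1 = score cutoff
            final String[] lines = { "PF00069 20.3", "PF00018 15.0" };
            final Map<String, Double> cutoffs = new HashMap<String, Double>();
            for( final String line : lines ) {
                final String[] cols = line.split( " " );
                cutoffs.put( cols[ 0 ], Double.valueOf( cols[ 1 ] ) );
            }
            System.out.println( cutoffs ); // prints the domain-id -> cutoff map
        }
    }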
@@ -784,35 +522,27 @@ public class surfacing {
         final List<String> plus_minus_analysis_high_copy_target_species = new ArrayList<String>();
         final List<String> plus_minus_analysis_high_low_copy_species = new ArrayList<String>();
         final List<Object> plus_minus_analysis_numbers = new ArrayList<Object>();
-        processPlusMinusAnalysisOption( cla,
-                                        plus_minus_analysis_high_copy_base_species,
-                                        plus_minus_analysis_high_copy_target_species,
-                                        plus_minus_analysis_high_low_copy_species,
-                                        plus_minus_analysis_numbers );
-        File input_files_file = null;
-        String[] input_file_names_from_file = null;
-        if ( cla.isOptionSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
-            if ( !cla.isOptionValueSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input files file: -"
-                        + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
-            }
-            input_files_file = new File( cla.getOptionValue( surfacing.INPUT_FILES_FROM_FILE_OPTION ) );
-            final String msg = ForesterUtil.isReadableFile( input_files_file );
+        SurfacingUtil.processPlusMinusAnalysisOption( cla,
+                                                      plus_minus_analysis_high_copy_base_species,
+                                                      plus_minus_analysis_high_copy_target_species,
+                                                      plus_minus_analysis_high_low_copy_species,
+                                                      plus_minus_analysis_numbers );
+        File input_genomes_file = null;
+        if ( cla.isOptionSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
+            if ( !cla.isOptionValueSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
+                ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input genomes file: -"
+                        + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
+            }
+            input_genomes_file = new File( cla.getOptionValue( surfacing.INPUT_GENOMES_FILE_OPTION ) );
+            final String msg = ForesterUtil.isReadableFile( input_genomes_file );
             if ( !ForesterUtil.isEmpty( msg ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + input_files_file + "\": " + msg );
-            }
-            try {
-                input_file_names_from_file = ForesterUtil.file2array( input_files_file );
-            }
-            catch ( final IOException e ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read from \"" + input_files_file + "\": " + e );
+                ForesterUtil
+                        .fatalError( surfacing.PRG_NAME, "can not read from \"" + input_genomes_file + "\": " + msg );
             }
         }
-        if ( ( cla.getNumberOfNames() < 1 )
-                && ( ( input_file_names_from_file == null ) || ( input_file_names_from_file.length < 1 ) ) ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                     "No hmmpfam output file indicated is input: use comand line directly or "
-                                             + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
+        else {
+            ForesterUtil.fatalError( surfacing.PRG_NAME, "no input genomes file given: "
+                    + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
         }
         DomainSimilarity.DomainSimilarityScoring scoring = SCORING_DEFAULT;
         if ( cla.isOptionSet( surfacing.SCORING_OPTION ) ) {
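For orientation: the old -input option (a file listing hmmpfam output files) is gone, the new -genomes file is mandatory, and SurfacingUtil.processInputGenomesFile() (applied in a later hunk) turns it into the String[][] input_file_properties table used throughout the rest of the method. A hypothetical sketch of how that table is consumed, relying only on the indices visible in this diff (index 1 serves as the species label elsewhere; treating index 0 as the per-genome input file is an assumption):

    // Hypothetical walk over the genomes table; only the [ 0 ] and [ 1 ] indices that appear
    // in this diff are used, and the meaning of column 0 is an assumption for illustration.
    final String[][] input_file_properties = SurfacingUtil.processInputGenomesFile( input_genomes_file );
    for( final String[] genome_row : input_file_properties ) {
        System.out.println( "column 0 (input file?): " + genome_row[ 0 ] + ", species label: "
                + genome_row[ 1 ] );
    }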
@@ -853,7 +583,6 @@ public class surfacing {
         double output_list_of_all_proteins_per_domain_e_value_max = -1;
         if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS ) ) {
             output_protein_lists_for_all_domains = true;
-            //
             if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION ) ) {
                 try {
                     output_list_of_all_proteins_per_domain_e_value_max = cla
@@ -863,7 +592,6 @@ public class surfacing {
                     ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for per domain E-value maximum" );
                 }
             }
-            //
         }
         Detailedness detailedness = DETAILEDNESS_DEFAULT;
         if ( cla.isOptionSet( surfacing.DETAILEDNESS_OPTION ) ) {
@@ -988,8 +716,6 @@ public class surfacing {
                 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.HTML;
             }
             else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML ) ) {
-                // domain_similarity_print_option =
-                // DomainSimilarity.PRINT_OPTION.SIMPLE_HTML;
                 ForesterUtil.fatalError( surfacing.PRG_NAME, "simple HTML output not implemented yet :(" );
             }
             else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED ) ) {
@@ -1032,13 +758,7 @@ public class surfacing {
                         + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
             }
         }
-        String[][] input_file_properties = null;
-        if ( input_file_names_from_file != null ) {
-            input_file_properties = surfacing.processInputFileNames( input_file_names_from_file );
-        }
-        else {
-            input_file_properties = surfacing.processInputFileNames( cla.getNames() );
-        }
+        final String[][] input_file_properties = SurfacingUtil.processInputGenomesFile( input_genomes_file );
         final int number_of_genomes = input_file_properties.length;
         if ( number_of_genomes < 2 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot analyze less than two files" );
@@ -1048,12 +768,12 @@ public class surfacing {
                     + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
                     + "=<suffix> to turn on pairwise analyses with less than three input files" );
         }
-        checkWriteabilityForPairwiseComparisons( domain_similarity_print_option,
-                                                 input_file_properties,
-                                                 automated_pairwise_comparison_suffix,
-                                                 out_dir );
+        SurfacingUtil.checkWriteabilityForPairwiseComparisons( domain_similarity_print_option,
+                                                               input_file_properties,
+                                                               automated_pairwise_comparison_suffix,
+                                                               out_dir );
         for( int i = 0; i < number_of_genomes; i++ ) {
-            File dcc_outfile = new File( input_file_properties[ i ][ 0 ]
+            File dcc_outfile = new File( input_file_properties[ i ][ 1 ]
                     + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
             if ( out_dir != null ) {
                 dcc_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + dcc_outfile );
@@ -1061,7 +781,7 @@ public class surfacing {
             SurfacingUtil.checkForOutputFileWriteability( dcc_outfile );
         }
         File pfam_to_go_file = null;
-        Map<DomainId, List<GoId>> domain_id_to_go_ids_map = null;
+        Map<String, List<GoId>> domain_id_to_go_ids_map = null;
         int domain_id_to_go_ids_count = 0;
         if ( cla.isOptionSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
             if ( !cla.isOptionValueSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
@@ -1159,67 +879,9 @@ public class surfacing {
                 && ( number_of_genomes > 2 ) ) {
             domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
         }
-        boolean jacknifed_distances = false;
-        int jacknife_resamplings = JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT;
-        double jacknife_ratio = JACKNIFE_RATIO_DEFAULT;
-        long random_seed = JACKNIFE_RANDOM_SEED_DEFAULT;
-        if ( cla.isOptionSet( surfacing.JACKNIFE_OPTION ) ) {
-            if ( ( number_of_genomes < 3 ) || !perform_pwc ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use jacknife resampling analysis (-"
-                        + surfacing.JACKNIFE_OPTION + "[=<number of resamplings>]) without pairwise analyses ("
-                        + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
-                        + "=<suffix for pairwise comparison output files>)" );
-            }
-            jacknifed_distances = true;
-            if ( cla.isOptionHasAValue( surfacing.JACKNIFE_OPTION ) ) {
-                try {
-                    jacknife_resamplings = cla.getOptionValueAsInt( surfacing.JACKNIFE_OPTION );
-                }
-                catch ( final IOException e ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for number of resamplings" );
-                }
-                if ( jacknife_resamplings < 2 ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use less than 2 resamplings" );
-                }
-            }
-            if ( cla.isOptionSet( surfacing.JACKNIFE_RATIO_OPTION )
-                    && cla.isOptionHasAValue( surfacing.JACKNIFE_RATIO_OPTION ) ) {
-                try {
-                    jacknife_ratio = cla.getOptionValueAsDouble( surfacing.JACKNIFE_RATIO_OPTION );
-                }
-                catch ( final IOException e ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for jacknife ratio" );
-                }
-                if ( ( jacknife_ratio <= 0.0 ) || ( jacknife_ratio >= 1.0 ) ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use illegal value for jacknife ratio: "
-                            + jacknife_ratio );
-                }
-            }
-            if ( cla.isOptionSet( surfacing.JACKNIFE_RANDOM_SEED_OPTION )
-                    && cla.isOptionHasAValue( surfacing.JACKNIFE_RANDOM_SEED_OPTION ) ) {
-                try {
-                    random_seed = cla.getOptionValueAsLong( surfacing.JACKNIFE_RANDOM_SEED_OPTION );
-                }
-                catch ( final IOException e ) {
-                    ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for random generator seed" );
-                }
-            }
-        }
-        //        boolean infer_species_trees = false;
-        //        if ( cla.isOptionSet( surfacing.INFER_SPECIES_TREES_OPTION ) ) {
-        //            if ( ( output_file == null ) || ( number_of_genomes < 3 )
-        //                    || ForesterUtil.isEmpty( automated_pairwise_comparison_suffix ) ) {
-        //                ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer species trees (-"
-        //                        + surfacing.INFER_SPECIES_TREES_OPTION + " without pairwise analyses ("
-        //                        + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
-        //                        + "=<suffix for pairwise comparison output files>)" );
-        //            }
-        //            infer_species_trees = true;
-        //        }
         File[] intree_files = null;
         Phylogeny[] intrees = null;
         if ( cla.isOptionSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
-            // TODO FIXME if jacknife.... maybe not
             if ( number_of_genomes < 3 ) {
                 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer gains and losses on input species trees (-"
                         + surfacing.INPUT_SPECIES_TREE_OPTION + " without pairwise analyses ("
@@ -1243,8 +905,9 @@ public class surfacing {
                 intree_files = new File[ 1 ];
                 intree_files[ 0 ] = new File( intrees_str );
             }
-            intrees = getIntrees( intree_files, number_of_genomes, input_file_properties );
+            intrees = SurfacingUtil.obtainAndPreProcessIntrees( intree_files, number_of_genomes, input_file_properties );
         }
+        final Phylogeny intree_0_orig = SurfacingUtil.obtainFirstIntree( intree_files[ 0 ] );
         long random_number_seed_for_fitch_parsimony = 0l;
         boolean radomize_fitch_parsimony = false;
         if ( cla.isOptionSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
@@ -1261,21 +924,21 @@ public class surfacing {
             }
             radomize_fitch_parsimony = true;
         }
-        SortedSet<DomainId> filter = null;
+        SortedSet<String> filter = null;
         if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
                 || ( negative_domains_filter_file != null ) ) {
-            filter = new TreeSet<DomainId>();
+            filter = new TreeSet<String>();
             if ( positive_filter_file != null ) {
-                processFilter( positive_filter_file, filter );
+                SurfacingUtil.processFilter( positive_filter_file, filter );
             }
             else if ( negative_filter_file != null ) {
-                processFilter( negative_filter_file, filter );
+                SurfacingUtil.processFilter( negative_filter_file, filter );
             }
             else if ( negative_domains_filter_file != null ) {
-                processFilter( negative_domains_filter_file, filter );
+                SurfacingUtil.processFilter( negative_domains_filter_file, filter );
             }
         }
-        Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps = null;
+        Map<String, Set<String>>[] domain_id_to_secondary_features_maps = null;
         File[] secondary_features_map_files = null;
         final File domain_lengths_analysis_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
                 + DOMAIN_LENGTHS_ANALYSIS_SUFFIX );
@@ -1332,12 +995,8 @@ public class surfacing {
                                              + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
         }
         System.out.println( "Output directory            : " + out_dir );
-        if ( input_file_names_from_file != null ) {
-            System.out.println( "Input files names from      : " + input_files_file + " ["
-                    + input_file_names_from_file.length + " input files]" );
-            html_desc.append( "<tr><td>Input files names from:</td><td>" + input_files_file + " ["
-                    + input_file_names_from_file.length + " input files]</td></tr>" + nl );
-        }
+        System.out.println( "Input genomes from          : " + input_genomes_file );
+        html_desc.append( "<tr><td>Input genomes from:</td><td>" + input_genomes_file + "</td></tr>" + nl );
         if ( positive_filter_file != null ) {
             final int filter_size = filter.size();
             System.out.println( "Positive protein filter     : " + positive_filter_file + " [" + filter_size
@@ -1427,19 +1086,34 @@ public class surfacing {
                     + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) + "</td></tr>"
                     + nl );
         }
+        System.out.println( "Use last in Fitch parimony  : " + use_last_in_fitch_parsimony );
+        html_desc.append( "<tr><td>Use last in Fitch parimon:</td><td>" + use_last_in_fitch_parsimony + "</td></tr>"
+                + nl );
+        System.out.println( "Write to Nexus files        : " + write_to_nexus );
+        html_desc.append( "<tr><td>Write to Nexus files:</td><td>" + write_to_nexus + "</td></tr>" + nl );
+        System.out.println( "DC regain prot stats        : " + perform_dc_regain_proteins_stats );
+        html_desc.append( "<tr><td>DC regain prot stats:</td><td>" + perform_dc_regain_proteins_stats + "</td></tr>"
+                + nl );
+        System.out.println( "DA analysis                 : " + da_analysis );
+        html_desc.append( "<tr><td>DA analysis :</td><td>" + da_analysis + "</td></tr>" + nl );
         System.out.print( "Domain counts sort order    : " );
+        html_desc.append( "<tr><td>Domain counts sort order:</td><td>" );
         switch ( dc_sort_order ) {
             case ALPHABETICAL_KEY_ID:
                 System.out.println( "alphabetical" );
+                html_desc.append( "alphabetical" + "</td></tr>" + nl );
                 break;
             case KEY_DOMAIN_COUNT:
                 System.out.println( "domain count" );
+                html_desc.append( "domain count" + "</td></tr>" + nl );
                 break;
             case KEY_DOMAIN_PROTEINS_COUNT:
                 System.out.println( "domain proteins count" );
+                html_desc.append( "domain proteins count" + "</td></tr>" + nl );
                 break;
             case COMBINATIONS_COUNT:
                 System.out.println( "domain combinations count" );
+                html_desc.append( "domain combinations count" + "</td></tr>" + nl );
                 break;
             default:
                 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for dc sort order" );
@@ -1603,19 +1277,6 @@ public class surfacing {
             }
             System.out.println();
             html_desc.append( "</td></tr>" + nl );
-            if ( jacknifed_distances ) {
-                html_desc.append( "<tr><td>Jacknife:</td><td>" + jacknife_resamplings + " resamplings</td></tr>" + nl );
-                html_desc.append( "<tr><td>Jacknife ratio:</td><td>" + ForesterUtil.round( jacknife_ratio, 2 )
-                        + "</td></tr>" + nl );
-                html_desc.append( "<tr><td>Jacknife random number seed:</td><td>" + random_seed + "</td></tr>" + nl );
-                System.out.println( "  Jacknife                  : " + jacknife_resamplings + " resamplings" );
-                System.out.println( "    Ratio                   : " + ForesterUtil.round( jacknife_ratio, 2 ) );
-                System.out.println( "    Random number seed      : " + random_seed );
-            }
-            //                if ( infer_species_trees ) {
-            //                    html_desc.append( "<tr><td>Infer species trees:</td><td>true</td></tr>" + nl );
-            //                    System.out.println( "  Infer species trees       : true" );
-            //                }
             if ( ( intrees != null ) && ( intrees.length > 0 ) ) {
                 for( final File intree_file : intree_files ) {
                     html_desc.append( "<tr><td>Intree for gain/loss parsimony analysis:</td><td>" + intree_file
@@ -1637,8 +1298,8 @@ public class surfacing {
                     if ( VERBOSE ) {
                         System.out.println();
                         System.out.println( "Domain ids to secondary features map:" );
-                        for( final DomainId domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
-                            System.out.print( domain_id.getId() );
+                        for( final String domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
+                            System.out.print( domain_id );
                             System.out.print( " => " );
                             for( final String sec : domain_id_to_secondary_features_maps[ i ].get( domain_id ) ) {
                                 System.out.print( sec );
@@ -1654,7 +1315,7 @@ public class surfacing {
         html_desc.append( "<tr><td>Command line:</td><td>\n" + cla.getCommandLineArgsAsString() + "\n</td></tr>" + nl );
         System.out.println( "Command line                : " + cla.getCommandLineArgsAsString() );
         BufferedWriter[] query_domains_writer_ary = null;
-        List<DomainId>[] query_domain_ids_array = null;
+        List<String>[] query_domain_ids_array = null;
         if ( query_domain_ids != null ) {
             final String[] query_domain_ids_str_array = query_domain_ids.split( "#" );
             query_domain_ids_array = new ArrayList[ query_domain_ids_str_array.length ];
@@ -1662,9 +1323,9 @@ public class surfacing {
             for( int i = 0; i < query_domain_ids_str_array.length; i++ ) {
                 String query_domain_ids_str = query_domain_ids_str_array[ i ];
                 final String[] query_domain_ids_str_ary = query_domain_ids_str.split( "~" );
-                final List<DomainId> query = new ArrayList<DomainId>();
+                final List<String> query = new ArrayList<String>();
                 for( final String element : query_domain_ids_str_ary ) {
-                    query.add( new DomainId( element ) );
+                    query.add( element );
                 }
                 query_domain_ids_array[ i ] = query;
                 query_domain_ids_str = query_domain_ids_str.replace( '~', '_' );
@@ -1689,8 +1350,8 @@ public class surfacing {
         if ( need_protein_lists_per_species ) {
             protein_lists_per_species = new TreeMap<Species, List<Protein>>();
         }
-        final List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
-        final SortedSet<DomainId> all_domains_encountered = new TreeSet<DomainId>();
+        List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
+        final SortedSet<String> all_domains_encountered = new TreeSet<String>();
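+        // Domain ids are handled as plain Strings throughout (the former DomainId wrapper type is gone);
+        // gwcd_list is no longer final so the reference can be dropped once it is no longer needed.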
         final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered = new TreeSet<BinaryDomainCombination>();
         List<BinaryDomainCombination> all_bin_domain_combinations_gained_fitch = null;
         List<BinaryDomainCombination> all_bin_domain_combinations_lost_fitch = null;
@@ -1698,7 +1359,7 @@ public class surfacing {
             all_bin_domain_combinations_gained_fitch = new ArrayList<BinaryDomainCombination>();
             all_bin_domain_combinations_lost_fitch = new ArrayList<BinaryDomainCombination>();
         }
-        final DomainLengthsTable domain_lengths_table = new DomainLengthsTable();
+        DomainLengthsTable domain_lengths_table = new DomainLengthsTable();
         final File per_genome_domain_promiscuity_statistics_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR
                 + output_file + D_PROMISCUITY_FILE_SUFFIX );
         BufferedWriter per_genome_domain_promiscuity_statistics_writer = null;
@@ -1743,7 +1404,7 @@ public class surfacing {
         BufferedWriter domains_per_potein_stats_writer = null;
         try {
             domains_per_potein_stats_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
-                    + output_file + "__domains_per_potein_stats.txt" ) );
+                    + output_file + "_domains_per_potein_stats.txt" ) );
             domains_per_potein_stats_writer.write( "Genome" );
             domains_per_potein_stats_writer.write( "\t" );
             domains_per_potein_stats_writer.write( "Mean" );
@@ -1765,17 +1426,21 @@ public class surfacing {
         Map<String, DescriptiveStatistics> protein_length_stats_by_dc = null;
         Map<String, DescriptiveStatistics> domain_number_stats_by_dc = null;
         final Map<String, DescriptiveStatistics> domain_length_stats_by_domain = new HashMap<String, DescriptiveStatistics>();
-        if ( PERFORM_DC_REGAIN_PROTEINS_STATS ) {
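+        // Protein length and domain count statistics per domain combination (DC) are only
+        // collected when DC 'regain' statistics are requested for this run.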
+        if ( perform_dc_regain_proteins_stats ) {
             protein_length_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
             domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
         }
         // Main loop:
+        final SortedMap<String, Set<String>> distinct_domain_architectures_per_genome = new TreeMap<String, Set<String>>();
+        final SortedMap<String, Integer> distinct_domain_architecture_counts = new TreeMap<String, Integer>();
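+        // Each genome is parsed in turn below: per-genome domain statistics are logged and,
+        // unless this is a domain architecture (DA) analysis run, genome-wide combinable domains are collected.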
         for( int i = 0; i < number_of_genomes; ++i ) {
             System.out.println();
             System.out.println( ( i + 1 ) + "/" + number_of_genomes );
-            log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
-            System.out.println( "Processing                                     : " + input_file_properties[ i ][ 0 ] );
-            log( "Genome                                         : " + input_file_properties[ i ][ 0 ], log_writer );
+            SurfacingUtil.log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
+            System.out.println( "Processing                                     : " + input_file_properties[ i ][ 1 ]
+                    + " [" + input_file_properties[ i ][ 0 ] + "]" );
+            SurfacingUtil.log( "Genome                                         : " + input_file_properties[ i ][ 1 ]
+                    + " [" + input_file_properties[ i ][ 0 ] + "]", log_writer );
             HmmscanPerDomainTableParser parser = null;
             INDIVIDUAL_SCORE_CUTOFF ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF.NONE;
             if ( individual_score_cutoffs != null ) {
@@ -1837,82 +1502,86 @@ public class surfacing {
             }
             final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
             protein_coverage_stats.addValue( coverage );
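+            // When domain architecture (DA) analysis is requested, record this genome's distinct DAs
+            // before the per-genome parser statistics are printed and logged.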
+            int distinct_das = -1;
+            if ( da_analysis ) {
+                final String genome = input_file_properties[ i ][ 0 ];
+                distinct_das = SurfacingUtil.storeDomainArchitectures( genome,
+                                                                       distinct_domain_architectures_per_genome,
+                                                                       protein_list,
+                                                                       distinct_domain_architecture_counts );
+            }
             System.out.println( "Number of proteins encountered                 : " + parser.getProteinsEncountered() );
-            log( "Number of proteins encountered                 : " + parser.getProteinsEncountered(), log_writer );
+            SurfacingUtil.log( "Number of proteins encountered                 : " + parser.getProteinsEncountered(),
+                               log_writer );
             System.out.println( "Number of proteins stored                      : " + protein_list.size() );
-            log( "Number of proteins stored                      : " + protein_list.size(), log_writer );
+            SurfacingUtil.log( "Number of proteins stored                      : " + protein_list.size(), log_writer );
             System.out.println( "Coverage                                       : "
                     + ForesterUtil.roundToInt( 100.0 * coverage ) + "%" );
-            log( "Coverage                                       : " + ForesterUtil.roundToInt( 100.0 * coverage )
-                    + "%", log_writer );
+            SurfacingUtil.log( "Coverage                                       : "
+                                       + ForesterUtil.roundToInt( 100.0 * coverage ) + "%",
+                               log_writer );
             System.out.println( "Domains encountered                            : " + parser.getDomainsEncountered() );
-            log( "Domains encountered                            : " + parser.getDomainsEncountered(), log_writer );
+            SurfacingUtil.log( "Domains encountered                            : " + parser.getDomainsEncountered(),
+                               log_writer );
             System.out.println( "Domains stored                                 : " + parser.getDomainsStored() );
-            log( "Domains stored                                 : " + parser.getDomainsStored(), log_writer );
+            SurfacingUtil.log( "Domains stored                                 : " + parser.getDomainsStored(),
+                               log_writer );
             System.out.println( "Distinct domains stored                        : "
                     + parser.getDomainsStoredSet().size() );
-            log( "Distinct domains stored                        : " + parser.getDomainsStoredSet().size(), log_writer );
+            SurfacingUtil.log( "Distinct domains stored                        : "
+                    + parser.getDomainsStoredSet().size(), log_writer );
             System.out.println( "Domains ignored due to individual score cutoffs: "
                     + parser.getDomainsIgnoredDueToIndividualScoreCutoff() );
-            log( "Domains ignored due to individual score cutoffs: "
-                         + parser.getDomainsIgnoredDueToIndividualScoreCutoff(),
-                 log_writer );
+            SurfacingUtil.log( "Domains ignored due to individual score cutoffs: "
+                                       + parser.getDomainsIgnoredDueToIndividualScoreCutoff(),
+                               log_writer );
             System.out.println( "Domains ignored due to E-value                 : "
                     + parser.getDomainsIgnoredDueToEval() );
-            log( "Domains ignored due to E-value                 : " + parser.getDomainsIgnoredDueToEval(), log_writer );
+            SurfacingUtil.log( "Domains ignored due to E-value                 : "
+                                       + parser.getDomainsIgnoredDueToEval(),
+                               log_writer );
             System.out.println( "Domains ignored due to DUF designation         : "
                     + parser.getDomainsIgnoredDueToDuf() );
-            log( "Domains ignored due to DUF designation         : " + parser.getDomainsIgnoredDueToDuf(), log_writer );
+            SurfacingUtil
+                    .log( "Domains ignored due to DUF designation         : " + parser.getDomainsIgnoredDueToDuf(),
+                          log_writer );
             if ( ignore_virus_like_ids ) {
                 System.out.println( "Domains ignored due virus like ids             : "
                         + parser.getDomainsIgnoredDueToVirusLikeIds() );
-                log( "Domains ignored due virus like ids             : " + parser.getDomainsIgnoredDueToVirusLikeIds(),
-                     log_writer );
+                SurfacingUtil.log( "Domains ignored due virus like ids             : "
+                                           + parser.getDomainsIgnoredDueToVirusLikeIds(),
+                                   log_writer );
             }
             System.out.println( "Domains ignored due negative domain filter     : "
                     + parser.getDomainsIgnoredDueToNegativeDomainFilter() );
-            log( "Domains ignored due negative domain filter     : "
-                         + parser.getDomainsIgnoredDueToNegativeDomainFilter(),
-                 log_writer );
+            SurfacingUtil.log( "Domains ignored due negative domain filter     : "
+                                       + parser.getDomainsIgnoredDueToNegativeDomainFilter(),
+                               log_writer );
             System.out.println( "Domains ignored due to overlap                 : "
                     + parser.getDomainsIgnoredDueToOverlap() );
-            log( "Domains ignored due to overlap                 : " + parser.getDomainsIgnoredDueToOverlap(),
-                 log_writer );
+            SurfacingUtil.log( "Domains ignored due to overlap                 : "
+                                       + parser.getDomainsIgnoredDueToOverlap(),
+                               log_writer );
             if ( negative_filter_file != null ) {
                 System.out.println( "Proteins ignored due to negative filter        : "
                         + parser.getProteinsIgnoredDueToFilter() );
-                log( "Proteins ignored due to negative filter        : " + parser.getProteinsIgnoredDueToFilter(),
-                     log_writer );
+                SurfacingUtil.log( "Proteins ignored due to negative filter        : "
+                                           + parser.getProteinsIgnoredDueToFilter(),
+                                   log_writer );
             }
             if ( positive_filter_file != null ) {
                 System.out.println( "Proteins ignored due to positive filter        : "
                         + parser.getProteinsIgnoredDueToFilter() );
-                log( "Proteins ignored due to positive filter        : " + parser.getProteinsIgnoredDueToFilter(),
-                     log_writer );
+                SurfacingUtil.log( "Proteins ignored due to positive filter        : "
+                                           + parser.getProteinsIgnoredDueToFilter(),
+                                   log_writer );
             }
-            System.out.println( "Time for processing                            : " + parser.getTime() + "ms" );
-            log( "", log_writer );
-            html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
-                    + input_file_properties[ i ][ 1 ] + "]" + ":</td><td>domains analyzed: "
-                    + parser.getDomainsStored() + "; domains ignored: [ind score cutoffs: "
-                    + parser.getDomainsIgnoredDueToIndividualScoreCutoff() + "] [E-value cutoff: "
-                    + parser.getDomainsIgnoredDueToEval() + "] [DUF: " + parser.getDomainsIgnoredDueToDuf()
-                    + "] [virus like ids: " + parser.getDomainsIgnoredDueToVirusLikeIds()
-                    + "] [negative domain filter: " + parser.getDomainsIgnoredDueToNegativeDomainFilter()
-                    + "] [overlap: " + parser.getDomainsIgnoredDueToOverlap() + "]" );
-            if ( negative_filter_file != null ) {
-                html_desc.append( "; proteins ignored due to negative filter: "
-                        + parser.getProteinsIgnoredDueToFilter() );
-            }
-            if ( positive_filter_file != null ) {
-                html_desc.append( "; proteins ignored due to positive filter: "
-                        + parser.getProteinsIgnoredDueToFilter() );
+            if ( da_analysis ) {
+                System.out.println( "Distinct domain architectures stored           : " + distinct_das );
+                SurfacingUtil.log( "Distinct domain architectures stored           : " + distinct_das, log_writer );
             }
-            html_desc.append( "</td></tr>" + nl );
-            // domain_partner_counts_array[ i ] =
-            // Methods.getDomainPartnerCounts( protein_domain_collections_array[
-            // i ],
-            // false, input_file_properties[ i ][ 1 ] );
+            System.out.println( "Time for processing                            : " + parser.getTime() + "ms" );
+            SurfacingUtil.log( "", log_writer );
             try {
                 int count = 0;
                 for( final Protein protein : protein_list ) {
@@ -1939,32 +1608,34 @@ public class surfacing {
                                                         domains_which_are_sometimes_single_sometimes_not,
                                                         domains_which_never_single,
                                                         domains_per_potein_stats_writer );
-            gwcd_list.add( BasicGenomeWideCombinableDomains
-                    .createInstance( protein_list,
-                                     ignore_combination_with_same,
-                                     new BasicSpecies( input_file_properties[ i ][ 1 ] ),
-                                     domain_id_to_go_ids_map,
-                                     dc_type,
-                                     protein_length_stats_by_dc,
-                                     domain_number_stats_by_dc ) );
             domain_lengths_table.addLengths( protein_list );
-            if ( gwcd_list.get( i ).getSize() > 0 ) {
-                SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
-                                                                 out_dir,
-                                                                 per_genome_domain_promiscuity_statistics_writer,
-                                                                 gwcd_list.get( i ),
-                                                                 i,
-                                                                 dc_sort_order );
-                if ( output_binary_domain_combinationsfor_graph_analysis ) {
-                    SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
-                                                                                     out_dir,
-                                                                                     gwcd_list.get( i ),
-                                                                                     i,
-                                                                                     dc_sort_order );
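+            // When DA analysis is requested, the genome-wide combinable domain (GWCD) collection below is skipped.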
+            if ( !da_analysis ) {
+                gwcd_list.add( BasicGenomeWideCombinableDomains
+                        .createInstance( protein_list,
+                                         ignore_combination_with_same,
+                                         new BasicSpecies( input_file_properties[ i ][ 1 ] ),
+                                         domain_id_to_go_ids_map,
+                                         dc_type,
+                                         protein_length_stats_by_dc,
+                                         domain_number_stats_by_dc ) );
+                if ( gwcd_list.get( i ).getSize() > 0 ) {
+                    SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
+                                                                     out_dir,
+                                                                     per_genome_domain_promiscuity_statistics_writer,
+                                                                     gwcd_list.get( i ),
+                                                                     i,
+                                                                     dc_sort_order );
+                    if ( output_binary_domain_combinationsfor_graph_analysis ) {
+                        SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
+                                                                                         out_dir,
+                                                                                         gwcd_list.get( i ),
+                                                                                         i,
+                                                                                         dc_sort_order );
+                    }
+                    SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
+                    SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
+                                                                      all_bin_domain_combinations_encountered );
                 }
-                SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
-                SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
-                                                                  all_bin_domain_combinations_encountered );
             }
             if ( query_domains_writer_ary != null ) {
                 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
@@ -1995,6 +1666,18 @@ public class surfacing {
         ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
                 + per_genome_domain_promiscuity_statistics_file );
         //
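+        // After all genomes have been processed: write the DA count and unique-DA output files,
+        // then release the DA maps.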
+        if ( da_analysis ) {
+            SurfacingUtil.performDomainArchitectureAnalysis( distinct_domain_architectures_per_genome,
+                                                             distinct_domain_architecture_counts,
+                                                             10,
+                                                             new File( out_dir.toString() + "/" + output_file
+                                                                     + "_DA_counts.txt" ),
+                                                             new File( out_dir.toString() + "/" + output_file
+                                                                     + "_unique_DAs.txt" ) );
+            distinct_domain_architectures_per_genome.clear();
+            distinct_domain_architecture_counts.clear();
+            System.gc();
+        }
         try {
             domains_per_potein_stats_writer.write( "ALL" );
             domains_per_potein_stats_writer.write( "\t" );
@@ -2011,15 +1694,15 @@ public class surfacing {
             domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMax() + "" );
             domains_per_potein_stats_writer.write( "\n" );
             domains_per_potein_stats_writer.close();
-            printOutPercentageOfMultidomainProteins( all_genomes_domains_per_potein_histo, log_writer );
+            SurfacingUtil.printOutPercentageOfMultidomainProteins( all_genomes_domains_per_potein_histo, log_writer );
             ForesterUtil.map2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
+                    + "_all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
             ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
+                    + "_domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
             ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
+                    + "_domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
             ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
-                    + "__domains_always_combined.txt" ), domains_which_never_single, "\n" );
+                    + "_domains_always_combined.txt" ), domains_which_never_single, "\n" );
             ForesterUtil.programMessage( PRG_NAME,
                                          "Average of proteins with a least one domain assigned: "
                                                  + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
@@ -2027,10 +1710,14 @@ public class surfacing {
             ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: "
                     + ( 100 * protein_coverage_stats.getMin() ) + "%-" + ( 100 * protein_coverage_stats.getMax() )
                     + "%" );
-            log( "Average of prot with a least one dom assigned  : " + ( 100 * protein_coverage_stats.arithmeticMean() )
-                    + "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
-            log( "Range of prot with a least one dom assigned    : " + ( 100 * protein_coverage_stats.getMin() ) + "%-"
-                    + ( 100 * protein_coverage_stats.getMax() ) + "%", log_writer );
+            SurfacingUtil.log( "Average of prot with a least one dom assigned  : "
+                                       + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
+                                       + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)",
+                               log_writer );
+            SurfacingUtil.log( "Range of prot with a least one dom assigned    : "
+                                       + ( 100 * protein_coverage_stats.getMin() ) + "%-"
+                                       + ( 100 * protein_coverage_stats.getMax() ) + "%",
+                               log_writer );
         }
         catch ( final IOException e2 ) {
             ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
@@ -2067,12 +1754,13 @@ public class surfacing {
             ForesterUtil.programMessage( PRG_NAME, "Wrote domain length data to: " + domain_lengths_analysis_outfile );
             System.out.println();
         }
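+        // The domain lengths table is no longer needed beyond this point; drop the reference.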
+        domain_lengths_table = null;
         final long analysis_start_time = new Date().getTime();
         PairwiseDomainSimilarityCalculator pw_calc = null;
-        // double[] values_for_all_scores_histogram = null;
         final DomainSimilarityCalculator calc = new BasicDomainSimilarityCalculator( domain_similarity_sort_field,
                                                                                      sort_by_species_count_first,
-                                                                                     number_of_genomes == 2 );
+                                                                                     number_of_genomes == 2,
+                                                                                     CALC_SIMILARITY_SCORES );
         switch ( scoring ) {
             case COMBINATIONS:
                 pw_calc = new CombinationsBasedPairwiseDomainSimilarityCalculator();
@@ -2095,12 +1783,8 @@ public class surfacing {
                                         gwcd_list,
                                         ignore_domains_without_combs_in_all_spec,
                                         ignore_species_specific_domains );
-        SurfacingUtil.decoratePrintableDomainSimilarities( similarities,
-                                                           detailedness,
-                                                           go_annotation_output,
-                                                           go_id_to_term_map,
-                                                           go_namespace_limit );
-        DescriptiveStatistics pw_stats = null;
+        SurfacingUtil.decoratePrintableDomainSimilarities( similarities, detailedness );
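+        // Taxonomy codes from the first input tree are mapped to ids for use by the domain
+        // similarity and pairwise genome comparison output below.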
+        final Map<String, Integer> tax_code_to_id_map = SurfacingUtil.createTaxCodeToIdMap( intrees[ 0 ] );
         try {
             String my_outfile = output_file.toString();
             Map<Character, Writer> split_writers = null;
@@ -2110,7 +1794,7 @@ public class surfacing {
                     my_outfile = my_outfile.substring( 0, my_outfile.length() - 5 );
                 }
                 split_writers = new HashMap<Character, Writer>();
-                createSplitWriters( out_dir, my_outfile, split_writers );
+                SurfacingUtil.createSplitWriters( out_dir, my_outfile, split_writers );
             }
             else if ( !my_outfile.endsWith( ".html" ) ) {
                 my_outfile += ".html";
@@ -2131,18 +1815,22 @@ public class surfacing {
                     + new java.text.SimpleDateFormat( "yyyy.MM.dd HH:mm:ss" ).format( new java.util.Date() )
                     + "</td></tr>" + nl );
             html_desc.append( "</table>" + nl );
-            pw_stats = SurfacingUtil
-                    .writeDomainSimilaritiesToFile( html_desc,
-                                                    new StringBuilder( number_of_genomes + " genomes" ),
-                                                    writer,
-                                                    split_writers,
-                                                    similarities,
-                                                    number_of_genomes == 2,
-                                                    species_order,
-                                                    domain_similarity_print_option,
-                                                    domain_similarity_sort_field,
-                                                    scoring,
-                                                    true );
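+            // Domain similarities are now written both to the HTML output and to a plain
+            // tab-separated file (<output name>.tsv).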
+            final Writer simple_tab_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
+                    + my_outfile + ".tsv" ) );
+            SurfacingUtil.writeDomainSimilaritiesToFile( html_desc,
+                                                         new StringBuilder( number_of_genomes + " genomes" ),
+                                                         simple_tab_writer,
+                                                         writer,
+                                                         split_writers,
+                                                         similarities,
+                                                         number_of_genomes == 2,
+                                                         species_order,
+                                                         domain_similarity_print_option,
+                                                         scoring,
+                                                         true,
+                                                         tax_code_to_id_map,
+                                                         intree_0_orig );
+            simple_tab_writer.close();
             ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote main output (includes domain similarities) to: \""
                     + ( out_dir == null ? my_outfile : out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) + "\"" );
         }
@@ -2151,7 +1839,6 @@ public class surfacing {
                     + e.getMessage() + "]" );
         }
         System.out.println();
-        // values_for_all_scores_histogram = pw_stats.getDataAsDoubleArray();
         final Species[] species = new Species[ number_of_genomes ];
         for( int i = 0; i < number_of_genomes; ++i ) {
             species[ i ] = new BasicSpecies( input_file_properties[ i ][ 1 ] );
@@ -2179,7 +1866,10 @@ public class surfacing {
                                              surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX,
                                              surfacing.PRG_NAME,
                                              out_dir,
-                                             write_pwc_files );
+                                             write_pwc_files,
+                                             tax_code_to_id_map,
+                                             CALC_SIMILARITY_SCORES,
+                                             intree_0_orig );
             String matrix_output_file = new String( output_file.toString() );
             if ( matrix_output_file.indexOf( '.' ) > 1 ) {
                 matrix_output_file = matrix_output_file.substring( 0, matrix_output_file.indexOf( '.' ) );
@@ -2210,47 +1900,20 @@ public class surfacing {
             inferred_trees.add( nj_gd );
             inferred_trees.add( nj_bc );
             inferred_trees.add( nj_d );
-            if ( jacknifed_distances ) {
-                pwgc.performPairwiseComparisonsJacknifed( species,
-                                                          number_of_genomes,
-                                                          gwcd_list,
-                                                          true,
-                                                          jacknife_resamplings,
-                                                          jacknife_ratio,
-                                                          random_seed );
-                SurfacingUtil
-                        .writeMatrixToFile( new File( matrix_output_file
-                                                    + "_"
-                                                    + ForesterUtil.round( jacknife_ratio, 2 )
-                                                    + "_"
-                                                    + jacknife_resamplings
-                                                    + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
-                                            pwgc.getSharedBinaryCombinationsBasedDistances() );
-                SurfacingUtil
-                        .writeMatrixToFile( new File( matrix_output_file + "_" + ForesterUtil.round( jacknife_ratio, 2 )
-                                                    + "_" + jacknife_resamplings
-                                                    + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
-                                            pwgc.getSharedDomainsBasedDistances() );
-                //                if ( infer_species_trees ) {
-                //                    inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
-                //                            + INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc
-                //                            .getSharedBinaryCombinationsBasedDistances() );
-                //                    inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
-                //                            + INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc.getSharedDomainsBasedDistances() );
-                //                }
-            }
         } // if ( ( output_file != null ) && ( number_of_genomes > 2 ) && !isEmpty( automated_pairwise_comparison_suffix ) )
         if ( ( out_dir != null ) && ( !perform_pwc ) ) {
             output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
         }
-        writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
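+        // Nexus presence output is now optional (controlled by write_to_nexus).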
+        if ( write_to_nexus ) {
+            SurfacingUtil.writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
+        }
         if ( ( ( intrees != null ) && ( intrees.length > 0 ) ) && ( number_of_genomes > 2 ) ) {
-            final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
-                                                                          e_value_max,
-                                                                          max_allowed_overlap,
-                                                                          no_engulfing_overlaps,
-                                                                          cutoff_scores_file,
-                                                                          dc_type );
+            final StringBuilder parameters_sb = SurfacingUtil.createParametersAsString( ignore_dufs,
+                                                                                        e_value_max,
+                                                                                        max_allowed_overlap,
+                                                                                        no_engulfing_overlaps,
+                                                                                        cutoff_scores_file,
+                                                                                        dc_type );
             String s = "_";
             if ( radomize_fitch_parsimony ) {
                 s += random_number_seed_for_fitch_parsimony + "_";
@@ -2278,12 +1941,15 @@ public class surfacing {
                                                         dc_type,
                                                         protein_length_stats_by_dc,
                                                         domain_number_stats_by_dc,
-                                                        domain_length_stats_by_domain );
+                                                        domain_length_stats_by_domain,
+                                                        tax_code_to_id_map,
+                                                        write_to_nexus,
+                                                        use_last_in_fitch_parsimony );
                 // Listing of all domain combinations gained is only done if only one input tree is used. 
                 if ( ( domain_id_to_secondary_features_maps != null )
                         && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
                     int j = 0;
-                    for( final Map<DomainId, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
+                    for( final Map<String, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
                         final Map<Species, MappingResults> mapping_results_map = new TreeMap<Species, MappingResults>();
                         final DomainParsimonyCalculator secondary_features_parsimony = DomainParsimonyCalculator
                                 .createInstance( intree, gwcd_list, domain_id_to_secondary_features_map );
@@ -2294,7 +1960,8 @@ public class surfacing {
                                                                                secondary_features_parsimony,
                                                                                intree,
                                                                                parameters_sb.toString(),
-                                                                               mapping_results_map );
+                                                                               mapping_results_map,
+                                                                               use_last_in_fitch_parsimony );
                         if ( i == 0 ) {
                             System.out.println();
                             System.out.println( "Mapping to secondary features:" );
@@ -2320,30 +1987,32 @@ public class surfacing {
             } // for( final Phylogeny intree : intrees ) {
         }
         if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
-            executePlusMinusAnalysis( output_file,
-                                      plus_minus_analysis_high_copy_base_species,
-                                      plus_minus_analysis_high_copy_target_species,
-                                      plus_minus_analysis_high_low_copy_species,
-                                      gwcd_list,
-                                      protein_lists_per_species,
-                                      domain_id_to_go_ids_map,
-                                      go_id_to_term_map,
-                                      plus_minus_analysis_numbers );
+            SurfacingUtil.executePlusMinusAnalysis( output_file,
+                                                    plus_minus_analysis_high_copy_base_species,
+                                                    plus_minus_analysis_high_copy_target_species,
+                                                    plus_minus_analysis_high_low_copy_species,
+                                                    gwcd_list,
+                                                    protein_lists_per_species,
+                                                    domain_id_to_go_ids_map,
+                                                    go_id_to_term_map,
+                                                    plus_minus_analysis_numbers );
         }
         if ( output_protein_lists_for_all_domains ) {
-            writeProteinListsForAllSpecies( out_dir,
-                                            protein_lists_per_species,
-                                            gwcd_list,
-                                            output_list_of_all_proteins_per_domain_e_value_max );
+            SurfacingUtil.writeProteinListsForAllSpecies( out_dir,
+                                                          protein_lists_per_species,
+                                                          gwcd_list,
+                                                          output_list_of_all_proteins_per_domain_e_value_max );
         }
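+        // The GWCD list is no longer needed; drop the reference before the Fitch gain/loss analyses below.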
+        gwcd_list = null;
         if ( all_bin_domain_combinations_gained_fitch != null ) {
             try {
-                executeFitchGainsAnalysis( new File( output_file
-                                                   + surfacing.OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
-                                           all_bin_domain_combinations_gained_fitch,
-                                           all_domains_encountered.size(),
-                                           all_bin_domain_combinations_encountered,
-                                           true );
+                SurfacingUtil
+                        .executeFitchGainsAnalysis( new File( output_file
+                                                            + surfacing.OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
+                                                    all_bin_domain_combinations_gained_fitch,
+                                                    all_domains_encountered.size(),
+                                                    all_bin_domain_combinations_encountered,
+                                                    true );
             }
             catch ( final IOException e ) {
                 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
@@ -2351,12 +2020,13 @@ public class surfacing {
         }
         if ( all_bin_domain_combinations_lost_fitch != null ) {
             try {
-                executeFitchGainsAnalysis( new File( output_file
-                                                   + surfacing.OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
-                                           all_bin_domain_combinations_lost_fitch,
-                                           all_domains_encountered.size(),
-                                           all_bin_domain_combinations_encountered,
-                                           false );
+                SurfacingUtil
+                        .executeFitchGainsAnalysis( new File( output_file
+                                                            + surfacing.OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
+                                                    all_bin_domain_combinations_lost_fitch,
+                                                    all_domains_encountered.size(),
+                                                    all_bin_domain_combinations_encountered,
+                                                    false );
             }
             catch ( final IOException e ) {
                 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
@@ -2376,130 +2046,6 @@ public class surfacing {
         System.out.println();
     }
 
-    private static void createSplitWriters( final File out_dir,
-                                            final String my_outfile,
-                                            final Map<Character, Writer> split_writers ) throws IOException {
-        split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_A.html" ) ) );
-        split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_B.html" ) ) );
-        split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_C.html" ) ) );
-        split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_D.html" ) ) );
-        split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_E.html" ) ) );
-        split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_F.html" ) ) );
-        split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_G.html" ) ) );
-        split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_H.html" ) ) );
-        split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_I.html" ) ) );
-        split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_J.html" ) ) );
-        split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_K.html" ) ) );
-        split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_L.html" ) ) );
-        split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_M.html" ) ) );
-        split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_N.html" ) ) );
-        split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_O.html" ) ) );
-        split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_P.html" ) ) );
-        split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_Q.html" ) ) );
-        split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_R.html" ) ) );
-        split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_S.html" ) ) );
-        split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_T.html" ) ) );
-        split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_U.html" ) ) );
-        split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_V.html" ) ) );
-        split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_W.html" ) ) );
-        split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_X.html" ) ) );
-        split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_Y.html" ) ) );
-        split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_Z.html" ) ) );
-        split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
-                + "_domains_0.html" ) ) );
-    }
-
-    private static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
-                                                                 final Writer log_writer ) {
-        int sum = 0;
-        for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
-            sum += entry.getValue();
-        }
-        final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
-        ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
-        log( "Percentage of multidomain proteins:            : " + percentage + "%", log_writer );
-    }
-
-    private static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
-                                                              final String[][] input_file_properties ) {
-        final String[] genomes = new String[ input_file_properties.length ];
-        for( int i = 0; i < input_file_properties.length; ++i ) {
-            if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
-                        + "] is not unique in input tree " + intree.getName() );
-            }
-            genomes[ i ] = input_file_properties[ i ][ 1 ];
-        }
-        //
-        final PhylogenyNodeIterator it = intree.iteratorPostorder();
-        while ( it.hasNext() ) {
-            final PhylogenyNode n = it.next();
-            if ( ForesterUtil.isEmpty( n.getName() ) ) {
-                if ( n.getNodeData().isHasTaxonomy()
-                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
-                    n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
-                }
-                else if ( n.getNodeData().isHasTaxonomy()
-                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
-                    n.setName( n.getNodeData().getTaxonomy().getScientificName() );
-                }
-                else if ( n.getNodeData().isHasTaxonomy()
-                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
-                    n.setName( n.getNodeData().getTaxonomy().getCommonName() );
-                }
-                else {
-                    ForesterUtil
-                            .fatalError( surfacing.PRG_NAME,
-                                         "node with no name, scientific name, common name, or taxonomy code present" );
-                }
-            }
-        }
-        //
-        final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
-        if ( igns.size() > 0 ) {
-            System.out.println( "Not using the following " + igns.size() + " nodes:" );
-            for( int i = 0; i < igns.size(); ++i ) {
-                System.out.println( " " + i + ": " + igns.get( i ) );
-            }
-            System.out.println( "--" );
-        }
-        for( final String[] input_file_propertie : input_file_properties ) {
-            try {
-                intree.getNode( input_file_propertie[ 1 ] );
-            }
-            catch ( final IllegalArgumentException e ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
-                        + "] not present/not unique in input tree" );
-            }
-        }
-    }
-
     private static void printHelp() {
         System.out.println();
         System.out.println( "Usage:" );
@@ -2551,17 +2097,6 @@ public class surfacing {
         System.out.println( surfacing.INPUT_SPECIES_TREE_OPTION
                 + ": species tree, to perform (Dollo, Fitch) parismony analyses" );
         System.out
-                .println( JACKNIFE_OPTION
-                        + ": perform jacknife resampling for domain and binary domain combination based distance matrices [default resamplings: "
-                        + JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT + "]" );
-        System.out.println( JACKNIFE_RATIO_OPTION + ": ratio for jacknife resampling [default: "
-                + JACKNIFE_RATIO_DEFAULT + "]" );
-        System.out.println( JACKNIFE_RANDOM_SEED_OPTION
-                + ": seed for random number generator for jacknife resampling [default: "
-                + JACKNIFE_RANDOM_SEED_DEFAULT + "]" );
-        //        System.out.println( surfacing.INFER_SPECIES_TREES_OPTION
-        //                + ": to infer NJ species trees based on shared domains/binary domain combinations" );
-        System.out
                 .println( surfacing.INPUT_SPECIES_TREE_OPTION
                         + "=<treefiles in phyloXML format, separated by #>: to infer domain/binary domain combination gains/losses on given species trees" );
         System.out.println( surfacing.FILTER_POSITIVE_OPTION
@@ -2570,7 +2105,7 @@ public class surfacing {
                 + "=<file>: to filter out proteins containing at least one domain listed in <file>" );
         System.out.println( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION
                 + "=<file>: to filter out (ignore) domains listed in <file>" );
-        System.out.println( surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>: to read input files from <file>" );
+        System.out.println( surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>: to read input files from <file>" );
         System.out
                 .println( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION
                         + "=<seed>: seed for random number generator for Fitch Parsimony analysis (type: long, default: no randomization - given a choice, prefer absence" );
@@ -2589,208 +2124,21 @@ public class surfacing {
         System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS + ": to output all proteins per domain" );
         System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION
                 + ": e value max per domain for output of all proteins per domain" );
+        System.out.println( surfacing.USE_LAST_IN_FITCH_OPTION + ": to use last in Fitch parsimony" );
+        System.out.println( surfacing.WRITE_TO_NEXUS_OPTION + ": to output in Nexus format" );
+        System.out.println( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION + ": to gather protein length and domain count statistics for domain combination (DC) regains" );
+        System.out.println( DA_ANALYSIS_OPTION + ": to perform DA (domain architecture) analysis" );
         System.out.println();
         System.out.println( "Example 1: java -Xms128m -Xmx512m -cp path/to/forester.jar"
                 + " org.forester.application.surfacing p2g=pfam2go_2012_02_07.txt -dufs -cos=Pfam_260_NC1"
-                + " -no_eo -mo=0 -input=genomes_limited.txt -out_dir=out -o=o "
+                + " -no_eo -mo=0 -genomes=eukaryotes.txt -out_dir=out -o=o "
                 + " -species_tree=tol.xml -obo=gene_ontology_2012_02_07.obo -pos_filter=f.txt -all_prot" );
         System.out.println();
         System.out.println( "Example 2: java -Xms128m -Xmx512m -cp path/to/forester.jar"
                 + " org.forester.application.surfacing -detail=punctilious -o=TEST.html -pwc=TEST"
                 + " -cos=Pfam_ls_22_TC2 -p2g=pfam2go -obo=gene_ontology_edit.obo "
-                + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo "
-                + "-ds_output=detailed_html -scoring=domains -sort=alpha human mouse brafl strpu" );
+                + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo -genomes=eukaryotes.txt "
+                + "-ds_output=detailed_html -scoring=domains -sort=alpha " );
         System.out.println();
     }
-
-    private static void processFilter( final File filter_file, final SortedSet<DomainId> filter ) {
-        SortedSet<String> filter_str = null;
-        try {
-            filter_str = ForesterUtil.file2set( filter_file );
-        }
-        catch ( final IOException e ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
-        }
-        if ( filter_str != null ) {
-            for( final String string : filter_str ) {
-                filter.add( new DomainId( string ) );
-            }
-        }
-        if ( VERBOSE ) {
-            System.out.println( "Filter:" );
-            for( final DomainId domainId : filter ) {
-                System.out.println( domainId.getId() );
-            }
-        }
-    }
-
-    private static String[][] processInputFileNames( final String[] names ) {
-        final String[][] input_file_properties = new String[ names.length ][];
-        for( int i = 0; i < names.length; ++i ) {
-            if ( names[ i ].indexOf( SEPARATOR_FOR_INPUT_VALUES ) < 0 ) {
-                input_file_properties[ i ] = new String[ 2 ];
-                input_file_properties[ i ][ 0 ] = names[ i ];
-                input_file_properties[ i ][ 1 ] = names[ i ];
-            }
-            else {
-                input_file_properties[ i ] = names[ i ].split( surfacing.SEPARATOR_FOR_INPUT_VALUES + "" );
-                if ( input_file_properties[ i ].length != 3 ) {
-                    ForesterUtil
-                            .fatalError( surfacing.PRG_NAME,
-                                         "properties for the input files (hmmpfam output) are expected "
-                                                 + "to be in the following format \"<hmmpfam output file>#<species>\" (or just one word, which is both the filename and the species id), instead received \""
-                                                 + names[ i ] + "\"" );
-                }
-            }
-            final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
-            if ( !ForesterUtil.isEmpty( error ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, error );
-            }
-        }
-        return input_file_properties;
-    }
-
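-    // If the 'plus-minus' analysis option is set, checks that its file argument is readable and
-    // delegates the actual parsing to processPlusMinusFile().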
-    private static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
-                                                        final List<String> high_copy_base,
-                                                        final List<String> high_copy_target,
-                                                        final List<String> low_copy,
-                                                        final List<Object> numbers ) {
-        if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
-            if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for 'plus-minus' file: -"
-                        + surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>" );
-            }
-            final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
-            final String msg = ForesterUtil.isReadableFile( plus_minus_file );
-            if ( !ForesterUtil.isEmpty( msg ) ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
-            }
-            processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
-        }
-    }
-
-    // First numbers is minimal difference, second is factor.
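-    // Example 'plus minus' file (hypothetical genome names; format inferred from the parsing below):
-    //   *STRPU     -> high copy target genome
-    //   +HUMAN     -> high copy base genome
-    //   +MOUSE     -> high copy base genome
-    //   -BRAFL     -> low copy genome
-    //   $D=1       -> minimal difference (integer)
-    //   $F=2.0     -> factor (double)
-    //   # comment lines are ignored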
-    private static void processPlusMinusFile( final File plus_minus_file,
-                                              final List<String> high_copy_base,
-                                              final List<String> high_copy_target,
-                                              final List<String> low_copy,
-                                              final List<Object> numbers ) {
-        Set<String> species_set = null;
-        int min_diff = PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
-        double factor = PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
-        try {
-            species_set = ForesterUtil.file2set( plus_minus_file );
-        }
-        catch ( final IOException e ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
-        }
-        if ( species_set != null ) {
-            for( final String species : species_set ) {
-                final String species_trimmed = species.substring( 1 );
-                if ( species.startsWith( "+" ) ) {
-                    if ( low_copy.contains( species_trimmed ) ) {
-                        ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                                 "species/genome names cannot appear with both a '+' and a '-' prefix, as appears to be the case for: \""
-                                                         + species_trimmed + "\"" );
-                    }
-                    high_copy_base.add( species_trimmed );
-                }
-                else if ( species.startsWith( "*" ) ) {
-                    if ( low_copy.contains( species_trimmed ) ) {
-                        ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                                 "species/genome names cannot appear with both a '*' and a '-' prefix, as appears to be the case for: \""
-                                                         + species_trimmed + "\"" );
-                    }
-                    high_copy_target.add( species_trimmed );
-                }
-                else if ( species.startsWith( "-" ) ) {
-                    if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
-                        ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                                 "species/genome names cannot appear with both a '+' or '*' prefix and a '-' prefix, as appears to be the case for: \""
-                                                         + species_trimmed + "\"" );
-                    }
-                    low_copy.add( species_trimmed );
-                }
-                else if ( species.startsWith( "$D" ) ) {
-                    try {
-                        min_diff = Integer.parseInt( species.substring( 3 ) );
-                    }
-                    catch ( final NumberFormatException e ) {
-                        ForesterUtil.fatalError( surfacing.PRG_NAME,
-                                                 "could not parse integer value for minimal difference from: \""
-                                                         + species.substring( 3 ) + "\"" );
-                    }
-                }
-                else if ( species.startsWith( "$F" ) ) {
-                    try {
-                        factor = Double.parseDouble( species.substring( 3 ) );
-                    }
-                    catch ( final NumberFormatException e ) {
-                        ForesterUtil.fatalError( surfacing.PRG_NAME, "could not parse double value for factor from: \""
-                                + species.substring( 3 ) + "\"" );
-                    }
-                }
-                else if ( species.startsWith( "#" ) ) {
-                    // Comment, ignore.
-                }
-                else {
-                    ForesterUtil
-                            .fatalError( surfacing.PRG_NAME,
-                                         "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' (minimal difference, default is 1), '$F=<double>' (factor, default is 1.0), or '#' (comment, ignored), encountered: \""
-                                                 + species + "\"" );
-                }
-            }
-            numbers.add( Integer.valueOf( min_diff ) );
-            numbers.add( Double.valueOf( factor ) );
-        }
-        else {
-            ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
-        }
-    }
-
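-    // Writes the domain presence/absence matrix (optionally restricted by the positive filter) and
-    // the binary domain combination presence/absence matrix in binary NEXUS format.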
-    private static void writePresentToNexus( final File output_file,
-                                             final File positive_filter_file,
-                                             final SortedSet<DomainId> filter,
-                                             final List<GenomeWideCombinableDomains> gwcd_list ) {
-        try {
-            SurfacingUtil
-                    .writeMatrixToFile( DomainParsimonyCalculator
-                            .createMatrixOfDomainPresenceOrAbsence( gwcd_list, positive_filter_file == null ? null
-                                    : filter ), output_file + DOMAINS_PRESENT_NEXUS, Format.NEXUS_BINARY );
-            SurfacingUtil.writeMatrixToFile( DomainParsimonyCalculator
-                    .createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ), output_file
-                    + BDC_PRESENT_NEXUS, Format.NEXUS_BINARY );
-        }
-        catch ( final Exception e ) {
-            ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
-        }
-    }
-
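-    // For every domain id found in any genome, writes a tab-separated list of the proteins
-    // containing that domain (subject to 'domain_e_cutoff') to its own file in 'output_dir'.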
-    private static void writeProteinListsForAllSpecies( final File output_dir,
-                                                        final SortedMap<Species, List<Protein>> protein_lists_per_species,
-                                                        final List<GenomeWideCombinableDomains> gwcd_list,
-                                                        final double domain_e_cutoff ) {
-        final SortedSet<DomainId> all_domains = new TreeSet<DomainId>();
-        for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
-            all_domains.addAll( gwcd.getAllDomainIds() );
-        }
-        for( final DomainId domain : all_domains ) {
-            final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + SEQ_EXTRACT_SUFFIX );
-            SurfacingUtil.checkForOutputFileWriteability( out );
-            try {
-                final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
-                SurfacingUtil.extractProteinNames( protein_lists_per_species,
-                                                   domain,
-                                                   proteins_file_writer,
-                                                   "\t",
-                                                   LIMIT_SPEC_FOR_PROT_EX,
-                                                   domain_e_cutoff );
-                proteins_file_writer.close();
-            }
-            catch ( final IOException e ) {
-                ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
-            }
-            ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );
-        }
-    }
 }