3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.application;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
47 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
48 import org.forester.go.GoId;
49 import org.forester.go.GoNameSpace;
50 import org.forester.go.GoTerm;
51 import org.forester.go.GoUtils;
52 import org.forester.go.OBOparser;
53 import org.forester.go.PfamToGoMapping;
54 import org.forester.go.PfamToGoParser;
55 import org.forester.io.parsers.HmmscanPerDomainTableParser;
56 import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
57 import org.forester.io.parsers.phyloxml.PhyloXmlUtil;
58 import org.forester.io.parsers.util.ParserUtils;
59 import org.forester.phylogeny.Phylogeny;
60 import org.forester.phylogeny.PhylogenyMethods;
61 import org.forester.phylogeny.PhylogenyNode;
62 import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
63 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
64 import org.forester.protein.BinaryDomainCombination;
65 import org.forester.protein.Domain;
66 import org.forester.protein.Protein;
67 import org.forester.species.BasicSpecies;
68 import org.forester.species.Species;
69 import org.forester.surfacing.BasicDomainSimilarityCalculator;
70 import org.forester.surfacing.BasicGenomeWideCombinableDomains;
71 import org.forester.surfacing.CombinationsBasedPairwiseDomainSimilarityCalculator;
72 import org.forester.surfacing.DomainCountsBasedPairwiseSimilarityCalculator;
73 import org.forester.surfacing.DomainCountsDifferenceUtil;
74 import org.forester.surfacing.DomainLengthsTable;
75 import org.forester.surfacing.DomainParsimonyCalculator;
76 import org.forester.surfacing.DomainSimilarity;
77 import org.forester.surfacing.DomainSimilarity.DomainSimilarityScoring;
78 import org.forester.surfacing.DomainSimilarity.DomainSimilaritySortField;
79 import org.forester.surfacing.DomainSimilarityCalculator;
80 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
81 import org.forester.surfacing.GenomeWideCombinableDomains;
82 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
83 import org.forester.surfacing.MappingResults;
84 import org.forester.surfacing.PairwiseDomainSimilarityCalculator;
85 import org.forester.surfacing.PairwiseGenomeComparator;
86 import org.forester.surfacing.PrintableDomainSimilarity;
87 import org.forester.surfacing.PrintableDomainSimilarity.PRINT_OPTION;
88 import org.forester.surfacing.ProteinCountsBasedPairwiseDomainSimilarityCalculator;
89 import org.forester.surfacing.SurfacingUtil;
90 import org.forester.util.BasicDescriptiveStatistics;
91 import org.forester.util.BasicTable;
92 import org.forester.util.BasicTableParser;
93 import org.forester.util.CommandLineArguments;
94 import org.forester.util.DescriptiveStatistics;
95 import org.forester.util.ForesterConstants;
96 import org.forester.util.ForesterUtil;
98 public class surfacing {
100 private static final int MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING = 1000;
101 public final static String DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS = "graph_analysis_out";
102 public final static String DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_dc.dot";
103 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_fitch_present_dc.dot";
104 public final static String DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX = ".dcc";
106 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS = "_dollo_gl_d";
107 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_BINARY_COMBINATIONS = "_dollo_gl_dc";
108 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_DOMAINS = "_fitch_gl_d";
109 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS = "_fitch_gl_dc";
111 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS = "_dollo_glc_d";
112 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_BINARY_COMBINATIONS = "_dollo_glc_dc";
113 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_DOMAINS = "_fitch_glc_d";
114 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS = "_fitch_glc_dc";
116 public final static String PARSIMONY_OUTPUT_FITCH_GAINS_BC = "_fitch_gains_dc";
117 public final static String PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC = "_fitch_gains_dc.html";
118 public final static String PARSIMONY_OUTPUT_FITCH_LOSSES_BC = "_fitch_losses_dc";
119 public final static String PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC = "_fitch_losses_dc.html";
120 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_BC = "_fitch_present_dc";
121 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC = "_fitch_present_dc.html";
122 public final static String PARSIMONY_OUTPUT_DOLLO_GAINS_D = "_dollo_gains_d";
123 public final static String PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D = "_dollo_gains_d.html";
124 public final static String PARSIMONY_OUTPUT_DOLLO_LOSSES_D = "_dollo_losses_d";
125 public final static String PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D = "_dollo_losses_d.html";
126 public final static String PARSIMONY_OUTPUT_DOLLO_PRESENT_D = "_dollo_present_d";
127 public final static String PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D = "_dollo_present_d.html";
128 public final static String DOMAINS_PRESENT_NEXUS = "_dom.nex";
129 public final static String BDC_PRESENT_NEXUS = "_dc.nex";
131 public final static String PRG_NAME = "surfacing";
132 public static final String DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_d_dollo"
133 + ForesterConstants.PHYLO_XML_SUFFIX;
134 public static final String DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH = "_d_fitch"
135 + ForesterConstants.PHYLO_XML_SUFFIX;
136 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_dc_dollo"
137 + ForesterConstants.PHYLO_XML_SUFFIX;
138 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH = "_dc_fitch"
139 + ForesterConstants.PHYLO_XML_SUFFIX;
140 public static final String NEXUS_EXTERNAL_DOMAINS = "_dom.nex";
141 public static final String NEXUS_EXTERNAL_DOMAIN_COMBINATIONS = "_dc.nex";
142 public static final String NEXUS_SECONDARY_FEATURES = "_secondary_features.nex";
143 public static final String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES = "_dollo_gl_secondary_features";
144 public static final String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES = "_dollo_glc_secondary_features";
145 public static final String PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES = "_dollo_gains_secondary_features";
146 public static final String PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES = "_dollo_losses_secondary_features";
147 public static final String PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES = "_dollo_present_secondary_features";
148 public static final String SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_secondary_features_dollo"
149 + ForesterConstants.PHYLO_XML_SUFFIX;
150 public static final String PARSIMONY_OUTPUT_DOLLO_ALL_GOID_D_ALL_NAMESPACES = "_dollo_goid_d";
151 public static final String PARSIMONY_OUTPUT_FITCH_ALL_GOID_BC_ALL_NAMESPACES = "_fitch_goid_dc";
152 final static private String HELP_OPTION_1 = "help";
153 final static private String HELP_OPTION_2 = "h";
154 final static private String OUTPUT_DIR_OPTION = "out_dir";
155 final static private String SCORING_OPTION = "scoring";
156 private static final DomainSimilarityScoring SCORING_DEFAULT = DomainSimilarity.DomainSimilarityScoring.COMBINATIONS;
157 final static private String SCORING_DOMAIN_COUNT_BASED = "domains";
158 final static private String SCORING_PROTEIN_COUNT_BASED = "proteins";
159 final static private String SCORING_COMBINATION_BASED = "combinations";
160 final static private String DETAILEDNESS_OPTION = "detail";
161 private final static Detailedness DETAILEDNESS_DEFAULT = DomainSimilarityCalculator.Detailedness.PUNCTILIOUS;
162 final static private String SPECIES_MATRIX_OPTION = "smatrix";
163 final static private String DETAILEDNESS_BASIC = "basic";
164 final static private String DETAILEDNESS_LIST_IDS = "list_ids";
165 final static private String DETAILEDNESS_PUNCTILIOUS = "punctilious";
166 final static private String DOMAIN_SIMILARITY_SORT_OPTION = "sort";
167 private static final DomainSimilaritySortField DOMAIN_SORT_FILD_DEFAULT = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
168 final static private String DOMAIN_SIMILARITY_SORT_MIN = "min";
169 final static private String DOMAIN_SIMILARITY_SORT_MAX = "max";
170 final static private String DOMAIN_SIMILARITY_SORT_SD = "sd";
171 final static private String DOMAIN_SIMILARITY_SORT_MEAN = "mean";
172 final static private String DOMAIN_SIMILARITY_SORT_DIFF = "diff";
173 final static private String DOMAIN_SIMILARITY_SORT_COUNTS_DIFF = "count_diff";
174 final static private String DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF = "abs_count_diff";
175 final static private String DOMAIN_SIMILARITY_SORT_SPECIES_COUNT = "species";
176 final static private String DOMAIN_SIMILARITY_SORT_ALPHA = "alpha";
177 final static private String DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION = "species_first";
178 final static private String DOMAIN_COUNT_SORT_OPTION = "dc_sort";
179 private static final GenomeWideCombinableDomainsSortOrder DOMAINS_SORT_ORDER_DEFAULT = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.ALPHABETICAL_KEY_ID;
180 final static private String DOMAIN_COUNT_SORT_ALPHA = "alpha";
181 final static private String DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT = "dom";
182 final static private String DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT = "prot";
183 final static private String DOMAIN_COUNT_SORT_COMBINATIONS_COUNT = "comb";
184 final static private String CUTOFF_SCORE_FILE_OPTION = "cos";
185 final static private String NOT_IGNORE_DUFS_OPTION = "dufs";
186 final static private String MAX_E_VALUE_OPTION = "e";
187 final static private String MAX_ALLOWED_OVERLAP_OPTION = "mo";
188 final static private String NO_ENGULFING_OVERLAP_OPTION = "no_eo";
189 final static private String IGNORE_COMBINATION_WITH_SAME_OPTION = "ignore_self_comb";
190 final static private String PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION = "dc_regain_stats";
191 final static private String DA_ANALYSIS_OPTION = "DA_analyis";
192 final static private String USE_LAST_IN_FITCH_OPTION = "last";
193 final static private String PAIRWISE_DOMAIN_COMPARISONS_PREFIX = "pwc_";
194 final static private String PAIRWISE_DOMAIN_COMPARISONS_OPTION = "pwc";
195 final static private String OUTPUT_FILE_OPTION = "o";
196 final static private String PFAM_TO_GO_FILE_USE_OPTION = "p2g";
197 final static private String GO_OBO_FILE_USE_OPTION = "obo";
198 final static private String GO_NAMESPACE_LIMIT_OPTION = "go_namespace";
199 final static private String GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION = "molecular_function";
200 final static private String GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS = "biological_process";
201 final static private String GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT = "cellular_component";
202 final static private String SECONDARY_FEATURES_PARSIMONY_MAP_FILE = "secondary";
203 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED = "simple_tab";
204 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML = "simple_html";
205 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML = "detailed_html";
206 final static private String DOMAIN_SIMILARITY_PRINT_OPTION = "ds_output";
207 private static final PRINT_OPTION DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT = PrintableDomainSimilarity.PRINT_OPTION.HTML;
208 final static private String IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION = "ignore_singlet_domains";
209 final static private String IGNORE_VIRAL_IDS = "ignore_viral_ids";
210 final static private boolean IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_DEFAULT = false;
211 final static private String IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION = "ignore_species_specific_domains";
212 final static private boolean IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION_DEFAULT = false;
213 final static private String MATRIX_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX = "_mean_score.pwd";
214 final static private String MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX = "_domains.pwd";
215 final static private String MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX = "_bin_combinations.pwd";
216 final static private String NJ_TREE_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX = "_mean_score_NJ"
217 + ForesterConstants.PHYLO_XML_SUFFIX;
218 final static private String NJ_TREE_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX = "_domains_NJ"
219 + ForesterConstants.PHYLO_XML_SUFFIX;
220 final static private String NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX = "_bin_combinations_NJ"
221 + ForesterConstants.PHYLO_XML_SUFFIX;
222 final static private String FILTER_POSITIVE_OPTION = "pos_filter";
223 final static private String FILTER_NEGATIVE_OPTION = "neg_filter";
224 final static private String FILTER_NEGATIVE_DOMAINS_OPTION = "neg_dom_filter";
225 final static private String INPUT_GENOMES_FILE_OPTION = "genomes";
226 final static private String INPUT_SPECIES_TREE_OPTION = "species_tree";
227 final static private String SEQ_EXTRACT_OPTION = "prot_extract";
228 final static private String PRG_VERSION = "2.280";
229 final static private String PRG_DATE = "130701";
230 final static private String E_MAIL = "czmasek@burnham.org";
231 final static private String WWW = "www.phylosoft.org/forester/applications/surfacing";
232 final static private boolean IGNORE_DUFS_DEFAULT = true;
233 final static private boolean IGNORE_COMBINATION_WITH_SAME_DEFAULLT = false;
234 final static private double MAX_E_VALUE_DEFAULT = -1;
235 final static private int MAX_ALLOWED_OVERLAP_DEFAULT = -1;
236 private static final String RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION = "random_seed";
237 private static final String CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS = "consider_bdc_direction";
238 private static final String CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY = "consider_bdc_adj";
239 private static final String SEQ_EXTRACT_SUFFIX = ".prot";
240 private static final String PLUS_MINUS_ANALYSIS_OPTION = "plus_minus";
241 private static final String PLUS_MINUS_DOM_SUFFIX = "_plus_minus_dom.txt";
242 private static final String PLUS_MINUS_DOM_SUFFIX_HTML = "_plus_minus_dom.html";
243 private static final String PLUS_MINUS_DC_SUFFIX_HTML = "_plus_minus_dc.html";
244 private static final int PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT = 0;
245 private static final double PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT = 1.0;
246 private static final String PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_all.txt";
247 private static final String PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_passing.txt";
248 private static final String OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS = "all_prot";
249 final static private String OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION = "all_prot_e";
250 private static final boolean VERBOSE = false;
251 private static final String OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_gains_counts";
252 private static final String OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_losses_counts";
253 private static final String DOMAIN_LENGTHS_ANALYSIS_SUFFIX = "_domain_lengths_analysis";
254 private static final boolean PERFORM_DOMAIN_LENGTH_ANALYSIS = true;
255 public static final String ALL_PFAMS_ENCOUNTERED_SUFFIX = "_all_encountered_pfams";
256 public static final String ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX = "_all_encountered_pfams_with_go_annotation";
257 public static final String ENCOUNTERED_PFAMS_SUMMARY_SUFFIX = "_encountered_pfams_summary";
258 public static final String ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX = "_all_pfams_gained_as_domains";
259 public static final String ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX = "_all_pfams_lost_as_domains";
260 public static final String ALL_PFAMS_GAINED_AS_DC_SUFFIX = "_all_pfams_gained_as_dc";
261 public static final String ALL_PFAMS_LOST_AS_DC_SUFFIX = "_all_pfams_lost_as_dc";
262 public static final String BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES = "PER_NODE_EVENTS";
263 public static final String BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES = "PER_SUBTREE_EVENTS";
264 public static final String D_PROMISCUITY_FILE_SUFFIX = "_domain_promiscuities";
265 private static final String LOG_FILE_SUFFIX = "_log.txt";
266 private static final String DATA_FILE_SUFFIX = "_domain_combination_data.txt";
267 private static final String DATA_FILE_DESC = "#SPECIES\tPRTEIN_ID\tN_TERM_DOMAIN\tC_TERM_DOMAIN\tN_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tC_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tN_TERM_DOMAIN_COUNTS_PER_PROTEIN\tC_TERM_DOMAIN_COUNTS_PER_PROTEIN";
268 private static final String WRITE_TO_NEXUS_OPTION = "nexus";
269 private static final INDIVIDUAL_SCORE_CUTOFF INDIVIDUAL_SCORE_CUTOFF_DEFAULT = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE; //TODO look at me! change?
270 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_counts.txt";
271 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists.txt";
272 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping.txt";
273 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique.txt";
274 public static final String LIMIT_SPEC_FOR_PROT_EX = null; // e.g. "HUMAN"; set to null for not using this feature (default).
275 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED = "_dc_MAPPED_secondary_features_fitch"
276 + ForesterConstants.PHYLO_XML_SUFFIX;
277 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_counts_MAPPED.txt";
278 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_MAPPED.txt";
279 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_MAPPED.txt";
280 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique_MAPPED.txt";
282 private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
283 final String[][] input_file_properties,
284 final String automated_pairwise_comparison_suffix,
285 final File outdir ) {
286 for( int i = 0; i < input_file_properties.length; ++i ) {
287 for( int j = 0; j < i; ++j ) {
288 final String species_i = input_file_properties[ i ][ 1 ];
289 final String species_j = input_file_properties[ j ][ 1 ];
290 String pairwise_similarities_output_file_str = PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i + "_"
291 + species_j + automated_pairwise_comparison_suffix;
292 switch ( domain_similarity_print_option ) {
294 if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
295 pairwise_similarities_output_file_str += ".html";
299 final String error = ForesterUtil
300 .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str : outdir
301 + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
302 if ( !ForesterUtil.isEmpty( error ) ) {
303 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
309 private static StringBuilder createParametersAsString( final boolean ignore_dufs,
310 final double e_value_max,
311 final int max_allowed_overlap,
312 final boolean no_engulfing_overlaps,
313 final File cutoff_scores_file,
314 final BinaryDomainCombination.DomainCombinationType dc_type ) {
315 final StringBuilder parameters_sb = new StringBuilder();
316 parameters_sb.append( "E-value: " + e_value_max );
317 if ( cutoff_scores_file != null ) {
318 parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
321 parameters_sb.append( ", Cutoff-scores-file: not-set" );
323 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
324 parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
327 parameters_sb.append( ", Max-overlap: not-set" );
329 if ( no_engulfing_overlaps ) {
330 parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
333 parameters_sb.append( ", Engulfing-overlaps: allowed" );
336 parameters_sb.append( ", Ignore-dufs: true" );
339 parameters_sb.append( ", Ignore-dufs: false" );
341 parameters_sb.append( ", DC type (if applicable): " + dc_type );
342 return parameters_sb;
346 * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
350 * @param all_bin_domain_combinations_changed
351 * @param sum_of_all_domains_encountered
352 * @param all_bin_domain_combinations_encountered
353 * @param is_gains_analysis
354 * @param protein_length_stats_by_dc
355 * @throws IOException
357 private static void executeFitchGainsAnalysis( final File output_file,
358 final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
359 final int sum_of_all_domains_encountered,
360 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
361 final boolean is_gains_analysis ) throws IOException {
362 SurfacingUtil.checkForOutputFileWriteability( output_file );
363 final Writer out = ForesterUtil.createBufferedWriter( output_file );
364 final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
365 .listToSortedCountsMap( all_bin_domain_combinations_changed );
366 final SortedSet<String> all_domains_in_combination_changed_more_than_once = new TreeSet<String>();
367 final SortedSet<String> all_domains_in_combination_changed_only_once = new TreeSet<String>();
370 for( final Object bdc_object : bdc_to_counts.keySet() ) {
371 final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
372 final int count = bdc_to_counts.get( bdc_object );
374 ForesterUtil.unexpectedFatalError( PRG_NAME, "count < 1 " );
376 out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
378 all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
379 all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
382 else if ( count == 1 ) {
383 all_domains_in_combination_changed_only_once.add( bdc.getId0() );
384 all_domains_in_combination_changed_only_once.add( bdc.getId1() );
388 final int all = all_bin_domain_combinations_encountered.size();
390 if ( !is_gains_analysis ) {
391 all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
392 never_lost = all_bin_domain_combinations_encountered.size();
393 for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
394 out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
397 if ( is_gains_analysis ) {
398 out.write( "Sum of all distinct domain combinations appearing once : " + one
399 + ForesterUtil.LINE_SEPARATOR );
400 out.write( "Sum of all distinct domain combinations appearing more than once : " + above_one
401 + ForesterUtil.LINE_SEPARATOR );
402 out.write( "Sum of all distinct domains in combinations apppearing only once : "
403 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
404 out.write( "Sum of all distinct domains in combinations apppearing more than once: "
405 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
408 out.write( "Sum of all distinct domain combinations never lost : " + never_lost
409 + ForesterUtil.LINE_SEPARATOR );
410 out.write( "Sum of all distinct domain combinations lost once : " + one
411 + ForesterUtil.LINE_SEPARATOR );
412 out.write( "Sum of all distinct domain combinations lost more than once : " + above_one
413 + ForesterUtil.LINE_SEPARATOR );
414 out.write( "Sum of all distinct domains in combinations lost only once : "
415 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
416 out.write( "Sum of all distinct domains in combinations lost more than once: "
417 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
419 out.write( "All binary combinations : " + all
420 + ForesterUtil.LINE_SEPARATOR );
421 out.write( "All domains : "
422 + sum_of_all_domains_encountered );
424 ForesterUtil.programMessage( surfacing.PRG_NAME,
425 "Wrote fitch domain combination dynamics counts analysis to \"" + output_file
429 private static void executePlusMinusAnalysis( final File output_file,
430 final List<String> plus_minus_analysis_high_copy_base,
431 final List<String> plus_minus_analysis_high_copy_target,
432 final List<String> plus_minus_analysis_low_copy,
433 final List<GenomeWideCombinableDomains> gwcd_list,
434 final SortedMap<Species, List<Protein>> protein_lists_per_species,
435 final Map<String, List<GoId>> domain_id_to_go_ids_map,
436 final Map<GoId, GoTerm> go_id_to_term_map,
437 final List<Object> plus_minus_analysis_numbers ) {
438 final Set<String> all_spec = new HashSet<String>();
439 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
440 all_spec.add( gwcd.getSpecies().getSpeciesId() );
442 final File html_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX_HTML );
443 final File plain_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX );
444 final File html_out_dc = new File( output_file + PLUS_MINUS_DC_SUFFIX_HTML );
445 final File all_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
446 final File passing_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
447 final File proteins_file_base = new File( output_file + "" );
448 final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
449 final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
451 DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
452 protein_lists_per_species,
453 plus_minus_analysis_high_copy_base,
454 plus_minus_analysis_high_copy_target,
455 plus_minus_analysis_low_copy,
461 domain_id_to_go_ids_map,
463 all_domains_go_ids_out_dom,
464 passing_domains_go_ids_out_dom,
465 proteins_file_base );
467 catch ( final IOException e ) {
468 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
470 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
471 + html_out_dom + "\"" );
472 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
473 + plain_out_dom + "\"" );
474 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \"" + html_out_dc
476 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based passing GO ids to \""
477 + passing_domains_go_ids_out_dom + "\"" );
478 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based all GO ids to \""
479 + all_domains_go_ids_out_dom + "\"" );
482 private static Phylogeny[] getIntrees( final File[] intree_files,
483 final int number_of_genomes,
484 final String[][] input_file_properties ) {
485 final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
487 for( final File intree_file : intree_files ) {
488 Phylogeny intree = null;
489 final String error = ForesterUtil.isReadableFile( intree_file );
490 if ( !ForesterUtil.isEmpty( error ) ) {
491 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: "
495 final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
496 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
497 if ( p_array.length < 1 ) {
498 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
499 + "] does not contain any phylogeny in phyloXML format" );
501 else if ( p_array.length > 1 ) {
502 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
503 + "] contains more than one phylogeny in phyloXML format" );
505 intree = p_array[ 0 ];
507 catch ( final Exception e ) {
508 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file
511 if ( ( intree == null ) || intree.isEmpty() ) {
512 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
514 if ( !intree.isRooted() ) {
515 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
517 if ( intree.getNumberOfExternalNodes() < number_of_genomes ) {
518 ForesterUtil.fatalError( surfacing.PRG_NAME,
519 "number of external nodes [" + intree.getNumberOfExternalNodes()
520 + "] of input tree [" + intree_file
521 + "] is smaller than the number of genomes the be analyzed ["
522 + number_of_genomes + "]" );
524 final StringBuilder parent_names = new StringBuilder();
525 final int nodes_lacking_name = SurfacingUtil.getNumberOfNodesLackingName( intree, parent_names );
526 if ( nodes_lacking_name > 0 ) {
527 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] has "
528 + nodes_lacking_name + " node(s) lacking a name [parent names:" + parent_names + "]" );
530 preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
531 if ( !intree.isCompletelyBinary() ) {
532 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "input tree [" + intree_file
533 + "] is not completely binary" );
535 intrees[ i++ ] = intree;
540 private static void log( final String msg, final Writer w ) {
543 w.write( ForesterUtil.LINE_SEPARATOR );
545 catch ( final IOException e ) {
546 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
550 public static void main( final String args[] ) {
551 final long start_time = new Date().getTime();
552 // final StringBuffer log = new StringBuffer();
553 final StringBuilder html_desc = new StringBuilder();
554 ForesterUtil.printProgramInformation( surfacing.PRG_NAME,
555 surfacing.PRG_VERSION,
559 final String nl = ForesterUtil.LINE_SEPARATOR;
560 html_desc.append( "<table>" + nl );
561 html_desc.append( "<tr><td>Produced by:</td><td>" + surfacing.PRG_NAME + "</td></tr>" + nl );
562 html_desc.append( "<tr><td>Version:</td><td>" + surfacing.PRG_VERSION + "</td></tr>" + nl );
563 html_desc.append( "<tr><td>Release Date:</td><td>" + surfacing.PRG_DATE + "</td></tr>" + nl );
564 html_desc.append( "<tr><td>Contact:</td><td>" + surfacing.E_MAIL + "</td></tr>" + nl );
565 html_desc.append( "<tr><td>WWW:</td><td>" + surfacing.WWW + "</td></tr>" + nl );
566 CommandLineArguments cla = null;
568 cla = new CommandLineArguments( args );
570 catch ( final Exception e ) {
571 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
573 if ( cla.isOptionSet( surfacing.HELP_OPTION_1 ) || cla.isOptionSet( surfacing.HELP_OPTION_2 ) ) {
574 surfacing.printHelp();
577 if ( ( args.length < 1 ) ) {
578 surfacing.printHelp();
581 final List<String> allowed_options = new ArrayList<String>();
582 allowed_options.add( surfacing.NOT_IGNORE_DUFS_OPTION );
583 allowed_options.add( surfacing.MAX_E_VALUE_OPTION );
584 allowed_options.add( surfacing.DETAILEDNESS_OPTION );
585 allowed_options.add( surfacing.OUTPUT_FILE_OPTION );
586 allowed_options.add( surfacing.DOMAIN_SIMILARITY_SORT_OPTION );
587 allowed_options.add( surfacing.SPECIES_MATRIX_OPTION );
588 allowed_options.add( surfacing.SCORING_OPTION );
589 allowed_options.add( surfacing.MAX_ALLOWED_OVERLAP_OPTION );
590 allowed_options.add( surfacing.NO_ENGULFING_OVERLAP_OPTION );
591 allowed_options.add( surfacing.DOMAIN_COUNT_SORT_OPTION );
592 allowed_options.add( surfacing.CUTOFF_SCORE_FILE_OPTION );
593 allowed_options.add( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION );
594 allowed_options.add( surfacing.OUTPUT_DIR_OPTION );
595 allowed_options.add( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION );
596 allowed_options.add( surfacing.PFAM_TO_GO_FILE_USE_OPTION );
597 allowed_options.add( surfacing.GO_OBO_FILE_USE_OPTION );
598 allowed_options.add( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION );
599 allowed_options.add( surfacing.GO_NAMESPACE_LIMIT_OPTION );
600 allowed_options.add( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
601 allowed_options.add( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION );
602 allowed_options.add( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS );
603 //allowed_options.add( JACKNIFE_OPTION );
604 // allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
605 // allowed_options.add( JACKNIFE_RATIO_OPTION );
606 allowed_options.add( INPUT_SPECIES_TREE_OPTION );
607 allowed_options.add( FILTER_POSITIVE_OPTION );
608 allowed_options.add( FILTER_NEGATIVE_OPTION );
609 allowed_options.add( INPUT_GENOMES_FILE_OPTION );
610 allowed_options.add( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
611 allowed_options.add( FILTER_NEGATIVE_DOMAINS_OPTION );
612 allowed_options.add( IGNORE_VIRAL_IDS );
613 allowed_options.add( SEQ_EXTRACT_OPTION );
614 allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
615 allowed_options.add( SECONDARY_FEATURES_PARSIMONY_MAP_FILE );
616 allowed_options.add( PLUS_MINUS_ANALYSIS_OPTION );
617 allowed_options.add( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS );
618 allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS );
619 allowed_options.add( CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY );
620 allowed_options.add( WRITE_TO_NEXUS_OPTION );
621 allowed_options.add( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION );
622 allowed_options.add( DA_ANALYSIS_OPTION );
623 allowed_options.add( USE_LAST_IN_FITCH_OPTION );
624 boolean ignore_dufs = surfacing.IGNORE_DUFS_DEFAULT;
625 boolean ignore_combination_with_same = surfacing.IGNORE_COMBINATION_WITH_SAME_DEFAULLT;
626 double e_value_max = surfacing.MAX_E_VALUE_DEFAULT;
627 int max_allowed_overlap = surfacing.MAX_ALLOWED_OVERLAP_DEFAULT;
628 final String dissallowed_options = cla.validateAllowedOptionsAsString( allowed_options );
629 if ( dissallowed_options.length() > 0 ) {
630 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown option(s): " + dissallowed_options );
632 boolean use_last_in_fitch_parsimony = false;
633 if ( cla.isOptionSet( USE_LAST_IN_FITCH_OPTION ) ) {
634 use_last_in_fitch_parsimony = true;
636 boolean write_to_nexus = false;
637 if ( cla.isOptionSet( WRITE_TO_NEXUS_OPTION ) ) {
638 write_to_nexus = true;
640 boolean perform_dc_regain_proteins_stats = false;
641 if ( cla.isOptionSet( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION ) ) {
642 perform_dc_regain_proteins_stats = true;
644 boolean da_analysis = false;
645 if ( cla.isOptionSet( DA_ANALYSIS_OPTION ) ) {
648 boolean output_binary_domain_combinationsfor_graph_analysis = false;
649 if ( cla.isOptionSet( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS ) ) {
650 output_binary_domain_combinationsfor_graph_analysis = true;
652 if ( cla.isOptionSet( surfacing.MAX_E_VALUE_OPTION ) ) {
654 e_value_max = cla.getOptionValueAsDouble( surfacing.MAX_E_VALUE_OPTION );
656 catch ( final Exception e ) {
657 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for E-value maximum" );
660 if ( cla.isOptionSet( surfacing.MAX_ALLOWED_OVERLAP_OPTION ) ) {
662 max_allowed_overlap = cla.getOptionValueAsInt( surfacing.MAX_ALLOWED_OVERLAP_OPTION );
664 catch ( final Exception e ) {
665 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for maximal allowed domain overlap" );
668 boolean no_engulfing_overlaps = false;
669 if ( cla.isOptionSet( surfacing.NO_ENGULFING_OVERLAP_OPTION ) ) {
670 no_engulfing_overlaps = true;
672 boolean ignore_virus_like_ids = false;
673 if ( cla.isOptionSet( surfacing.IGNORE_VIRAL_IDS ) ) {
674 ignore_virus_like_ids = true;
676 if ( cla.isOptionSet( surfacing.NOT_IGNORE_DUFS_OPTION ) ) {
679 if ( cla.isOptionSet( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION ) ) {
680 ignore_combination_with_same = true;
682 boolean ignore_domains_without_combs_in_all_spec = IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_DEFAULT;
683 if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION ) ) {
684 ignore_domains_without_combs_in_all_spec = true;
686 boolean ignore_species_specific_domains = IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION_DEFAULT;
687 if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION ) ) {
688 ignore_species_specific_domains = true;
690 File output_file = null;
691 if ( cla.isOptionSet( surfacing.OUTPUT_FILE_OPTION ) ) {
692 if ( !cla.isOptionValueSet( surfacing.OUTPUT_FILE_OPTION ) ) {
693 ForesterUtil.fatalError( surfacing.PRG_NAME,
694 "no value for domain combinations similarities output file: -"
695 + surfacing.OUTPUT_FILE_OPTION + "=<file>" );
697 output_file = new File( cla.getOptionValue( surfacing.OUTPUT_FILE_OPTION ) );
698 SurfacingUtil.checkForOutputFileWriteability( output_file );
700 File cutoff_scores_file = null;
701 Map<String, Double> individual_score_cutoffs = null;
702 if ( cla.isOptionSet( surfacing.CUTOFF_SCORE_FILE_OPTION ) ) {
703 if ( !cla.isOptionValueSet( surfacing.CUTOFF_SCORE_FILE_OPTION ) ) {
704 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for individual domain score cutoffs file: -"
705 + surfacing.CUTOFF_SCORE_FILE_OPTION + "=<file>" );
707 cutoff_scores_file = new File( cla.getOptionValue( surfacing.CUTOFF_SCORE_FILE_OPTION ) );
708 final String error = ForesterUtil.isReadableFile( cutoff_scores_file );
709 if ( !ForesterUtil.isEmpty( error ) ) {
710 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read individual domain score cutoffs file: "
714 final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, ' ' );
715 individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
717 catch ( final IOException e ) {
718 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from individual score cutoffs file: " + e );
721 BinaryDomainCombination.DomainCombinationType dc_type = BinaryDomainCombination.DomainCombinationType.BASIC;
722 if ( cla.isOptionSet( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS ) ) {
723 dc_type = BinaryDomainCombination.DomainCombinationType.DIRECTED;
725 if ( cla.isOptionSet( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY ) ) {
726 dc_type = BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT;
729 if ( cla.isOptionSet( surfacing.OUTPUT_DIR_OPTION ) ) {
730 if ( !cla.isOptionValueSet( surfacing.OUTPUT_DIR_OPTION ) ) {
731 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for output directory: -"
732 + surfacing.OUTPUT_DIR_OPTION + "=<dir>" );
734 out_dir = new File( cla.getOptionValue( surfacing.OUTPUT_DIR_OPTION ) );
735 if ( out_dir.exists() && ( out_dir.listFiles().length > 0 ) ) {
736 ForesterUtil.fatalError( surfacing.PRG_NAME, "\"" + out_dir + "\" aready exists and is not empty" );
738 if ( !out_dir.exists() ) {
739 final boolean success = out_dir.mkdir();
740 if ( !success || !out_dir.exists() ) {
741 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to create \"" + out_dir + "\"" );
744 if ( !out_dir.canWrite() ) {
745 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot write to \"" + out_dir + "\"" );
748 File positive_filter_file = null;
749 File negative_filter_file = null;
750 File negative_domains_filter_file = null;
751 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) && cla.isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
752 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use both negative and positive protein filter" );
754 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION )
755 && ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) || cla
756 .isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) ) {
758 .fatalError( surfacing.PRG_NAME,
759 "attempt to use both negative or positive protein filter together wirh a negative domains filter" );
761 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) ) {
762 if ( !cla.isOptionValueSet( surfacing.FILTER_NEGATIVE_OPTION ) ) {
763 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for negative filter: -"
764 + surfacing.FILTER_NEGATIVE_OPTION + "=<file>" );
766 negative_filter_file = new File( cla.getOptionValue( surfacing.FILTER_NEGATIVE_OPTION ) );
767 final String msg = ForesterUtil.isReadableFile( negative_filter_file );
768 if ( !ForesterUtil.isEmpty( msg ) ) {
769 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + negative_filter_file + "\": "
773 else if ( cla.isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
774 if ( !cla.isOptionValueSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
775 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for positive filter: -"
776 + surfacing.FILTER_POSITIVE_OPTION + "=<file>" );
778 positive_filter_file = new File( cla.getOptionValue( surfacing.FILTER_POSITIVE_OPTION ) );
779 final String msg = ForesterUtil.isReadableFile( positive_filter_file );
780 if ( !ForesterUtil.isEmpty( msg ) ) {
781 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + positive_filter_file + "\": "
785 else if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) ) {
786 if ( !cla.isOptionValueSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) ) {
787 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for negative domains filter: -"
788 + surfacing.FILTER_NEGATIVE_DOMAINS_OPTION + "=<file>" );
790 negative_domains_filter_file = new File( cla.getOptionValue( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) );
791 final String msg = ForesterUtil.isReadableFile( negative_domains_filter_file );
792 if ( !ForesterUtil.isEmpty( msg ) ) {
793 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + negative_domains_filter_file
797 final List<String> plus_minus_analysis_high_copy_base_species = new ArrayList<String>();
798 final List<String> plus_minus_analysis_high_copy_target_species = new ArrayList<String>();
799 final List<String> plus_minus_analysis_high_low_copy_species = new ArrayList<String>();
800 final List<Object> plus_minus_analysis_numbers = new ArrayList<Object>();
801 processPlusMinusAnalysisOption( cla,
802 plus_minus_analysis_high_copy_base_species,
803 plus_minus_analysis_high_copy_target_species,
804 plus_minus_analysis_high_low_copy_species,
805 plus_minus_analysis_numbers );
806 File input_genomes_file = null;
807 if ( cla.isOptionSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
808 if ( !cla.isOptionValueSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
809 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input genomes file: -"
810 + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
812 input_genomes_file = new File( cla.getOptionValue( surfacing.INPUT_GENOMES_FILE_OPTION ) );
813 final String msg = ForesterUtil.isReadableFile( input_genomes_file );
814 if ( !ForesterUtil.isEmpty( msg ) ) {
816 .fatalError( surfacing.PRG_NAME, "can not read from \"" + input_genomes_file + "\": " + msg );
820 ForesterUtil.fatalError( surfacing.PRG_NAME, "no input genomes file given: "
821 + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
823 DomainSimilarity.DomainSimilarityScoring scoring = SCORING_DEFAULT;
824 if ( cla.isOptionSet( surfacing.SCORING_OPTION ) ) {
825 if ( !cla.isOptionValueSet( surfacing.SCORING_OPTION ) ) {
826 ForesterUtil.fatalError( surfacing.PRG_NAME,
827 "no value for scoring method for domain combinations similarity calculation: -"
828 + surfacing.SCORING_OPTION + "=<"
829 + surfacing.SCORING_DOMAIN_COUNT_BASED + "|"
830 + surfacing.SCORING_PROTEIN_COUNT_BASED + "|"
831 + surfacing.SCORING_COMBINATION_BASED + ">\"" );
833 final String scoring_str = cla.getOptionValue( surfacing.SCORING_OPTION );
834 if ( scoring_str.equals( surfacing.SCORING_DOMAIN_COUNT_BASED ) ) {
835 scoring = DomainSimilarity.DomainSimilarityScoring.DOMAINS;
837 else if ( scoring_str.equals( surfacing.SCORING_COMBINATION_BASED ) ) {
838 scoring = DomainSimilarity.DomainSimilarityScoring.COMBINATIONS;
840 else if ( scoring_str.equals( surfacing.SCORING_PROTEIN_COUNT_BASED ) ) {
841 scoring = DomainSimilarity.DomainSimilarityScoring.PROTEINS;
844 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + scoring_str
845 + "\" for scoring method for domain combinations similarity calculation: \"-"
846 + surfacing.SCORING_OPTION + "=<" + surfacing.SCORING_DOMAIN_COUNT_BASED + "|"
847 + surfacing.SCORING_PROTEIN_COUNT_BASED + "|" + surfacing.SCORING_COMBINATION_BASED + ">\"" );
850 boolean sort_by_species_count_first = false;
851 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION ) ) {
852 sort_by_species_count_first = true;
854 boolean species_matrix = false;
855 if ( cla.isOptionSet( surfacing.SPECIES_MATRIX_OPTION ) ) {
856 species_matrix = true;
858 boolean output_protein_lists_for_all_domains = false;
859 double output_list_of_all_proteins_per_domain_e_value_max = -1;
860 if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS ) ) {
861 output_protein_lists_for_all_domains = true;
862 if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION ) ) {
864 output_list_of_all_proteins_per_domain_e_value_max = cla
865 .getOptionValueAsDouble( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
867 catch ( final Exception e ) {
868 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for per domain E-value maximum" );
872 Detailedness detailedness = DETAILEDNESS_DEFAULT;
873 if ( cla.isOptionSet( surfacing.DETAILEDNESS_OPTION ) ) {
874 if ( !cla.isOptionValueSet( surfacing.DETAILEDNESS_OPTION ) ) {
875 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for -" + surfacing.DETAILEDNESS_OPTION + "=<"
876 + surfacing.DETAILEDNESS_BASIC + "|" + surfacing.DETAILEDNESS_LIST_IDS + "|"
877 + surfacing.DETAILEDNESS_PUNCTILIOUS + ">\"" );
879 final String detness = cla.getOptionValue( surfacing.DETAILEDNESS_OPTION ).toLowerCase();
880 if ( detness.equals( surfacing.DETAILEDNESS_BASIC ) ) {
881 detailedness = DomainSimilarityCalculator.Detailedness.BASIC;
883 else if ( detness.equals( surfacing.DETAILEDNESS_LIST_IDS ) ) {
884 detailedness = DomainSimilarityCalculator.Detailedness.LIST_COMBINING_DOMAIN_FOR_EACH_SPECIES;
886 else if ( detness.equals( surfacing.DETAILEDNESS_PUNCTILIOUS ) ) {
887 detailedness = DomainSimilarityCalculator.Detailedness.PUNCTILIOUS;
890 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + detness + "\" for detailedness: \"-"
891 + surfacing.DETAILEDNESS_OPTION + "=<" + surfacing.DETAILEDNESS_BASIC + "|"
892 + surfacing.DETAILEDNESS_LIST_IDS + "|" + surfacing.DETAILEDNESS_PUNCTILIOUS + ">\"" );
895 String automated_pairwise_comparison_suffix = null;
896 boolean perform_pwc = false;
897 boolean write_pwc_files = false;
898 if ( cla.isOptionSet( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION ) ) {
900 if ( !cla.isOptionValueSet( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION ) ) {
901 write_pwc_files = false;
904 write_pwc_files = true;
905 automated_pairwise_comparison_suffix = "_"
906 + cla.getOptionValue( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
909 String query_domain_ids = null;
910 if ( cla.isOptionSet( surfacing.SEQ_EXTRACT_OPTION ) ) {
911 if ( !cla.isOptionValueSet( surfacing.SEQ_EXTRACT_OPTION ) ) {
913 .fatalError( surfacing.PRG_NAME,
914 "no domain ids given for sequences with given domains to be extracted : -"
915 + surfacing.SEQ_EXTRACT_OPTION
916 + "=<ordered domain sequences, domain ids separated by '~', sequences separated by '#'>" );
918 query_domain_ids = cla.getOptionValue( surfacing.SEQ_EXTRACT_OPTION );
920 DomainSimilarity.DomainSimilaritySortField domain_similarity_sort_field = DOMAIN_SORT_FILD_DEFAULT;
921 DomainSimilarity.DomainSimilaritySortField domain_similarity_sort_field_for_automated_pwc = DOMAIN_SORT_FILD_DEFAULT;
922 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ) ) {
923 if ( !cla.isOptionValueSet( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ) ) {
924 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for domain combinations similarities sorting: -"
925 + surfacing.DOMAIN_SIMILARITY_SORT_OPTION + "=<" + surfacing.DOMAIN_SIMILARITY_SORT_ALPHA + "|"
926 + surfacing.DOMAIN_SIMILARITY_SORT_MAX + "|" + surfacing.DOMAIN_SIMILARITY_SORT_MIN + "|"
927 + surfacing.DOMAIN_SIMILARITY_SORT_MEAN + "|" + surfacing.DOMAIN_SIMILARITY_SORT_DIFF + "|"
928 + surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF + "|"
929 + surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF + "|"
930 + surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT + "|" + surfacing.DOMAIN_SIMILARITY_SORT_SD
933 final String sort_str = cla.getOptionValue( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ).toLowerCase();
934 if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_ALPHA ) ) {
935 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
936 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
938 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MAX ) ) {
939 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX;
940 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
942 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MIN ) ) {
943 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MIN;
944 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
946 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MEAN ) ) {
947 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MEAN;
948 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MEAN;
950 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT ) ) {
951 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.SPECIES_COUNT;
952 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
954 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_SD ) ) {
955 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.SD;
956 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
958 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_DIFF ) ) {
959 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX_DIFFERENCE;
960 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MAX_DIFFERENCE;
962 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF ) ) {
963 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
964 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
966 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF ) ) {
967 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE;
968 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE;
971 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort_str
972 + "\" for domain combinations similarities sorting: \"-"
973 + surfacing.DOMAIN_SIMILARITY_SORT_OPTION + "=<" + surfacing.DOMAIN_SIMILARITY_SORT_ALPHA + "|"
974 + surfacing.DOMAIN_SIMILARITY_SORT_MAX + "|" + surfacing.DOMAIN_SIMILARITY_SORT_MIN + "|"
975 + surfacing.DOMAIN_SIMILARITY_SORT_MEAN + "|" + surfacing.DOMAIN_SIMILARITY_SORT_DIFF + "|"
976 + surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF + "|"
977 + surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF + "|" + "|"
978 + surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT + "|" + surfacing.DOMAIN_SIMILARITY_SORT_SD
982 PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option = DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT;
983 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ) ) {
984 if ( !cla.isOptionValueSet( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ) ) {
985 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for print option: -"
986 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML + "|"
987 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML + "|"
988 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED + ">\"" );
990 final String sort = cla.getOptionValue( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ).toLowerCase();
991 if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML ) ) {
992 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.HTML;
994 else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML ) ) {
995 ForesterUtil.fatalError( surfacing.PRG_NAME, "simple HTML output not implemented yet :(" );
997 else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED ) ) {
998 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.SIMPLE_TAB_DELIMITED;
1001 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort + "\" for print option: -"
1002 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML + "|"
1003 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML + "|"
1004 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED + ">\"" );
1007 GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order = DOMAINS_SORT_ORDER_DEFAULT;
1008 if ( cla.isOptionSet( surfacing.DOMAIN_COUNT_SORT_OPTION ) ) {
1009 if ( !cla.isOptionValueSet( surfacing.DOMAIN_COUNT_SORT_OPTION ) ) {
1010 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for sorting of domain counts: -"
1011 + surfacing.DOMAIN_COUNT_SORT_OPTION + "=<" + surfacing.DOMAIN_COUNT_SORT_ALPHA + "|"
1012 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT + "|"
1013 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT + "|"
1014 + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
1016 final String sort = cla.getOptionValue( surfacing.DOMAIN_COUNT_SORT_OPTION ).toLowerCase();
1017 if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_ALPHA ) ) {
1018 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.ALPHABETICAL_KEY_ID;
1020 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT ) ) {
1021 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.KEY_DOMAIN_COUNT;
1023 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT ) ) {
1024 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.KEY_DOMAIN_PROTEINS_COUNT;
1026 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT ) ) {
1027 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.COMBINATIONS_COUNT;
1030 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort
1031 + "\" for sorting of domain counts: \"-" + surfacing.DOMAIN_COUNT_SORT_OPTION + "=<"
1032 + surfacing.DOMAIN_COUNT_SORT_ALPHA + "|" + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT + "|"
1033 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT + "|"
1034 + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
1037 final String[][] input_file_properties = processInputGenomesFile( input_genomes_file );
1038 final int number_of_genomes = input_file_properties.length;
1039 if ( number_of_genomes < 2 ) {
1040 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot analyze less than two files" );
1042 if ( ( number_of_genomes < 3 ) && perform_pwc ) {
1043 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use : -"
1044 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1045 + "=<suffix> to turn on pairwise analyses with less than three input files" );
1047 checkWriteabilityForPairwiseComparisons( domain_similarity_print_option,
1048 input_file_properties,
1049 automated_pairwise_comparison_suffix,
1051 for( int i = 0; i < number_of_genomes; i++ ) {
1052 File dcc_outfile = new File( input_file_properties[ i ][ 1 ]
1053 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1054 if ( out_dir != null ) {
1055 dcc_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + dcc_outfile );
1057 SurfacingUtil.checkForOutputFileWriteability( dcc_outfile );
1059 File pfam_to_go_file = null;
1060 Map<String, List<GoId>> domain_id_to_go_ids_map = null;
1061 int domain_id_to_go_ids_count = 0;
1062 if ( cla.isOptionSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
1063 if ( !cla.isOptionValueSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
1064 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for Pfam to GO mapping file: -"
1065 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>" );
1067 pfam_to_go_file = new File( cla.getOptionValue( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) );
1068 final String error = ForesterUtil.isReadableFile( pfam_to_go_file );
1069 if ( !ForesterUtil.isEmpty( error ) ) {
1070 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read Pfam to GO mapping file: " + error );
1073 final PfamToGoParser parser = new PfamToGoParser( pfam_to_go_file );
1074 final List<PfamToGoMapping> pfam_to_go_mappings = parser.parse();
1075 domain_id_to_go_ids_map = SurfacingUtil.createDomainIdToGoIdMap( pfam_to_go_mappings );
1076 if ( parser.getMappingCount() < domain_id_to_go_ids_map.size() ) {
1077 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME,
1078 "parser.getMappingCount() < domain_id_to_go_ids_map.size()" );
1080 domain_id_to_go_ids_count = parser.getMappingCount();
1082 catch ( final IOException e ) {
1083 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from Pfam to GO mapping file: " + e );
1086 File go_obo_file = null;
1087 List<GoTerm> go_terms = null;
1088 if ( cla.isOptionSet( surfacing.GO_OBO_FILE_USE_OPTION ) ) {
1089 if ( !cla.isOptionValueSet( surfacing.GO_OBO_FILE_USE_OPTION ) ) {
1090 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for GO OBO file: -"
1091 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>" );
1093 if ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) {
1094 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use GO OBO file (-"
1095 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>) without Pfam to GO mapping file ("
1096 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>)" );
1098 go_obo_file = new File( cla.getOptionValue( surfacing.GO_OBO_FILE_USE_OPTION ) );
1099 final String error = ForesterUtil.isReadableFile( go_obo_file );
1100 if ( !ForesterUtil.isEmpty( error ) ) {
1101 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read GO OBO file: " + error );
1104 final OBOparser parser = new OBOparser( go_obo_file, OBOparser.ReturnType.BASIC_GO_TERM );
1105 go_terms = parser.parse();
1106 if ( parser.getGoTermCount() != go_terms.size() ) {
1108 .unexpectedFatalError( surfacing.PRG_NAME, "parser.getGoTermCount() != go_terms.size()" );
1111 catch ( final IOException e ) {
1112 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from GO OBO file: " + e );
1115 Map<GoId, GoTerm> go_id_to_term_map = null;
1116 if ( ( ( domain_id_to_go_ids_map != null ) && ( domain_id_to_go_ids_map.size() > 0 ) )
1117 && ( ( go_terms != null ) && ( go_terms.size() > 0 ) ) ) {
1118 go_id_to_term_map = GoUtils.createGoIdToGoTermMap( go_terms );
1120 GoNameSpace go_namespace_limit = null;
1121 if ( cla.isOptionSet( surfacing.GO_NAMESPACE_LIMIT_OPTION ) ) {
1122 if ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) {
1123 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use GO namespace limit (-"
1124 + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<namespace>) without Pfam to GO mapping file ("
1125 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>) and GO OBO file (-"
1126 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
1128 if ( !cla.isOptionValueSet( surfacing.GO_NAMESPACE_LIMIT_OPTION ) ) {
1129 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for GO namespace limit: \"-"
1130 + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<"
1131 + surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION + "|"
1132 + surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS + "|"
1133 + surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT + ">\"" );
1135 final String go_namespace_limit_str = cla.getOptionValue( surfacing.GO_NAMESPACE_LIMIT_OPTION )
1137 if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION ) ) {
1138 go_namespace_limit = GoNameSpace.createMolecularFunction();
1140 else if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS ) ) {
1141 go_namespace_limit = GoNameSpace.createBiologicalProcess();
1143 else if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT ) ) {
1144 go_namespace_limit = GoNameSpace.createCellularComponent();
1147 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + go_namespace_limit_str
1148 + "\" for GO namespace limit: \"-" + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<"
1149 + surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION + "|"
1150 + surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS + "|"
1151 + surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT + ">\"" );
1154 if ( ( domain_similarity_sort_field == DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE )
1155 && ( number_of_genomes > 2 ) ) {
1156 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
1158 File[] intree_files = null;
1159 Phylogeny[] intrees = null;
1160 if ( cla.isOptionSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
1161 if ( number_of_genomes < 3 ) {
1162 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer gains and losses on input species trees (-"
1163 + surfacing.INPUT_SPECIES_TREE_OPTION + " without pairwise analyses ("
1164 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1165 + "=<suffix for pairwise comparison output files>)" );
1167 if ( !cla.isOptionValueSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
1168 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input tree: -"
1169 + surfacing.INPUT_SPECIES_TREE_OPTION + "=<tree file in phyloXML format>" );
1171 final String intrees_str = cla.getOptionValue( surfacing.INPUT_SPECIES_TREE_OPTION );
1172 if ( intrees_str.indexOf( "#" ) > 0 ) {
1173 final String[] intrees_strs = intrees_str.split( "#" );
1174 intree_files = new File[ intrees_strs.length ];
1176 for( final String s : intrees_strs ) {
1177 intree_files[ i++ ] = new File( s.trim() );
1181 intree_files = new File[ 1 ];
1182 intree_files[ 0 ] = new File( intrees_str );
1184 intrees = getIntrees( intree_files, number_of_genomes, input_file_properties );
1186 long random_number_seed_for_fitch_parsimony = 0l;
1187 boolean radomize_fitch_parsimony = false;
1188 if ( cla.isOptionSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
1189 if ( !cla.isOptionValueSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
1190 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for random number seed: -"
1191 + surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION + "=<seed>" );
1194 random_number_seed_for_fitch_parsimony = cla
1195 .getOptionValueAsLong( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
1197 catch ( final IOException e ) {
1198 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1200 radomize_fitch_parsimony = true;
1202 SortedSet<String> filter = null;
1203 if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
1204 || ( negative_domains_filter_file != null ) ) {
1205 filter = new TreeSet<String>();
1206 if ( positive_filter_file != null ) {
1207 processFilter( positive_filter_file, filter );
1209 else if ( negative_filter_file != null ) {
1210 processFilter( negative_filter_file, filter );
1212 else if ( negative_domains_filter_file != null ) {
1213 processFilter( negative_domains_filter_file, filter );
1216 Map<String, Set<String>>[] domain_id_to_secondary_features_maps = null;
1217 File[] secondary_features_map_files = null;
1218 final File domain_lengths_analysis_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1219 + DOMAIN_LENGTHS_ANALYSIS_SUFFIX );
1220 if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
1221 SurfacingUtil.checkForOutputFileWriteability( domain_lengths_analysis_outfile );
1223 if ( cla.isOptionSet( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ) ) {
1224 if ( !cla.isOptionValueSet( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ) ) {
1225 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for secondary features map file: -"
1226 + surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE + "=<file>" );
1228 final String[] secondary_features_map_files_strs = cla
1229 .getOptionValue( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ).split( "#" );
1230 secondary_features_map_files = new File[ secondary_features_map_files_strs.length ];
1231 domain_id_to_secondary_features_maps = new Map[ secondary_features_map_files_strs.length ];
1233 for( final String secondary_features_map_files_str : secondary_features_map_files_strs ) {
1234 secondary_features_map_files[ i ] = new File( secondary_features_map_files_str );
1235 final String error = ForesterUtil.isReadableFile( secondary_features_map_files[ i ] );
1236 if ( !ForesterUtil.isEmpty( error ) ) {
1237 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read secondary features map file: " + error );
1240 domain_id_to_secondary_features_maps[ i ] = SurfacingUtil
1241 .createDomainIdToSecondaryFeaturesMap( secondary_features_map_files[ i ] );
1243 catch ( final IOException e ) {
1244 ForesterUtil.fatalError( surfacing.PRG_NAME,
1245 "cannot read secondary features map file: " + e.getMessage() );
1247 catch ( final Exception e ) {
1248 ForesterUtil.fatalError( surfacing.PRG_NAME, "problem with contents of features map file ["
1249 + secondary_features_map_files[ i ] + "]: " + e.getMessage() );
1254 if ( out_dir == null ) {
1255 ForesterUtil.fatalError( surfacing.PRG_NAME, "no output directory indicated (-"
1256 + surfacing.OUTPUT_DIR_OPTION + "=<dir>)" );
1258 if ( output_file == null ) {
1259 ForesterUtil.fatalError( surfacing.PRG_NAME, "no name for (main) output file indicated (-"
1260 + surfacing.OUTPUT_FILE_OPTION + "=<file>)" );
1262 if ( ( domain_id_to_go_ids_map == null ) || domain_id_to_go_ids_map.isEmpty() ) {
1263 ForesterUtil.fatalError( surfacing.PRG_NAME,
1264 "no (acceptable) Pfam to GO id mapping file provided ('pfam2go file') (-"
1265 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>)" );
1267 if ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) {
1268 ForesterUtil.fatalError( surfacing.PRG_NAME,
1269 "no (acceptable) go id to term mapping file provided ('GO OBO file') (-"
1270 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
1272 System.out.println( "Output directory : " + out_dir );
1273 System.out.println( "Input genomes from : " + input_genomes_file );
1274 html_desc.append( "<tr><td>Input genomes from:</td><td>" + input_genomes_file + "</td></tr>" + nl );
1275 if ( positive_filter_file != null ) {
1276 final int filter_size = filter.size();
1277 System.out.println( "Positive protein filter : " + positive_filter_file + " [" + filter_size
1279 html_desc.append( "<tr><td>Positive protein filter:</td><td>" + positive_filter_file + " [" + filter_size
1280 + " domain ids]</td></tr>" + nl );
1282 if ( negative_filter_file != null ) {
1283 final int filter_size = filter.size();
1284 System.out.println( "Negative protein filter : " + negative_filter_file + " [" + filter_size
1286 html_desc.append( "<tr><td>Negative protein filter:</td><td>" + negative_filter_file + " [" + filter_size
1287 + " domain ids]</td></tr>" + nl );
1289 if ( negative_domains_filter_file != null ) {
1290 final int filter_size = filter.size();
1291 System.out.println( "Negative domain filter : " + negative_domains_filter_file + " [" + filter_size
1293 html_desc.append( "<tr><td>Negative domain filter:</td><td>" + negative_domains_filter_file + " ["
1294 + filter_size + " domain ids]</td></tr>" + nl );
1296 if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
1298 for( final String s : plus_minus_analysis_high_copy_base_species ) {
1299 plus0 += "+" + s + " ";
1302 for( final String s : plus_minus_analysis_high_copy_target_species ) {
1303 plus1 += "*" + s + " ";
1306 for( final String s : plus_minus_analysis_high_low_copy_species ) {
1307 minus += "-" + s + " ";
1309 System.out.println( "Plus-minus analysis : " + plus1 + "&& " + plus0 + "&& " + minus );
1310 html_desc.append( "<tr><td>Plus-minus analysis:</td><td>" + plus1 + "&& " + plus0 + "&& " + minus
1311 + "</td></tr>" + nl );
1313 if ( cutoff_scores_file != null ) {
1314 System.out.println( "Cutoff scores file : " + cutoff_scores_file );
1315 html_desc.append( "<tr><td>Cutoff scores file:</td><td>" + cutoff_scores_file + "</td></tr>" + nl );
1317 if ( e_value_max >= 0.0 ) {
1318 System.out.println( "E-value maximum (inclusive) : " + e_value_max );
1319 html_desc.append( "<tr><td>E-value maximum (inclusive):</td><td>" + e_value_max + "</td></tr>" + nl );
1321 if ( output_protein_lists_for_all_domains ) {
1322 System.out.println( "Domain E-value max : " + output_list_of_all_proteins_per_domain_e_value_max );
1323 html_desc.append( "<tr><td>Protein lists: E-value maximum per domain (inclusive):</td><td>"
1324 + output_list_of_all_proteins_per_domain_e_value_max + "</td></tr>" + nl );
1326 System.out.println( "Ignore DUFs : " + ignore_dufs );
1327 if ( ignore_virus_like_ids ) {
1328 System.out.println( "Ignore virus like ids : " + ignore_virus_like_ids );
1329 html_desc.append( "<tr><td>Ignore virus, phage, transposition related ids:</td><td>"
1330 + ignore_virus_like_ids + "</td></tr>" + nl );
1332 html_desc.append( "<tr><td>Ignore DUFs:</td><td>" + ignore_dufs + "</td></tr>" + nl );
1333 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
1334 System.out.println( "Max allowed domain overlap : " + max_allowed_overlap );
1335 html_desc.append( "<tr><td>Max allowed domain overlap:</td><td>" + max_allowed_overlap + "</td></tr>" + nl );
1337 if ( no_engulfing_overlaps ) {
1338 System.out.println( "Ignore engulfed domains : " + no_engulfing_overlaps );
1339 html_desc.append( "<tr><td>Ignore (lower confidence) engulfed domains:</td><td>" + no_engulfing_overlaps
1340 + "</td></tr>" + nl );
1342 System.out.println( "Ignore singlet domains : " + ignore_domains_without_combs_in_all_spec );
1344 .append( "<tr><td>Ignore singlet domains for domain combination similarity analyses (not for parsimony analyses):</td><td>"
1345 + ignore_domains_without_combs_in_all_spec + "</td></tr>" + nl );
1346 System.out.println( "Ignore species specific doms: " + ignore_species_specific_domains );
1348 .append( "<tr><td>Ignore species specific domains for domain combination similarity analyses (not for parsimony analyses):</td><td>"
1349 + ignore_species_specific_domains + "</td></tr>" + nl );
1350 System.out.println( "Ignore combination with self: " + ignore_combination_with_same );
1351 html_desc.append( "<tr><td>Ignore combination with self for domain combination similarity analyses:</td><td>"
1352 + ignore_combination_with_same + "</td></tr>" + nl );
1353 System.out.println( "Consider directedness : "
1354 + ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) );
1355 html_desc.append( "<tr><td>Consider directedness of binary domain combinations:</td><td>"
1356 + ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) + "</td></tr>" + nl );
1357 if ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) {
1358 System.out.println( "Consider adjacency : "
1359 + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) );
1360 html_desc.append( "<tr><td>Consider djacency of binary domain combinations:</td><td>"
1361 + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) + "</td></tr>"
1364 System.out.println( "Use last in Fitch parimony : " + use_last_in_fitch_parsimony );
1365 html_desc.append( "<tr><td>Use last in Fitch parimon:</td><td>" + use_last_in_fitch_parsimony + "</td></tr>"
1367 System.out.println( "Write to Nexus files : " + write_to_nexus );
1368 html_desc.append( "<tr><td>Write to Nexus files:</td><td>" + write_to_nexus + "</td></tr>" + nl );
1369 System.out.println( "DC regain prot stats : " + perform_dc_regain_proteins_stats );
1370 html_desc.append( "<tr><td>DC regain prot stats:</td><td>" + perform_dc_regain_proteins_stats + "</td></tr>"
1372 System.out.println( "DA analysis : " + da_analysis );
1373 html_desc.append( "<tr><td>DA analysis :</td><td>" + da_analysis + "</td></tr>" + nl );
1374 System.out.print( "Domain counts sort order : " );
1375 html_desc.append( "<tr><td>Domain counts sort order:</td><td>" );
1376 switch ( dc_sort_order ) {
1377 case ALPHABETICAL_KEY_ID:
1378 System.out.println( "alphabetical" );
1379 html_desc.append( "alphabetical" + "</td></tr>" + nl );
1381 case KEY_DOMAIN_COUNT:
1382 System.out.println( "domain count" );
1383 html_desc.append( "domain count" + "</td></tr>" + nl );
1385 case KEY_DOMAIN_PROTEINS_COUNT:
1386 System.out.println( "domain proteins count" );
1387 html_desc.append( "domain proteins count" + "</td></tr>" + nl );
1389 case COMBINATIONS_COUNT:
1390 System.out.println( "domain combinations count" );
1391 html_desc.append( "domain combinations count" + "</td></tr>" + nl );
1394 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for dc sort order" );
1396 if ( domain_id_to_go_ids_map != null ) {
1397 System.out.println( "Pfam to GO mappings from : " + pfam_to_go_file + " [" + domain_id_to_go_ids_count
1399 html_desc.append( "<tr><td>Pfam to GO mappings from:</td><td>" + pfam_to_go_file + " ["
1400 + domain_id_to_go_ids_count + " mappings]" + "</td></tr>" + nl );
1402 if ( go_terms != null ) {
1403 System.out.println( "GO terms from : " + go_obo_file + " [" + go_terms.size() + " terms]" );
1404 html_desc.append( "<tr><td>GO terms from:</td><td>" + go_obo_file + " [" + go_terms.size() + " terms]"
1405 + "</td></tr>" + nl );
1407 if ( go_namespace_limit != null ) {
1408 System.out.println( "Limit GO terms to : " + go_namespace_limit.toString() );
1409 html_desc.append( "<tr><td>Limit GO terms to</td><td>" + go_namespace_limit + "</td></tr>" + nl );
1411 if ( perform_pwc ) {
1412 System.out.println( "Suffix for PWC files : " + automated_pairwise_comparison_suffix );
1413 html_desc.append( "<tr><td>Suffix for PWC files</td><td>" + automated_pairwise_comparison_suffix
1414 + "</td></tr>" + nl );
1416 if ( out_dir != null ) {
1417 System.out.println( "Output directory : " + out_dir );
1419 if ( query_domain_ids != null ) {
1420 System.out.println( "Query domains (ordered) : " + query_domain_ids );
1421 html_desc.append( "<tr><td></td><td>" + query_domain_ids + "</td></tr>" + nl );
1423 System.out.println( "Write similarities to : " + output_file );
1424 System.out.print( " Scoring method : " );
1425 html_desc.append( "<tr><td>Scoring method:</td><td>" );
1426 switch ( scoring ) {
1428 System.out.println( "domain combinations based" );
1429 html_desc.append( "domain combinations based" + "</td></tr>" + nl );
1432 System.out.println( "domain counts based" );
1433 html_desc.append( "domain counts based" + "</td></tr>" + nl );
1436 System.out.println( "domain proteins counts based" );
1437 html_desc.append( "domain proteins counts based" + "</td></tr>" + nl );
1440 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for scoring" );
1442 System.out.print( " Sort by : " );
1443 html_desc.append( "<tr><td>Sort by:</td><td>" );
1444 switch ( domain_similarity_sort_field ) {
1446 System.out.print( "score minimum" );
1447 html_desc.append( "score minimum" );
1450 System.out.print( "score maximum" );
1451 html_desc.append( "score maximum" );
1454 System.out.print( "score mean" );
1455 html_desc.append( "score mean" );
1458 System.out.print( "score standard deviation" );
1459 html_desc.append( "score standard deviation" );
1462 System.out.print( "species number" );
1463 html_desc.append( "species number" );
1466 System.out.print( "alphabetical domain identifier" );
1467 html_desc.append( "alphabetical domain identifier" );
1469 case MAX_DIFFERENCE:
1470 System.out.print( "(maximal) difference" );
1471 html_desc.append( "(maximal) difference" );
1473 case ABS_MAX_COUNTS_DIFFERENCE:
1474 System.out.print( "absolute (maximal) counts difference" );
1475 html_desc.append( "absolute (maximal) counts difference" );
1477 case MAX_COUNTS_DIFFERENCE:
1478 System.out.print( "(maximal) counts difference" );
1479 html_desc.append( "(maximal) counts difference" );
1482 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for similarities" );
1484 if ( sort_by_species_count_first ) {
1485 System.out.println( " (sort by species count first)" );
1486 html_desc.append( " (sort by species count first)" );
1489 System.out.println();
1491 html_desc.append( "</td></tr>" + nl );
1492 System.out.print( " Detailedness : " );
1493 switch ( detailedness ) {
1495 System.out.println( "basic" );
1497 case LIST_COMBINING_DOMAIN_FOR_EACH_SPECIES:
1498 System.out.println( "list combining domains for each species" );
1501 System.out.println( "punctilious" );
1504 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for detailedness" );
1506 System.out.print( " Print option : " );
1507 switch ( domain_similarity_print_option ) {
1509 System.out.println( "HTML" );
1511 case SIMPLE_TAB_DELIMITED:
1512 System.out.println( "simple tab delimited" );
1515 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for print option" );
1517 System.out.print( " Species matrix : " + species_matrix );
1518 System.out.println();
1519 final File dc_data_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file + DATA_FILE_SUFFIX );
1520 System.out.println( "Domain comb data output : " + dc_data_file );
1521 html_desc.append( "<tr><td>Domain combination data output:</td><td> " + dc_data_file + " </td></tr>" );
1522 System.out.println();
1523 if ( perform_pwc ) {
1524 System.out.println( "Pairwise comparisons: " );
1525 html_desc.append( "<tr><td>Pairwise comparisons:</td><td></td></tr>" );
1526 System.out.print( " Sort by : " );
1527 html_desc.append( "<tr><td>Sort by:</td><td>" );
1528 switch ( domain_similarity_sort_field_for_automated_pwc ) {
1530 System.out.print( "score mean" );
1531 html_desc.append( "score mean" );
1534 System.out.print( "alphabetical domain identifier" );
1535 html_desc.append( "alphabetical domain identifier" );
1537 case MAX_DIFFERENCE:
1538 System.out.print( "difference" );
1539 html_desc.append( "difference" );
1541 case ABS_MAX_COUNTS_DIFFERENCE:
1542 System.out.print( "absolute counts difference" );
1543 html_desc.append( "absolute counts difference" );
1545 case MAX_COUNTS_DIFFERENCE:
1546 System.out.print( "counts difference" );
1547 html_desc.append( "counts difference" );
1551 .unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for similarities" );
1553 System.out.println();
1554 html_desc.append( "</td></tr>" + nl );
1555 if ( ( intrees != null ) && ( intrees.length > 0 ) ) {
1556 for( final File intree_file : intree_files ) {
1557 html_desc.append( "<tr><td>Intree for gain/loss parsimony analysis:</td><td>" + intree_file
1558 + "</td></tr>" + nl );
1559 System.out.println( " Intree for gain/loss pars.: " + intree_file );
1562 if ( radomize_fitch_parsimony ) {
1563 html_desc.append( "<tr><td> Random number seed for Fitch parsimony analysis:</td><td>"
1564 + random_number_seed_for_fitch_parsimony + "</td></tr>" + nl );
1565 System.out.println( " Random number seed : " + random_number_seed_for_fitch_parsimony );
1567 if ( ( domain_id_to_secondary_features_maps != null ) && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
1568 for( int i = 0; i < secondary_features_map_files.length; i++ ) {
1569 html_desc.append( "<tr><td>Secondary features map file:</td><td>"
1570 + secondary_features_map_files[ i ] + "</td></tr>" + nl );
1571 System.out.println( "Secondary features map file : " + secondary_features_map_files[ i ]
1572 + " [mappings for " + domain_id_to_secondary_features_maps[ i ].size() + " domain ids]" );
1574 System.out.println();
1575 System.out.println( "Domain ids to secondary features map:" );
1576 for( final String domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
1577 System.out.print( domain_id );
1578 System.out.print( " => " );
1579 for( final String sec : domain_id_to_secondary_features_maps[ i ].get( domain_id ) ) {
1580 System.out.print( sec );
1581 System.out.print( " " );
1583 System.out.println();
1588 } // if ( perform_pwc ) {
1589 System.out.println();
1590 html_desc.append( "<tr><td>Command line:</td><td>\n" + cla.getCommandLineArgsAsString() + "\n</td></tr>" + nl );
1591 System.out.println( "Command line : " + cla.getCommandLineArgsAsString() );
1592 BufferedWriter[] query_domains_writer_ary = null;
1593 List<String>[] query_domain_ids_array = null;
1594 if ( query_domain_ids != null ) {
1595 final String[] query_domain_ids_str_array = query_domain_ids.split( "#" );
1596 query_domain_ids_array = new ArrayList[ query_domain_ids_str_array.length ];
1597 query_domains_writer_ary = new BufferedWriter[ query_domain_ids_str_array.length ];
1598 for( int i = 0; i < query_domain_ids_str_array.length; i++ ) {
1599 String query_domain_ids_str = query_domain_ids_str_array[ i ];
1600 final String[] query_domain_ids_str_ary = query_domain_ids_str.split( "~" );
1601 final List<String> query = new ArrayList<String>();
1602 for( final String element : query_domain_ids_str_ary ) {
1603 query.add( element );
1605 query_domain_ids_array[ i ] = query;
1606 query_domain_ids_str = query_domain_ids_str.replace( '~', '_' );
1607 String protein_names_writer_str = query_domain_ids_str + surfacing.SEQ_EXTRACT_SUFFIX;
1608 if ( out_dir != null ) {
1609 protein_names_writer_str = out_dir + ForesterUtil.FILE_SEPARATOR + protein_names_writer_str;
1612 query_domains_writer_ary[ i ] = new BufferedWriter( new FileWriter( protein_names_writer_str ) );
1614 catch ( final IOException e ) {
1615 ForesterUtil.fatalError( surfacing.PRG_NAME, "Could not open [" + protein_names_writer_str + "]: "
1616 + e.getLocalizedMessage() );
1620 SortedMap<Species, List<Protein>> protein_lists_per_species = null; //This will only be created if neede.
1621 boolean need_protein_lists_per_species = false;
1622 if ( ( plus_minus_analysis_high_copy_base_species.size() > 0 ) || output_protein_lists_for_all_domains ) {
1623 need_protein_lists_per_species = true;
1625 if ( need_protein_lists_per_species ) {
1626 protein_lists_per_species = new TreeMap<Species, List<Protein>>();
1628 List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
1629 final SortedSet<String> all_domains_encountered = new TreeSet<String>();
1630 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered = new TreeSet<BinaryDomainCombination>();
1631 List<BinaryDomainCombination> all_bin_domain_combinations_gained_fitch = null;
1632 List<BinaryDomainCombination> all_bin_domain_combinations_lost_fitch = null;
1633 if ( ( intrees != null ) && ( intrees.length == 1 ) ) {
1634 all_bin_domain_combinations_gained_fitch = new ArrayList<BinaryDomainCombination>();
1635 all_bin_domain_combinations_lost_fitch = new ArrayList<BinaryDomainCombination>();
1637 final DomainLengthsTable domain_lengths_table = new DomainLengthsTable();
1638 final File per_genome_domain_promiscuity_statistics_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR
1639 + output_file + D_PROMISCUITY_FILE_SUFFIX );
1640 BufferedWriter per_genome_domain_promiscuity_statistics_writer = null;
1642 per_genome_domain_promiscuity_statistics_writer = new BufferedWriter( new FileWriter( per_genome_domain_promiscuity_statistics_file ) );
1643 per_genome_domain_promiscuity_statistics_writer.write( "Species:\t" );
1644 per_genome_domain_promiscuity_statistics_writer.write( "Mean:\t" );
1645 per_genome_domain_promiscuity_statistics_writer.write( "SD:\t" );
1646 per_genome_domain_promiscuity_statistics_writer.write( "Median:\t" );
1647 per_genome_domain_promiscuity_statistics_writer.write( "Min:\t" );
1648 per_genome_domain_promiscuity_statistics_writer.write( "Max:\t" );
1649 per_genome_domain_promiscuity_statistics_writer.write( "N:\t" );
1650 per_genome_domain_promiscuity_statistics_writer.write( "Max Promiscuous Domains:"
1651 + ForesterUtil.LINE_SEPARATOR );
1653 catch ( final IOException e2 ) {
1654 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1656 final File log_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file + LOG_FILE_SUFFIX );
1657 BufferedWriter log_writer = null;
1659 log_writer = new BufferedWriter( new FileWriter( log_file ) );
1661 catch ( final IOException e2 ) {
1662 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1664 BufferedWriter dc_data_writer = null;
1666 dc_data_writer = new BufferedWriter( new FileWriter( dc_data_file ) );
1667 dc_data_writer.write( DATA_FILE_DESC );
1668 dc_data_writer.write( ForesterUtil.LINE_SEPARATOR );
1670 catch ( final IOException e2 ) {
1671 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1673 final DescriptiveStatistics protein_coverage_stats = new BasicDescriptiveStatistics();
1674 final DescriptiveStatistics all_genomes_domains_per_potein_stats = new BasicDescriptiveStatistics();
1675 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo = new TreeMap<Integer, Integer>();
1676 final SortedSet<String> domains_which_are_always_single = new TreeSet<String>();
1677 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not = new TreeSet<String>();
1678 final SortedSet<String> domains_which_never_single = new TreeSet<String>();
1679 BufferedWriter domains_per_potein_stats_writer = null;
1681 domains_per_potein_stats_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
1682 + output_file + "_domains_per_potein_stats.txt" ) );
1683 domains_per_potein_stats_writer.write( "Genome" );
1684 domains_per_potein_stats_writer.write( "\t" );
1685 domains_per_potein_stats_writer.write( "Mean" );
1686 domains_per_potein_stats_writer.write( "\t" );
1687 domains_per_potein_stats_writer.write( "SD" );
1688 domains_per_potein_stats_writer.write( "\t" );
1689 domains_per_potein_stats_writer.write( "Median" );
1690 domains_per_potein_stats_writer.write( "\t" );
1691 domains_per_potein_stats_writer.write( "N" );
1692 domains_per_potein_stats_writer.write( "\t" );
1693 domains_per_potein_stats_writer.write( "Min" );
1694 domains_per_potein_stats_writer.write( "\t" );
1695 domains_per_potein_stats_writer.write( "Max" );
1696 domains_per_potein_stats_writer.write( "\n" );
1698 catch ( final IOException e3 ) {
1699 e3.printStackTrace();
1701 Map<String, DescriptiveStatistics> protein_length_stats_by_dc = null;
1702 Map<String, DescriptiveStatistics> domain_number_stats_by_dc = null;
1703 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain = new HashMap<String, DescriptiveStatistics>();
1704 if ( perform_dc_regain_proteins_stats ) {
1705 protein_length_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
1706 domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
1709 final SortedMap<String, Set<String>> distinct_domain_architecutures_per_genome = new TreeMap<String, Set<String>>();
1710 final SortedMap<String, Integer> distinct_domain_architecuture_counts = new TreeMap<String, Integer>();
1711 for( int i = 0; i < number_of_genomes; ++i ) {
1712 System.out.println();
1713 System.out.println( ( i + 1 ) + "/" + number_of_genomes );
1714 log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
1715 System.out.println( "Processing : " + input_file_properties[ i ][ 1 ]
1716 + " [" + input_file_properties[ i ][ 0 ] + "]" );
1717 log( "Genome : " + input_file_properties[ i ][ 1 ] + " ["
1718 + input_file_properties[ i ][ 0 ] + "]", log_writer );
1719 HmmscanPerDomainTableParser parser = null;
1720 INDIVIDUAL_SCORE_CUTOFF ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF.NONE;
1721 if ( individual_score_cutoffs != null ) {
1722 ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF_DEFAULT;
1724 if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
1725 || ( negative_domains_filter_file != null ) ) {
1726 HmmscanPerDomainTableParser.FilterType filter_type = HmmscanPerDomainTableParser.FilterType.NONE;
1727 if ( positive_filter_file != null ) {
1728 filter_type = HmmscanPerDomainTableParser.FilterType.POSITIVE_PROTEIN;
1730 else if ( negative_filter_file != null ) {
1731 filter_type = HmmscanPerDomainTableParser.FilterType.NEGATIVE_PROTEIN;
1733 else if ( negative_domains_filter_file != null ) {
1734 filter_type = HmmscanPerDomainTableParser.FilterType.NEGATIVE_DOMAIN;
1736 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
1737 input_file_properties[ i ][ 1 ],
1744 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
1745 input_file_properties[ i ][ 1 ],
1749 if ( e_value_max >= 0.0 ) {
1750 parser.setEValueMaximum( e_value_max );
1752 parser.setIgnoreDufs( ignore_dufs );
1753 parser.setIgnoreVirusLikeIds( ignore_virus_like_ids );
1754 parser.setIgnoreEngulfedDomains( no_engulfing_overlaps );
1755 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
1756 parser.setMaxAllowedOverlap( max_allowed_overlap );
1758 parser.setReturnType( HmmscanPerDomainTableParser.ReturnType.UNORDERED_PROTEIN_DOMAIN_COLLECTION_PER_PROTEIN );
1759 if ( individual_score_cutoffs != null ) {
1760 parser.setIndividualScoreCutoffs( individual_score_cutoffs );
1762 List<Protein> protein_list = null;
1764 protein_list = parser.parse();
1766 catch ( final IOException e ) {
1767 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1769 catch ( final Exception e ) {
1770 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, e.getMessage(), e );
1773 System.out.println( "Domains ignored due to negative domain filter: " );
1774 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToNegativeDomainFilterCountsMap() );
1775 System.out.println( "Domains ignored due to virus like id: " );
1776 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToVirusLikeIdCountsMap() );
1778 final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
1779 protein_coverage_stats.addValue( coverage );
1780 int distinct_das = -1;
1781 if ( da_analysis ) {
1782 final String genome = input_file_properties[ i ][ 0 ];
1783 distinct_das = SurfacingUtil.storeDomainArchitectures( genome,
1784 distinct_domain_architecutures_per_genome,
1786 distinct_domain_architecuture_counts );
1788 System.out.println( "Number of proteins encountered : " + parser.getProteinsEncountered() );
1789 log( "Number of proteins encountered : " + parser.getProteinsEncountered(), log_writer );
1790 System.out.println( "Number of proteins stored : " + protein_list.size() );
1791 log( "Number of proteins stored : " + protein_list.size(), log_writer );
1792 System.out.println( "Coverage : "
1793 + ForesterUtil.roundToInt( 100.0 * coverage ) + "%" );
1794 log( "Coverage : " + ForesterUtil.roundToInt( 100.0 * coverage )
1795 + "%", log_writer );
1796 System.out.println( "Domains encountered : " + parser.getDomainsEncountered() );
1797 log( "Domains encountered : " + parser.getDomainsEncountered(), log_writer );
1798 System.out.println( "Domains stored : " + parser.getDomainsStored() );
1799 log( "Domains stored : " + parser.getDomainsStored(), log_writer );
1800 System.out.println( "Distinct domains stored : "
1801 + parser.getDomainsStoredSet().size() );
1802 log( "Distinct domains stored : " + parser.getDomainsStoredSet().size(), log_writer );
1803 System.out.println( "Domains ignored due to individual score cutoffs: "
1804 + parser.getDomainsIgnoredDueToIndividualScoreCutoff() );
1805 log( "Domains ignored due to individual score cutoffs: "
1806 + parser.getDomainsIgnoredDueToIndividualScoreCutoff(),
1808 System.out.println( "Domains ignored due to E-value : "
1809 + parser.getDomainsIgnoredDueToEval() );
1810 log( "Domains ignored due to E-value : " + parser.getDomainsIgnoredDueToEval(), log_writer );
1811 System.out.println( "Domains ignored due to DUF designation : "
1812 + parser.getDomainsIgnoredDueToDuf() );
1813 log( "Domains ignored due to DUF designation : " + parser.getDomainsIgnoredDueToDuf(), log_writer );
1814 if ( ignore_virus_like_ids ) {
1815 System.out.println( "Domains ignored due virus like ids : "
1816 + parser.getDomainsIgnoredDueToVirusLikeIds() );
1817 log( "Domains ignored due virus like ids : " + parser.getDomainsIgnoredDueToVirusLikeIds(),
1820 System.out.println( "Domains ignored due negative domain filter : "
1821 + parser.getDomainsIgnoredDueToNegativeDomainFilter() );
1822 log( "Domains ignored due negative domain filter : "
1823 + parser.getDomainsIgnoredDueToNegativeDomainFilter(),
1825 System.out.println( "Domains ignored due to overlap : "
1826 + parser.getDomainsIgnoredDueToOverlap() );
1827 log( "Domains ignored due to overlap : " + parser.getDomainsIgnoredDueToOverlap(),
1829 if ( negative_filter_file != null ) {
1830 System.out.println( "Proteins ignored due to negative filter : "
1831 + parser.getProteinsIgnoredDueToFilter() );
1832 log( "Proteins ignored due to negative filter : " + parser.getProteinsIgnoredDueToFilter(),
1835 if ( positive_filter_file != null ) {
1836 System.out.println( "Proteins ignored due to positive filter : "
1837 + parser.getProteinsIgnoredDueToFilter() );
1838 log( "Proteins ignored due to positive filter : " + parser.getProteinsIgnoredDueToFilter(),
1841 if ( da_analysis ) {
1842 System.out.println( "Distinct domain architectures stored : " + distinct_das );
1843 log( "Distinct domain architectures stored : " + distinct_das, log_writer );
1845 System.out.println( "Time for processing : " + parser.getTime() + "ms" );
1846 log( "", log_writer );
1847 html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
1848 + input_file_properties[ i ][ 1 ] + "]" + ":</td><td>domains analyzed: "
1849 + parser.getDomainsStored() + "; domains ignored: [ind score cutoffs: "
1850 + parser.getDomainsIgnoredDueToIndividualScoreCutoff() + "] [E-value cutoff: "
1851 + parser.getDomainsIgnoredDueToEval() + "] [DUF: " + parser.getDomainsIgnoredDueToDuf()
1852 + "] [virus like ids: " + parser.getDomainsIgnoredDueToVirusLikeIds()
1853 + "] [negative domain filter: " + parser.getDomainsIgnoredDueToNegativeDomainFilter()
1854 + "] [overlap: " + parser.getDomainsIgnoredDueToOverlap() + "]" );
1855 if ( negative_filter_file != null ) {
1856 html_desc.append( "; proteins ignored due to negative filter: "
1857 + parser.getProteinsIgnoredDueToFilter() );
1859 if ( positive_filter_file != null ) {
1860 html_desc.append( "; proteins ignored due to positive filter: "
1861 + parser.getProteinsIgnoredDueToFilter() );
1863 html_desc.append( "</td></tr>" + nl );
1866 for( final Protein protein : protein_list ) {
1867 dc_data_writer.write( SurfacingUtil.proteinToDomainCombinations( protein, count + "", "\t" )
1870 for( final Domain d : protein.getProteinDomains() ) {
1871 final String d_str = d.getDomainId().toString();
1872 if ( !domain_length_stats_by_domain.containsKey( d_str ) ) {
1873 domain_length_stats_by_domain.put( d_str, new BasicDescriptiveStatistics() );
1875 domain_length_stats_by_domain.get( d_str ).addValue( d.getLength() );
1879 catch ( final IOException e ) {
1880 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
1882 SurfacingUtil.domainsPerProteinsStatistics( input_file_properties[ i ][ 1 ],
1884 all_genomes_domains_per_potein_stats,
1885 all_genomes_domains_per_potein_histo,
1886 domains_which_are_always_single,
1887 domains_which_are_sometimes_single_sometimes_not,
1888 domains_which_never_single,
1889 domains_per_potein_stats_writer );
1890 domain_lengths_table.addLengths( protein_list );
1891 if ( !da_analysis ) {
1892 gwcd_list.add( BasicGenomeWideCombinableDomains
1893 .createInstance( protein_list,
1894 ignore_combination_with_same,
1895 new BasicSpecies( input_file_properties[ i ][ 1 ] ),
1896 domain_id_to_go_ids_map,
1898 protein_length_stats_by_dc,
1899 domain_number_stats_by_dc ) );
1900 if ( gwcd_list.get( i ).getSize() > 0 ) {
1901 SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
1903 per_genome_domain_promiscuity_statistics_writer,
1907 if ( output_binary_domain_combinationsfor_graph_analysis ) {
1908 SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
1914 SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
1915 SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
1916 all_bin_domain_combinations_encountered );
1919 if ( query_domains_writer_ary != null ) {
1920 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
1922 SurfacingUtil.extractProteinNames( protein_list,
1923 query_domain_ids_array[ j ],
1924 query_domains_writer_ary[ j ],
1926 LIMIT_SPEC_FOR_PROT_EX );
1927 query_domains_writer_ary[ j ].flush();
1929 catch ( final IOException e ) {
1930 e.printStackTrace();
1934 if ( need_protein_lists_per_species ) {
1935 protein_lists_per_species.put( new BasicSpecies( input_file_properties[ i ][ 1 ] ), protein_list );
1940 catch ( final IOException e2 ) {
1941 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
1944 } // for( int i = 0; i < number_of_genomes; ++i ) {
1945 ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
1946 + per_genome_domain_promiscuity_statistics_file );
1948 if ( da_analysis ) {
1949 SurfacingUtil.performDomainArchitectureAnalysis( distinct_domain_architecutures_per_genome,
1950 distinct_domain_architecuture_counts,
1952 new File( out_dir.toString() + "/" + output_file
1953 + "_DA_counts.txt" ),
1954 new File( out_dir.toString() + "/" + output_file
1955 + "_unique_DAs.txt" ) );
1956 distinct_domain_architecutures_per_genome.clear();
1957 distinct_domain_architecuture_counts.clear();
1961 domains_per_potein_stats_writer.write( "ALL" );
1962 domains_per_potein_stats_writer.write( "\t" );
1963 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.arithmeticMean() + "" );
1964 domains_per_potein_stats_writer.write( "\t" );
1965 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.sampleStandardDeviation() + "" );
1966 domains_per_potein_stats_writer.write( "\t" );
1967 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.median() + "" );
1968 domains_per_potein_stats_writer.write( "\t" );
1969 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getN() + "" );
1970 domains_per_potein_stats_writer.write( "\t" );
1971 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMin() + "" );
1972 domains_per_potein_stats_writer.write( "\t" );
1973 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMax() + "" );
1974 domains_per_potein_stats_writer.write( "\n" );
1975 domains_per_potein_stats_writer.close();
1976 printOutPercentageOfMultidomainProteins( all_genomes_domains_per_potein_histo, log_writer );
1977 ForesterUtil.map2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1978 + "_all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
1979 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1980 + "_domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
1981 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1982 + "_domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
1983 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1984 + "_domains_always_combined.txt" ), domains_which_never_single, "\n" );
1985 ForesterUtil.programMessage( PRG_NAME,
1986 "Average of proteins with a least one domain assigned: "
1987 + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
1988 + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
1989 ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: "
1990 + ( 100 * protein_coverage_stats.getMin() ) + "%-" + ( 100 * protein_coverage_stats.getMax() )
1992 log( "Average of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.arithmeticMean() )
1993 + "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
1994 log( "Range of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.getMin() ) + "%-"
1995 + ( 100 * protein_coverage_stats.getMax() ) + "%", log_writer );
1997 catch ( final IOException e2 ) {
1998 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2000 if ( query_domains_writer_ary != null ) {
2001 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
2003 query_domains_writer_ary[ j ].close();
2005 catch ( final IOException e ) {
2006 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
2011 per_genome_domain_promiscuity_statistics_writer.close();
2012 dc_data_writer.close();
2015 catch ( final IOException e2 ) {
2016 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2018 if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
2020 SurfacingUtil.executeDomainLengthAnalysis( input_file_properties,
2022 domain_lengths_table,
2023 domain_lengths_analysis_outfile );
2025 catch ( final IOException e1 ) {
2026 ForesterUtil.fatalError( surfacing.PRG_NAME, e1.toString() );
2028 System.out.println();
2029 ForesterUtil.programMessage( PRG_NAME, "Wrote domain length data to: " + domain_lengths_analysis_outfile );
2030 System.out.println();
2032 final long analysis_start_time = new Date().getTime();
2033 PairwiseDomainSimilarityCalculator pw_calc = null;
2034 // double[] values_for_all_scores_histogram = null;
2035 final DomainSimilarityCalculator calc = new BasicDomainSimilarityCalculator( domain_similarity_sort_field,
2036 sort_by_species_count_first,
2037 number_of_genomes == 2 );
2038 switch ( scoring ) {
2040 pw_calc = new CombinationsBasedPairwiseDomainSimilarityCalculator();
2043 pw_calc = new DomainCountsBasedPairwiseSimilarityCalculator();
2046 pw_calc = new ProteinCountsBasedPairwiseDomainSimilarityCalculator();
2049 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for scoring" );
2051 DomainSimilarityCalculator.GoAnnotationOutput go_annotation_output = DomainSimilarityCalculator.GoAnnotationOutput.NONE;
2052 if ( domain_id_to_go_ids_map != null ) {
2053 go_annotation_output = DomainSimilarityCalculator.GoAnnotationOutput.ALL;
2055 final SortedSet<DomainSimilarity> similarities = calc
2056 .calculateSimilarities( pw_calc,
2058 ignore_domains_without_combs_in_all_spec,
2059 ignore_species_specific_domains );
2060 SurfacingUtil.decoratePrintableDomainSimilarities( similarities,
2062 go_annotation_output,
2064 go_namespace_limit );
2065 final Map<String, Integer> tax_code_to_id_map = SurfacingUtil.createTaxCodeToIdMap( intrees[ 0 ] );
2067 String my_outfile = output_file.toString();
2068 Map<Character, Writer> split_writers = null;
2069 Writer writer = null;
2070 if ( similarities.size() > MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING ) {
2071 if ( my_outfile.endsWith( ".html" ) ) {
2072 my_outfile = my_outfile.substring( 0, my_outfile.length() - 5 );
2074 split_writers = new HashMap<Character, Writer>();
2075 createSplitWriters( out_dir, my_outfile, split_writers );
2077 else if ( !my_outfile.endsWith( ".html" ) ) {
2078 my_outfile += ".html";
2079 writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) );
2081 List<Species> species_order = null;
2082 if ( species_matrix ) {
2083 species_order = new ArrayList<Species>();
2084 for( int i = 0; i < number_of_genomes; i++ ) {
2085 species_order.add( new BasicSpecies( input_file_properties[ i ][ 1 ] ) );
2088 html_desc.append( "<tr><td>Sum of all distinct binary combinations:</td><td>"
2089 + all_bin_domain_combinations_encountered.size() + "</td></tr>" + nl );
2090 html_desc.append( "<tr><td>Sum of all distinct domains:</td><td>" + all_domains_encountered.size()
2091 + "</td></tr>" + nl );
2092 html_desc.append( "<tr><td>Analysis date/time:</td><td>"
2093 + new java.text.SimpleDateFormat( "yyyy.MM.dd HH:mm:ss" ).format( new java.util.Date() )
2094 + "</td></tr>" + nl );
2095 html_desc.append( "</table>" + nl );
2096 final DescriptiveStatistics pw_stats = SurfacingUtil
2097 .writeDomainSimilaritiesToFile( html_desc,
2098 new StringBuilder( number_of_genomes + " genomes" ),
2102 number_of_genomes == 2,
2104 domain_similarity_print_option,
2105 domain_similarity_sort_field,
2108 tax_code_to_id_map );
2109 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote main output (includes domain similarities) to: \""
2110 + ( out_dir == null ? my_outfile : out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) + "\"" );
2112 catch ( final IOException e ) {
2113 ForesterUtil.fatalError( surfacing.PRG_NAME, "Failed to write similarites to: \"" + output_file + "\" ["
2114 + e.getMessage() + "]" );
2116 System.out.println();
2117 final Species[] species = new Species[ number_of_genomes ];
2118 for( int i = 0; i < number_of_genomes; ++i ) {
2119 species[ i ] = new BasicSpecies( input_file_properties[ i ][ 1 ] );
2121 List<Phylogeny> inferred_trees = null;
2122 if ( ( number_of_genomes > 2 ) && perform_pwc ) {
2123 final PairwiseGenomeComparator pwgc = new PairwiseGenomeComparator();
2124 pwgc.performPairwiseComparisons( html_desc,
2125 sort_by_species_count_first,
2127 ignore_domains_without_combs_in_all_spec,
2128 ignore_species_specific_domains,
2129 domain_similarity_sort_field_for_automated_pwc,
2130 domain_similarity_print_option,
2132 domain_id_to_go_ids_map,
2139 automated_pairwise_comparison_suffix,
2141 surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX,
2145 tax_code_to_id_map );
2146 String matrix_output_file = new String( output_file.toString() );
2147 if ( matrix_output_file.indexOf( '.' ) > 1 ) {
2148 matrix_output_file = matrix_output_file.substring( 0, matrix_output_file.indexOf( '.' ) );
2150 if ( out_dir != null ) {
2151 matrix_output_file = out_dir + ForesterUtil.FILE_SEPARATOR + matrix_output_file;
2152 output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
2154 SurfacingUtil.writeMatrixToFile( new File( matrix_output_file
2155 + surfacing.MATRIX_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX ), pwgc.getDomainDistanceScoresMeans() );
2157 .writeMatrixToFile( new File( matrix_output_file
2158 + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
2159 pwgc.getSharedBinaryCombinationsBasedDistances() );
2160 SurfacingUtil.writeMatrixToFile( new File( matrix_output_file
2161 + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
2162 pwgc.getSharedDomainsBasedDistances() );
2163 final Phylogeny nj_gd = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2164 + surfacing.NJ_TREE_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX ), pwgc.getDomainDistanceScoresMeans()
2166 final Phylogeny nj_bc = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2167 + surfacing.NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ), pwgc
2168 .getSharedBinaryCombinationsBasedDistances().get( 0 ) );
2169 final Phylogeny nj_d = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2170 + surfacing.NJ_TREE_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ), pwgc
2171 .getSharedDomainsBasedDistances().get( 0 ) );
2172 inferred_trees = new ArrayList<Phylogeny>();
2173 inferred_trees.add( nj_gd );
2174 inferred_trees.add( nj_bc );
2175 inferred_trees.add( nj_d );
2176 } // if ( ( output_file != null ) && ( number_of_genomes > 2 ) && !isEmpty( automated_pairwise_comparison_suffix ) )
2177 if ( ( out_dir != null ) && ( !perform_pwc ) ) {
2178 output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
2180 if ( write_to_nexus ) {
2181 writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
2183 if ( ( ( intrees != null ) && ( intrees.length > 0 ) ) && ( number_of_genomes > 2 ) ) {
2184 final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
2186 max_allowed_overlap,
2187 no_engulfing_overlaps,
2191 if ( radomize_fitch_parsimony ) {
2192 s += random_number_seed_for_fitch_parsimony + "_";
2195 for( final Phylogeny intree : intrees ) {
2196 final String outfile_name = ForesterUtil.removeSuffix( output_file.toString() ) + s
2197 + ForesterUtil.removeSuffix( intree_files[ i ].toString() );
2198 final DomainParsimonyCalculator domain_parsimony = DomainParsimonyCalculator.createInstance( intree,
2200 SurfacingUtil.executeParsimonyAnalysis( random_number_seed_for_fitch_parsimony,
2201 radomize_fitch_parsimony,
2205 domain_id_to_go_ids_map,
2208 parameters_sb.toString(),
2209 domain_id_to_secondary_features_maps,
2210 positive_filter_file == null ? null : filter,
2211 output_binary_domain_combinationsfor_graph_analysis,
2212 all_bin_domain_combinations_gained_fitch,
2213 all_bin_domain_combinations_lost_fitch,
2215 protein_length_stats_by_dc,
2216 domain_number_stats_by_dc,
2217 domain_length_stats_by_domain,
2220 use_last_in_fitch_parsimony );
2221 // Listing of all domain combinations gained is only done if only one input tree is used.
2222 if ( ( domain_id_to_secondary_features_maps != null )
2223 && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
2225 for( final Map<String, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
2226 final Map<Species, MappingResults> mapping_results_map = new TreeMap<Species, MappingResults>();
2227 final DomainParsimonyCalculator secondary_features_parsimony = DomainParsimonyCalculator
2228 .createInstance( intree, gwcd_list, domain_id_to_secondary_features_map );
2230 .executeParsimonyAnalysisForSecondaryFeatures( outfile_name
2232 + secondary_features_map_files[ j++ ],
2233 secondary_features_parsimony,
2235 parameters_sb.toString(),
2236 mapping_results_map,
2237 use_last_in_fitch_parsimony );
2239 System.out.println();
2240 System.out.println( "Mapping to secondary features:" );
2241 for( final Species spec : mapping_results_map.keySet() ) {
2242 final MappingResults mapping_results = mapping_results_map.get( spec );
2243 final int total_domains = mapping_results.getSumOfFailures()
2244 + mapping_results.getSumOfSuccesses();
2245 System.out.print( spec + ":" );
2246 System.out.print( " mapped domains = " + mapping_results.getSumOfSuccesses() );
2247 System.out.print( ", not mapped domains = " + mapping_results.getSumOfFailures() );
2248 if ( total_domains > 0 ) {
2249 System.out.println( ", mapped ratio = "
2250 + ( ( 100 * mapping_results.getSumOfSuccesses() ) / total_domains ) + "%" );
2253 System.out.println( ", mapped ratio = n/a (total domains = 0 )" );
2260 } // for( final Phylogeny intree : intrees ) {
2262 if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
2263 executePlusMinusAnalysis( output_file,
2264 plus_minus_analysis_high_copy_base_species,
2265 plus_minus_analysis_high_copy_target_species,
2266 plus_minus_analysis_high_low_copy_species,
2268 protein_lists_per_species,
2269 domain_id_to_go_ids_map,
2271 plus_minus_analysis_numbers );
2273 if ( output_protein_lists_for_all_domains ) {
2274 writeProteinListsForAllSpecies( out_dir,
2275 protein_lists_per_species,
2277 output_list_of_all_proteins_per_domain_e_value_max );
2280 if ( all_bin_domain_combinations_gained_fitch != null ) {
2282 executeFitchGainsAnalysis( new File( output_file
2283 + surfacing.OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
2284 all_bin_domain_combinations_gained_fitch,
2285 all_domains_encountered.size(),
2286 all_bin_domain_combinations_encountered,
2289 catch ( final IOException e ) {
2290 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
2293 if ( all_bin_domain_combinations_lost_fitch != null ) {
2295 executeFitchGainsAnalysis( new File( output_file
2296 + surfacing.OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
2297 all_bin_domain_combinations_lost_fitch,
2298 all_domains_encountered.size(),
2299 all_bin_domain_combinations_encountered,
2302 catch ( final IOException e ) {
2303 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
2306 final Runtime rt = java.lang.Runtime.getRuntime();
2307 final long free_memory = rt.freeMemory() / 1000000;
2308 final long total_memory = rt.totalMemory() / 1000000;
2309 ForesterUtil.programMessage( PRG_NAME, "Time for analysis : " + ( new Date().getTime() - analysis_start_time )
2311 ForesterUtil.programMessage( PRG_NAME, "Total running time: " + ( new Date().getTime() - start_time ) + "ms " );
2312 ForesterUtil.programMessage( PRG_NAME, "Free memory : " + free_memory + "MB, total memory: "
2313 + total_memory + "MB" );
2314 ForesterUtil.programMessage( PRG_NAME, "If this application is useful to you, please cite:" );
2315 ForesterUtil.programMessage( PRG_NAME, surfacing.WWW );
2316 ForesterUtil.programMessage( PRG_NAME, "OK" );
2317 System.out.println();
2320 private static void createSplitWriters( final File out_dir,
2321 final String my_outfile,
2322 final Map<Character, Writer> split_writers ) throws IOException {
2323 split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2324 + "_domains_A.html" ) ) );
2325 split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2326 + "_domains_B.html" ) ) );
2327 split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2328 + "_domains_C.html" ) ) );
2329 split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2330 + "_domains_D.html" ) ) );
2331 split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2332 + "_domains_E.html" ) ) );
2333 split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2334 + "_domains_F.html" ) ) );
2335 split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2336 + "_domains_G.html" ) ) );
2337 split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2338 + "_domains_H.html" ) ) );
2339 split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2340 + "_domains_I.html" ) ) );
2341 split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2342 + "_domains_J.html" ) ) );
2343 split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2344 + "_domains_K.html" ) ) );
2345 split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2346 + "_domains_L.html" ) ) );
2347 split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2348 + "_domains_M.html" ) ) );
2349 split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2350 + "_domains_N.html" ) ) );
2351 split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2352 + "_domains_O.html" ) ) );
2353 split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2354 + "_domains_P.html" ) ) );
2355 split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2356 + "_domains_Q.html" ) ) );
2357 split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2358 + "_domains_R.html" ) ) );
2359 split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2360 + "_domains_S.html" ) ) );
2361 split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2362 + "_domains_T.html" ) ) );
2363 split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2364 + "_domains_U.html" ) ) );
2365 split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2366 + "_domains_V.html" ) ) );
2367 split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2368 + "_domains_W.html" ) ) );
2369 split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2370 + "_domains_X.html" ) ) );
2371 split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2372 + "_domains_Y.html" ) ) );
2373 split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2374 + "_domains_Z.html" ) ) );
2375 split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2376 + "_domains_0.html" ) ) );
2379 private static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
2380 final Writer log_writer ) {
2382 for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
2383 sum += entry.getValue();
2385 final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
2386 ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
2387 log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
    // Prepares the input species tree for parsimony analyses:
    // (1) verifies every genome name (input_file_properties[i][1]) matches at
    //     most one tree node, (2) fills in missing node names from taxonomy
    //     data, (3) prunes tree nodes not corresponding to any input genome,
    //     and (4) re-checks that each genome resolves to a unique node.
    // NOTE(review): several closing braces / control-flow lines are elided in
    // this view of the file; code lines are reproduced unchanged.
    private static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
                                                              final String[][] input_file_properties ) {
        final String[] genomes = new String[ input_file_properties.length ];
        for( int i = 0; i < input_file_properties.length; ++i ) {
            // A genome name matching more than one tree node is ambiguous -> abort.
            if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
                        + "] is not unique in input tree " + intree.getName() );
            genomes[ i ] = input_file_properties[ i ][ 1 ];
        // Name unnamed nodes from taxonomy: prefer taxonomy code, then
        // scientific name, then common name; abort if none is available.
        final PhylogenyNodeIterator it = intree.iteratorPostorder();
        while ( it.hasNext() ) {
            final PhylogenyNode n = it.next();
            if ( ForesterUtil.isEmpty( n.getName() ) ) {
                if ( n.getNodeData().isHasTaxonomy()
                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
                    n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
                else if ( n.getNodeData().isHasTaxonomy()
                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
                    n.setName( n.getNodeData().getTaxonomy().getScientificName() );
                else if ( n.getNodeData().isHasTaxonomy()
                        && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
                    n.setName( n.getNodeData().getTaxonomy().getCommonName() );
                        // No usable name at all -> cannot map this node.
                        .fatalError( surfacing.PRG_NAME,
                                     "node with no name, scientific name, common name, or taxonomy code present" );
        // Remove tree nodes that are not among the input genomes and report them.
        final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
        if ( igns.size() > 0 ) {
            System.out.println( "Not using the following " + igns.size() + " nodes:" );
            for( int i = 0; i < igns.size(); ++i ) {
                System.out.println( " " + i + ": " + igns.get( i ) );
            System.out.println( "--" );
        // Final sanity check: each genome must still resolve to exactly one node.
        for( final String[] input_file_propertie : input_file_properties ) {
                intree.getNode( input_file_propertie[ 1 ] );
            catch ( final IllegalArgumentException e ) {
                // getNode throws when the name is absent or ambiguous.
                ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
                        + "] not present/not unique in input tree" );
    // Prints the command-line usage line, the full list of supported options,
    // and two complete invocation examples to stdout. Output only; no state
    // is read or modified.
    // NOTE(review): some "System.out" receiver lines are elided in this view,
    // leaving orphaned ".println(...)" fragments; code is reproduced unchanged.
    private static void printHelp() {
        System.out.println();
        System.out.println( "Usage:" );
        System.out.println();
        System.out.println( "% java -Xms256m -Xmx512m -cp forester.jar org.forester.applications." + surfacing.PRG_NAME
                + " [options] <phylogen(y|ies) infile> [external node name 1] [name 2] ... [name n]" );
        System.out.println();
        System.out.println( " Note: This software might need a significant amount of memory (heap space);" );
                .println( "       hence use \"-Xms128m -Xmx512m\" (or more) to prevent a \"java.lang.OutOfMemoryError\"." );
        System.out.println();
        System.out.println( " Options: " );
        // -- similarity / detail options --
        System.out.println( surfacing.DETAILEDNESS_OPTION + ": level of detail for similarities output file (default:"
                + DETAILEDNESS_DEFAULT + ")" );
        System.out.println( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION
                + ": to ignore combinations with self (default: not to ignore)" );
                .println( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION
                        + ": to ignore domains without combinations in any species (for similarity calc purposes, not for parsimony analyses) (default: not to ignore)" );
                .println( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION
                        + ": to ignore domains specific to one species (for similarity calc purposes, not for parsimony analyses) (default: not to ignore)" );
        System.out.println( surfacing.NOT_IGNORE_DUFS_OPTION
                + ": to _not_ ignore DUFs (domains with unknown function) (default: ignore DUFs)" );
                .println( surfacing.IGNORE_VIRAL_IDS
                        + ": to ignore domains with ids containing 'vir', 'retro', 'transpos', 'phage', or starting with 'rv' or 'gag_'" );
        System.out.println( surfacing.DOMAIN_SIMILARITY_SORT_OPTION + ": sorting for similarities (default: "
                + DOMAIN_SORT_FILD_DEFAULT + ")" );
        // -- output / filtering options --
        System.out.println( surfacing.OUTPUT_FILE_OPTION + ": name for (main) output file (mandatory)" );
        System.out.println( surfacing.MAX_E_VALUE_OPTION + ": max (inclusive) E-value" );
        System.out.println( surfacing.MAX_ALLOWED_OVERLAP_OPTION + ": maximal allowed domain overlap" );
        System.out.println( surfacing.NO_ENGULFING_OVERLAP_OPTION + ": to ignore engulfed lower confidence domains" );
        System.out.println( surfacing.SPECIES_MATRIX_OPTION + ": species matrix" );
        System.out.println( surfacing.SCORING_OPTION + ": scoring (default:" + SCORING_DEFAULT + ")" );
        System.out.println( surfacing.DOMAIN_COUNT_SORT_OPTION + ": sorting for domain counts (default:"
                + DOMAINS_SORT_ORDER_DEFAULT + ")" );
        System.out.println( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION + ": domain similarity print option (default:"
                + DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT + ")" );
        System.out.println( surfacing.CUTOFF_SCORE_FILE_OPTION + ": cutoff score file" );
        System.out.println( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION
                + ": sort by species count first" );
        System.out.println( surfacing.OUTPUT_DIR_OPTION + ": output directory" );
        // -- GO mapping options --
        System.out.println( surfacing.PFAM_TO_GO_FILE_USE_OPTION + ": Pfam to GO mapping file" );
        System.out.println( surfacing.GO_OBO_FILE_USE_OPTION + ": GO terms file (OBO format)" );
        System.out.println( surfacing.GO_NAMESPACE_LIMIT_OPTION + ": limit GO term to one GO namespace" );
        // -- comparison / parsimony options --
        System.out.println( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
                + "[=<suffix for pairwise comparison output files>]: to perform pairwise comparison based analyses" );
        System.out.println( surfacing.INPUT_SPECIES_TREE_OPTION
                + ": species tree, to perform (Dollo, Fitch) parismony analyses" );
                .println( surfacing.INPUT_SPECIES_TREE_OPTION
                        + "=<treefiles in phyloXML format, separated by #>: to infer domain/binary domain combination gains/losses on given species trees" );
        System.out.println( surfacing.FILTER_POSITIVE_OPTION
                + "=<file>: to filter out proteins not containing at least one domain listed in <file>" );
        System.out.println( surfacing.FILTER_NEGATIVE_OPTION
                + "=<file>: to filter out proteins containing at least one domain listed in <file>" );
        System.out.println( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION
                + "=<file>: to filter out (ignore) domains listed in <file>" );
        System.out.println( surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>: to read input files from <file>" );
                .println( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION
                        + "=<seed>: seed for random number generator for Fitch Parsimony analysis (type: long, default: no randomization - given a choice, prefer absence" );
        System.out.println( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS
                + ": to consider directedness in binary combinations: e.g. A-B != B-A" );
        System.out.println( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY
                + ": to consider directedness and adjacency in binary combinations" );
                .println( surfacing.SEQ_EXTRACT_OPTION
                        + "=<domain ids (Pfam names)>: to extract sequence names of sequences containing matching domains and/or domain-sequences (order N to C) (domain separator: '~', domain sequences speparator: '#', e.g. 'NACHT#BIR~CARD')" );
        System.out.println( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE
                + "=<file>: to perfom parsimony analysis on secondary features" );
        System.out.println( surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>: to presence/absence genome analysis" );
        System.out.println( surfacing.DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS
                + ": to output binary domain combinations for (downstream) graph analysis" );
        System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS + ": to output all proteins per domain" );
        System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION
                + ": e value max per domain for output of all proteins per domain" );
        System.out.println( surfacing.USE_LAST_IN_FITCH_OPTION + ": to use last in Fitch parsimony" );
        System.out.println( surfacing.WRITE_TO_NEXUS_OPTION + ": to output in Nexus format" );
        System.out.println( PERFORM_DC_REGAIN_PROTEINS_STATS_OPTION + ": to perform DC regain protein statistics" );
        System.out.println( DA_ANALYSIS_OPTION + ": to DA analysis" );
        System.out.println();
        // -- worked examples --
        System.out.println( "Example 1: java -Xms128m -Xmx512m -cp path/to/forester.jar"
                + " org.forester.application.surfacing p2g=pfam2go_2012_02_07.txt -dufs -cos=Pfam_260_NC1"
                + " -no_eo -mo=0 -genomes=eukaryotes.txt -out_dir=out -o=o "
                + " -species_tree=tol.xml -obo=gene_ontology_2012_02_07.obo -pos_filter=f.txt -all_prot" );
        System.out.println();
        System.out.println( "Example 2: java -Xms128m -Xmx512m -cp path/to/forester.jar"
                + " org.forester.application.surfacing -detail=punctilious -o=TEST.html -pwc=TEST"
                + " -cos=Pfam_ls_22_TC2 -p2g=pfam2go -obo=gene_ontology_edit.obo "
                + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo -genomes=eukaryotes.txt "
                + "-ds_output=detailed_html -scoring=domains -sort=alpha " );
        System.out.println();
2540 private static void processFilter( final File filter_file, final SortedSet<String> filter ) {
2541 SortedSet<String> filter_str = null;
2543 filter_str = ForesterUtil.file2set( filter_file );
2545 catch ( final IOException e ) {
2546 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2548 if ( filter_str != null ) {
2549 for( final String string : filter_str ) {
2550 filter.add( string );
2554 System.out.println( "Filter:" );
2555 for( final String domainId : filter ) {
2556 System.out.println( domainId );
2561 private static String[][] processInputGenomesFile( final File input_genomes ) {
2562 String[][] input_file_properties = null;
2564 input_file_properties = ForesterUtil.file22dArray( input_genomes );
2566 catch ( final IOException e ) {
2567 ForesterUtil.fatalError( surfacing.PRG_NAME,
2568 "genomes files is to be in the following format \"<hmmpfam output file> <species>\": "
2569 + e.getLocalizedMessage() );
2571 final Set<String> specs = new HashSet<String>();
2572 final Set<String> paths = new HashSet<String>();
2573 for( int i = 0; i < input_file_properties.length; ++i ) {
2574 if ( !PhyloXmlUtil.TAXOMONY_CODE_PATTERN.matcher( input_file_properties[ i ][ 1 ] ).matches() ) {
2575 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for species code: "
2576 + input_file_properties[ i ][ 1 ] );
2578 if ( specs.contains( input_file_properties[ i ][ 1 ] ) ) {
2579 ForesterUtil.fatalError( surfacing.PRG_NAME, "species code " + input_file_properties[ i ][ 1 ]
2580 + " is not unique" );
2582 specs.add( input_file_properties[ i ][ 1 ] );
2583 if ( paths.contains( input_file_properties[ i ][ 0 ] ) ) {
2584 ForesterUtil.fatalError( surfacing.PRG_NAME, "path " + input_file_properties[ i ][ 0 ]
2585 + " is not unique" );
2587 paths.add( input_file_properties[ i ][ 0 ] );
2588 final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
2589 if ( !ForesterUtil.isEmpty( error ) ) {
2590 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
2593 return input_file_properties;
2596 private static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
2597 final List<String> high_copy_base,
2598 final List<String> high_copy_target,
2599 final List<String> low_copy,
2600 final List<Object> numbers ) {
2601 if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
2602 if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
2603 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for 'plus-minus' file: -"
2604 + surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>" );
2606 final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
2607 final String msg = ForesterUtil.isReadableFile( plus_minus_file );
2608 if ( !ForesterUtil.isEmpty( msg ) ) {
2609 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
2611 processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
2615 // First numbers is minimal difference, second is factor.
2616 private static void processPlusMinusFile( final File plus_minus_file,
2617 final List<String> high_copy_base,
2618 final List<String> high_copy_target,
2619 final List<String> low_copy,
2620 final List<Object> numbers ) {
2621 Set<String> species_set = null;
2622 int min_diff = PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
2623 double factor = PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
2625 species_set = ForesterUtil.file2set( plus_minus_file );
2627 catch ( final IOException e ) {
2628 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2630 if ( species_set != null ) {
2631 for( final String species : species_set ) {
2632 final String species_trimmed = species.substring( 1 );
2633 if ( species.startsWith( "+" ) ) {
2634 if ( low_copy.contains( species_trimmed ) ) {
2635 ForesterUtil.fatalError( surfacing.PRG_NAME,
2636 "species/genome names can not appear with both '+' and '-' suffix, as appears the case for: \""
2637 + species_trimmed + "\"" );
2639 high_copy_base.add( species_trimmed );
2641 else if ( species.startsWith( "*" ) ) {
2642 if ( low_copy.contains( species_trimmed ) ) {
2643 ForesterUtil.fatalError( surfacing.PRG_NAME,
2644 "species/genome names can not appear with both '*' and '-' suffix, as appears the case for: \""
2645 + species_trimmed + "\"" );
2647 high_copy_target.add( species_trimmed );
2649 else if ( species.startsWith( "-" ) ) {
2650 if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
2651 ForesterUtil.fatalError( surfacing.PRG_NAME,
2652 "species/genome names can not appear with both '+' or '*' and '-' suffix, as appears the case for: \""
2653 + species_trimmed + "\"" );
2655 low_copy.add( species_trimmed );
2657 else if ( species.startsWith( "$D" ) ) {
2659 min_diff = Integer.parseInt( species.substring( 3 ) );
2661 catch ( final NumberFormatException e ) {
2662 ForesterUtil.fatalError( surfacing.PRG_NAME,
2663 "could not parse integer value for minimal difference from: \""
2664 + species.substring( 3 ) + "\"" );
2667 else if ( species.startsWith( "$F" ) ) {
2669 factor = Double.parseDouble( species.substring( 3 ) );
2671 catch ( final NumberFormatException e ) {
2672 ForesterUtil.fatalError( surfacing.PRG_NAME, "could not parse double value for factor from: \""
2673 + species.substring( 3 ) + "\"" );
2676 else if ( species.startsWith( "#" ) ) {
2681 .fatalError( surfacing.PRG_NAME,
2682 "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' minimal Difference (default is 1), '$F=<double>' factor (default is 1.0), double), or '#' (ignore) suffix, encountered: \""
2685 numbers.add( new Integer( min_diff + "" ) );
2686 numbers.add( new Double( factor + "" ) );
2690 ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
2694 private static void writePresentToNexus( final File output_file,
2695 final File positive_filter_file,
2696 final SortedSet<String> filter,
2697 final List<GenomeWideCombinableDomains> gwcd_list ) {
2700 .writeMatrixToFile( DomainParsimonyCalculator
2701 .createMatrixOfDomainPresenceOrAbsence( gwcd_list, positive_filter_file == null ? null
2702 : filter ), output_file + DOMAINS_PRESENT_NEXUS, Format.NEXUS_BINARY );
2703 SurfacingUtil.writeMatrixToFile( DomainParsimonyCalculator
2704 .createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ), output_file
2705 + BDC_PRESENT_NEXUS, Format.NEXUS_BINARY );
2707 catch ( final Exception e ) {
2708 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2712 private static void writeProteinListsForAllSpecies( final File output_dir,
2713 final SortedMap<Species, List<Protein>> protein_lists_per_species,
2714 final List<GenomeWideCombinableDomains> gwcd_list,
2715 final double domain_e_cutoff ) {
2716 final SortedSet<String> all_domains = new TreeSet<String>();
2717 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
2718 all_domains.addAll( gwcd.getAllDomainIds() );
2720 for( final String domain : all_domains ) {
2721 final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + SEQ_EXTRACT_SUFFIX );
2722 SurfacingUtil.checkForOutputFileWriteability( out );
2724 final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
2725 SurfacingUtil.extractProteinNames( protein_lists_per_species,
2727 proteins_file_writer,
2729 LIMIT_SPEC_FOR_PROT_EX,
2731 proteins_file_writer.close();
2733 catch ( final IOException e ) {
2734 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2736 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );