3 // FORESTER -- software libraries and applications
4 // for evolutionary biology research and applications.
6 // Copyright (C) 2008-2009 Christian M. Zmasek
7 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
10 // This library is free software; you can redistribute it and/or
11 // modify it under the terms of the GNU Lesser General Public
12 // License as published by the Free Software Foundation; either
13 // version 2.1 of the License, or (at your option) any later version.
15 // This library is distributed in the hope that it will be useful,
16 // but WITHOUT ANY WARRANTY; without even the implied warranty of
17 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 // Lesser General Public License for more details.
20 // You should have received a copy of the GNU Lesser General Public
21 // License along with this library; if not, write to the Free Software
22 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
24 // Contact: phylosoft @ gmail . com
25 // WWW: www.phylosoft.org/forester
27 package org.forester.application;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

import org.forester.evoinference.distance.NeighborJoining;
import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
import org.forester.evoinference.matrix.distance.DistanceMatrix;
import org.forester.go.GoId;
import org.forester.go.GoNameSpace;
import org.forester.go.GoTerm;
import org.forester.go.GoUtils;
import org.forester.go.OBOparser;
import org.forester.go.PfamToGoMapping;
import org.forester.go.PfamToGoParser;
import org.forester.io.parsers.HmmscanPerDomainTableParser;
import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
import org.forester.io.parsers.util.ParserUtils;
import org.forester.io.writers.PhylogenyWriter;
import org.forester.phylogeny.Phylogeny;
import org.forester.phylogeny.PhylogenyMethods;
import org.forester.phylogeny.PhylogenyNode;
import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
import org.forester.surfacing.BasicDomainSimilarityCalculator;
import org.forester.surfacing.BasicGenomeWideCombinableDomains;
import org.forester.surfacing.BasicSpecies;
import org.forester.surfacing.BinaryDomainCombination;
import org.forester.surfacing.CombinationsBasedPairwiseDomainSimilarityCalculator;
import org.forester.surfacing.DomainCountsBasedPairwiseSimilarityCalculator;
import org.forester.surfacing.DomainCountsDifferenceUtil;
import org.forester.surfacing.DomainId;
import org.forester.surfacing.DomainLengthsTable;
import org.forester.surfacing.DomainParsimonyCalculator;
import org.forester.surfacing.DomainSimilarity;
import org.forester.surfacing.DomainSimilarity.DomainSimilarityScoring;
import org.forester.surfacing.DomainSimilarity.DomainSimilaritySortField;
import org.forester.surfacing.DomainSimilarityCalculator;
import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
import org.forester.surfacing.GenomeWideCombinableDomains;
import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
import org.forester.surfacing.MappingResults;
import org.forester.surfacing.PairwiseDomainSimilarityCalculator;
import org.forester.surfacing.PairwiseGenomeComparator;
import org.forester.surfacing.PrintableDomainSimilarity;
import org.forester.surfacing.PrintableDomainSimilarity.PRINT_OPTION;
import org.forester.surfacing.Protein;
import org.forester.surfacing.ProteinCountsBasedPairwiseDomainSimilarityCalculator;
import org.forester.surfacing.Species;
import org.forester.surfacing.SurfacingUtil;
import org.forester.util.BasicDescriptiveStatistics;
import org.forester.util.BasicTable;
import org.forester.util.BasicTableParser;
import org.forester.util.CommandLineArguments;
import org.forester.util.DescriptiveStatistics;
import org.forester.util.ForesterConstants;
import org.forester.util.ForesterUtil;
100 public class surfacing {
102 private static final int MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING = 1000;
103 public final static String DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS = "graph_analysis_out";
104 public final static String DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_dc.dot";
105 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_fitch_present_dc.dot";
106 public final static String DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX = ".dcc";
108 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS = "_dollo_gl_d";
109 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_BINARY_COMBINATIONS = "_dollo_gl_dc";
110 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_DOMAINS = "_fitch_gl_d";
111 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS = "_fitch_gl_dc";
113 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS = "_dollo_glc_d";
114 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_BINARY_COMBINATIONS = "_dollo_glc_dc";
115 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_DOMAINS = "_fitch_glc_d";
116 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS = "_fitch_glc_dc";
118 public final static String PARSIMONY_OUTPUT_FITCH_GAINS_BC = "_fitch_gains_dc";
119 public final static String PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC = "_fitch_gains_dc.html";
120 public final static String PARSIMONY_OUTPUT_FITCH_LOSSES_BC = "_fitch_losses_dc";
121 public final static String PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC = "_fitch_losses_dc.html";
122 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_BC = "_fitch_present_dc";
123 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC = "_fitch_present_dc.html";
124 public final static String PARSIMONY_OUTPUT_DOLLO_GAINS_D = "_dollo_gains_d";
125 public final static String PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D = "_dollo_gains_d.html";
126 public final static String PARSIMONY_OUTPUT_DOLLO_LOSSES_D = "_dollo_losses_d";
127 public final static String PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D = "_dollo_losses_d.html";
128 public final static String PARSIMONY_OUTPUT_DOLLO_PRESENT_D = "_dollo_present_d";
129 public final static String PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D = "_dollo_present_d.html";
130 public final static String DOMAINS_PRESENT_NEXUS = "_dom.nex";
131 public final static String BDC_PRESENT_NEXUS = "_dc.nex";
133 public final static String PRG_NAME = "surfacing";
134 public static final String DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_d_dollo"
135 + ForesterConstants.PHYLO_XML_SUFFIX;
136 public static final String DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH = "_d_fitch"
137 + ForesterConstants.PHYLO_XML_SUFFIX;
138 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_dc_dollo"
139 + ForesterConstants.PHYLO_XML_SUFFIX;
140 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH = "_dc_fitch"
141 + ForesterConstants.PHYLO_XML_SUFFIX;
142 public static final String NEXUS_EXTERNAL_DOMAINS = "_dom.nex";
143 public static final String NEXUS_EXTERNAL_DOMAIN_COMBINATIONS = "_dc.nex";
144 public static final String NEXUS_SECONDARY_FEATURES = "_secondary_features.nex";
145 public static final String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES = "_dollo_gl_secondary_features";
146 public static final String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES = "_dollo_glc_secondary_features";
147 public static final String PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES = "_dollo_gains_secondary_features";
148 public static final String PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES = "_dollo_losses_secondary_features";
149 public static final String PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES = "_dollo_present_secondary_features";
150 public static final String SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_secondary_features_dollo"
151 + ForesterConstants.PHYLO_XML_SUFFIX;
152 public static final String PARSIMONY_OUTPUT_DOLLO_ALL_GOID_D_ALL_NAMESPACES = "_dollo_goid_d";
153 public static final String PARSIMONY_OUTPUT_FITCH_ALL_GOID_BC_ALL_NAMESPACES = "_fitch_goid_dc";
154 final static private String HELP_OPTION_1 = "help";
155 final static private String HELP_OPTION_2 = "h";
156 final static private String OUTPUT_DIR_OPTION = "out_dir";
157 final static private String SCORING_OPTION = "scoring";
158 private static final DomainSimilarityScoring SCORING_DEFAULT = DomainSimilarity.DomainSimilarityScoring.COMBINATIONS;
159 final static private String SCORING_DOMAIN_COUNT_BASED = "domains";
160 final static private String SCORING_PROTEIN_COUNT_BASED = "proteins";
161 final static private String SCORING_COMBINATION_BASED = "combinations";
162 final static private String DETAILEDNESS_OPTION = "detail";
163 private final static Detailedness DETAILEDNESS_DEFAULT = DomainSimilarityCalculator.Detailedness.PUNCTILIOUS;
164 final static private String SPECIES_MATRIX_OPTION = "smatrix";
165 final static private String DETAILEDNESS_BASIC = "basic";
166 final static private String DETAILEDNESS_LIST_IDS = "list_ids";
167 final static private String DETAILEDNESS_PUNCTILIOUS = "punctilious";
168 final static private String DOMAIN_SIMILARITY_SORT_OPTION = "sort";
169 private static final DomainSimilaritySortField DOMAIN_SORT_FILD_DEFAULT = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
170 final static private String DOMAIN_SIMILARITY_SORT_MIN = "min";
171 final static private String DOMAIN_SIMILARITY_SORT_MAX = "max";
172 final static private String DOMAIN_SIMILARITY_SORT_SD = "sd";
173 final static private String DOMAIN_SIMILARITY_SORT_MEAN = "mean";
174 final static private String DOMAIN_SIMILARITY_SORT_DIFF = "diff";
175 final static private String DOMAIN_SIMILARITY_SORT_COUNTS_DIFF = "count_diff";
176 final static private String DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF = "abs_count_diff";
177 final static private String DOMAIN_SIMILARITY_SORT_SPECIES_COUNT = "species";
178 final static private String DOMAIN_SIMILARITY_SORT_ALPHA = "alpha";
179 final static private String DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION = "species_first";
180 final static private String DOMAIN_COUNT_SORT_OPTION = "dc_sort";
181 private static final GenomeWideCombinableDomainsSortOrder DOMAINS_SORT_ORDER_DEFAULT = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.ALPHABETICAL_KEY_ID;
182 final static private String DOMAIN_COUNT_SORT_ALPHA = "alpha";
183 final static private String DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT = "dom";
184 final static private String DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT = "prot";
185 final static private String DOMAIN_COUNT_SORT_COMBINATIONS_COUNT = "comb";
186 final static private String CUTOFF_SCORE_FILE_OPTION = "cos";
187 final static private String NOT_IGNORE_DUFS_OPTION = "dufs";
188 final static private String MAX_E_VALUE_OPTION = "e";
189 final static private String MAX_ALLOWED_OVERLAP_OPTION = "mo";
190 final static private String NO_ENGULFING_OVERLAP_OPTION = "no_eo";
191 final static private String IGNORE_COMBINATION_WITH_SAME_OPTION = "ignore_self_comb";
192 final static private String PAIRWISE_DOMAIN_COMPARISONS_PREFIX = "pwc_";
193 final static private String PAIRWISE_DOMAIN_COMPARISONS_OPTION = "pwc";
194 final static private String OUTPUT_FILE_OPTION = "o";
195 final static private String PFAM_TO_GO_FILE_USE_OPTION = "p2g";
196 final static private String GO_OBO_FILE_USE_OPTION = "obo";
197 final static private String GO_NAMESPACE_LIMIT_OPTION = "go_namespace";
198 final static private String GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION = "molecular_function";
199 final static private String GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS = "biological_process";
200 final static private String GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT = "cellular_component";
201 final static private String SECONDARY_FEATURES_PARSIMONY_MAP_FILE = "secondary";
202 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED = "simple_tab";
203 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML = "simple_html";
204 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML = "detailed_html";
205 final static private String DOMAIN_SIMILARITY_PRINT_OPTION = "ds_output";
206 private static final PRINT_OPTION DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT = PrintableDomainSimilarity.PRINT_OPTION.HTML;
207 final static private String IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION = "ignore_singlet_domains";
208 final static private String IGNORE_VIRAL_IDS = "ignore_viral_ids";
209 final static private boolean IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_DEFAULT = false;
210 final static private String IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION = "ignore_species_specific_domains";
211 final static private boolean IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION_DEFAULT = false;
212 final static private String MATRIX_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX = "_mean_score.pwd";
213 final static private String MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX = "_domains.pwd";
214 final static private String MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX = "_bin_combinations.pwd";
215 final static private String NJ_TREE_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX = "_mean_score_NJ"
216 + ForesterConstants.PHYLO_XML_SUFFIX;
217 final static private String NJ_TREE_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX = "_domains_NJ"
218 + ForesterConstants.PHYLO_XML_SUFFIX;
219 final static private String NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX = "_bin_combinations_NJ"
220 + ForesterConstants.PHYLO_XML_SUFFIX;
221 final static private String JACKNIFE_OPTION = "jack";
222 final static private String JACKNIFE_RANDOM_SEED_OPTION = "seed";
223 final static private String JACKNIFE_RATIO_OPTION = "jack_ratio";
224 private static final int JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT = 100;
225 final static private long JACKNIFE_RANDOM_SEED_DEFAULT = 19;
226 final static private double JACKNIFE_RATIO_DEFAULT = 0.5;
227 //final static private String INFER_SPECIES_TREES_OPTION = "species_tree_inference";
228 final static private String INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX = "_sd_nj.nh";
229 final static private String INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX = "_sbc_nj.nh";
230 final static private String FILTER_POSITIVE_OPTION = "pos_filter";
231 final static private String FILTER_NEGATIVE_OPTION = "neg_filter";
232 final static private String FILTER_NEGATIVE_DOMAINS_OPTION = "neg_dom_filter";
233 final static private String INPUT_FILES_FROM_FILE_OPTION = "input";
234 final static private String INPUT_SPECIES_TREE_OPTION = "species_tree";
235 final static private String SEQ_EXTRACT_OPTION = "prot_extract";
236 final static private char SEPARATOR_FOR_INPUT_VALUES = '#';
237 final static private String PRG_VERSION = "2.210";
238 final static private String PRG_DATE = "2011.12.08";
239 final static private String E_MAIL = "czmasek@burnham.org";
240 final static private String WWW = "www.phylosoft.org/forester/applications/surfacing";
241 final static private boolean IGNORE_DUFS_DEFAULT = true;
242 final static private boolean IGNORE_COMBINATION_WITH_SAME_DEFAULLT = false;
243 final static private double MAX_E_VALUE_DEFAULT = -1;
244 final static private int MAX_ALLOWED_OVERLAP_DEFAULT = -1;
245 private static final String RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION = "random_seed";
246 private static final String CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS = "consider_bdc_direction";
247 private static final String CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY = "consider_bdc_adj";
248 private static final String SEQ_EXTRACT_SUFFIX = ".prot";
249 private static final String PLUS_MINUS_ANALYSIS_OPTION = "plus_minus";
250 private static final String PLUS_MINUS_DOM_SUFFIX = "_plus_minus_dom.txt";
251 private static final String PLUS_MINUS_DOM_SUFFIX_HTML = "_plus_minus_dom.html";
252 private static final String PLUS_MINUS_DC_SUFFIX_HTML = "_plus_minus_dc.html";
253 private static final int PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT = 0;
254 private static final double PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT = 1.0;
255 private static final String PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_all.txt";
256 private static final String PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_passing.txt";
257 private static final String OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS = "all_prot";
258 private static final boolean VERBOSE = false;
259 private static final String OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_gains_counts";
260 private static final String OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_losses_counts";
261 private static final String DOMAIN_LENGTHS_ANALYSIS_SUFFIX = "_domain_lengths_analysis";
262 private static final boolean PERFORM_DOMAIN_LENGTH_ANALYSIS = true;
263 public static final String ALL_PFAMS_ENCOUNTERED_SUFFIX = "_all_encountered_pfams";
264 public static final String ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX = "_all_encountered_pfams_with_go_annotation";
265 public static final String ENCOUNTERED_PFAMS_SUMMARY_SUFFIX = "_encountered_pfams_summary";
266 public static final String ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX = "_all_pfams_gained_as_domains";
267 public static final String ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX = "_all_pfams_lost_as_domains";
268 public static final String ALL_PFAMS_GAINED_AS_DC_SUFFIX = "_all_pfams_gained_as_dc";
269 public static final String ALL_PFAMS_LOST_AS_DC_SUFFIX = "_all_pfams_lost_as_dc";
270 public static final String BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES = "PER_NODE_EVENTS";
271 public static final String BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES = "PER_SUBTREE_EVENTS";
272 public static final String D_PROMISCUITY_FILE_SUFFIX = "_domain_promiscuities";
273 private static final String LOG_FILE_SUFFIX = "_log.txt";
274 private static final String DATA_FILE_SUFFIX = "_domain_combination_data.txt";
275 private static final String DATA_FILE_DESC = "#SPECIES\tPRTEIN_ID\tN_TERM_DOMAIN\tC_TERM_DOMAIN\tN_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tC_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tN_TERM_DOMAIN_COUNTS_PER_PROTEIN\tC_TERM_DOMAIN_COUNTS_PER_PROTEIN";
276 private static final INDIVIDUAL_SCORE_CUTOFF INDIVIDUAL_SCORE_CUTOFF_DEFAULT = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE;
277 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_counts.txt";
278 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists.txt";
279 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping.txt";
280 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique.txt";
282 private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
283 final String[][] input_file_properties,
284 final String automated_pairwise_comparison_suffix,
285 final File outdir ) {
286 for( int i = 0; i < input_file_properties.length; ++i ) {
287 for( int j = 0; j < i; ++j ) {
288 final String species_i = input_file_properties[ i ][ 1 ];
289 final String species_j = input_file_properties[ j ][ 1 ];
290 String pairwise_similarities_output_file_str = PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i + "_"
291 + species_j + automated_pairwise_comparison_suffix;
292 switch ( domain_similarity_print_option ) {
294 if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
295 pairwise_similarities_output_file_str += ".html";
299 final String error = ForesterUtil
300 .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str : outdir
301 + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
302 if ( !ForesterUtil.isEmpty( error ) ) {
303 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
309 private static StringBuilder createParametersAsString( final boolean ignore_dufs,
310 final double e_value_max,
311 final int max_allowed_overlap,
312 final boolean no_engulfing_overlaps,
313 final File cutoff_scores_file,
314 final BinaryDomainCombination.DomainCombinationType dc_type ) {
315 final StringBuilder parameters_sb = new StringBuilder();
316 parameters_sb.append( "E-value: " + e_value_max );
317 if ( cutoff_scores_file != null ) {
318 parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
321 parameters_sb.append( ", Cutoff-scores-file: not-set" );
323 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
324 parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
327 parameters_sb.append( ", Max-overlap: not-set" );
329 if ( no_engulfing_overlaps ) {
330 parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
333 parameters_sb.append( ", Engulfing-overlaps: allowed" );
336 parameters_sb.append( ", Ignore-dufs: true" );
339 parameters_sb.append( ", Ignore-dufs: false" );
341 parameters_sb.append( ", DC type (if applicable): " + dc_type );
342 return parameters_sb;
346 * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
350 * @param all_bin_domain_combinations_changed
351 * @param sum_of_all_domains_encountered
352 * @param all_bin_domain_combinations_encountered
353 * @param is_gains_analysis
354 * @throws IOException
356 private static void executeFitchGainsAnalysis( final File output_file,
357 final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
358 final int sum_of_all_domains_encountered,
359 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
360 final boolean is_gains_analysis ) throws IOException {
361 SurfacingUtil.checkForOutputFileWriteability( output_file );
362 final Writer out = ForesterUtil.createBufferedWriter( output_file );
363 final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
364 .listToSortedCountsMap( all_bin_domain_combinations_changed );
365 final SortedSet<DomainId> all_domains_in_combination_changed_more_than_once = new TreeSet<DomainId>();
366 final SortedSet<DomainId> all_domains_in_combination_changed_only_once = new TreeSet<DomainId>();
369 for( final Object bdc_object : bdc_to_counts.keySet() ) {
370 final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
371 final int count = bdc_to_counts.get( bdc_object );
373 ForesterUtil.unexpectedFatalError( PRG_NAME, "count < 1 " );
375 out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
377 all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
378 all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
381 else if ( count == 1 ) {
382 all_domains_in_combination_changed_only_once.add( bdc.getId0() );
383 all_domains_in_combination_changed_only_once.add( bdc.getId1() );
387 final int all = all_bin_domain_combinations_encountered.size();
389 if ( !is_gains_analysis ) {
390 all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
391 never_lost = all_bin_domain_combinations_encountered.size();
392 for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
393 out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
396 if ( is_gains_analysis ) {
397 out.write( "Sum of all distinct domain combinations appearing once : " + one
398 + ForesterUtil.LINE_SEPARATOR );
399 out.write( "Sum of all distinct domain combinations appearing more than once : " + above_one
400 + ForesterUtil.LINE_SEPARATOR );
401 out.write( "Sum of all distinct domains in combinations apppearing only once : "
402 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
403 out.write( "Sum of all distinct domains in combinations apppearing more than once: "
404 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
407 out.write( "Sum of all distinct domain combinations never lost : " + never_lost
408 + ForesterUtil.LINE_SEPARATOR );
409 out.write( "Sum of all distinct domain combinations lost once : " + one
410 + ForesterUtil.LINE_SEPARATOR );
411 out.write( "Sum of all distinct domain combinations lost more than once : " + above_one
412 + ForesterUtil.LINE_SEPARATOR );
413 out.write( "Sum of all distinct domains in combinations lost only once : "
414 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
415 out.write( "Sum of all distinct domains in combinations lost more than once: "
416 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
418 out.write( "All binary combinations : " + all
419 + ForesterUtil.LINE_SEPARATOR );
420 out.write( "All domains : "
421 + sum_of_all_domains_encountered );
423 ForesterUtil.programMessage( surfacing.PRG_NAME,
424 "Wrote fitch domain combination dynamics counts analysis to \"" + output_file
428 private static void executePlusMinusAnalysis( final File output_file,
429 final List<String> plus_minus_analysis_high_copy_base,
430 final List<String> plus_minus_analysis_high_copy_target,
431 final List<String> plus_minus_analysis_low_copy,
432 final List<GenomeWideCombinableDomains> gwcd_list,
433 final SortedMap<Species, List<Protein>> protein_lists_per_species,
434 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
435 final Map<GoId, GoTerm> go_id_to_term_map,
436 final List<Object> plus_minus_analysis_numbers ) {
437 final Set<String> all_spec = new HashSet<String>();
438 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
439 all_spec.add( gwcd.getSpecies().getSpeciesId() );
441 final File html_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX_HTML );
442 final File plain_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX );
443 final File html_out_dc = new File( output_file + PLUS_MINUS_DC_SUFFIX_HTML );
444 final File all_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
445 final File passing_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
446 final File proteins_file_base = new File( output_file + "" );
447 final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
448 final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
450 DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
451 protein_lists_per_species,
452 plus_minus_analysis_high_copy_base,
453 plus_minus_analysis_high_copy_target,
454 plus_minus_analysis_low_copy,
460 domain_id_to_go_ids_map,
462 all_domains_go_ids_out_dom,
463 passing_domains_go_ids_out_dom,
464 proteins_file_base );
466 catch ( final IOException e ) {
467 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
469 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
470 + html_out_dom + "\"" );
471 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
472 + plain_out_dom + "\"" );
473 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \"" + html_out_dc
475 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based passing GO ids to \""
476 + passing_domains_go_ids_out_dom + "\"" );
477 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based all GO ids to \""
478 + all_domains_go_ids_out_dom + "\"" );
481 private static Phylogeny[] getIntrees( final File[] intree_files,
482 final int number_of_genomes,
483 final String[][] input_file_properties ) {
484 final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
486 for( final File intree_file : intree_files ) {
487 Phylogeny intree = null;
488 final String error = ForesterUtil.isReadableFile( intree_file );
489 if ( !ForesterUtil.isEmpty( error ) ) {
490 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: "
494 final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
495 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
496 if ( p_array.length < 1 ) {
497 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
498 + "] does not contain any phylogeny in phyloXML format" );
500 else if ( p_array.length > 1 ) {
501 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
502 + "] contains more than one phylogeny in phyloXML format" );
504 intree = p_array[ 0 ];
506 catch ( final Exception e ) {
507 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file
510 if ( ( intree == null ) || intree.isEmpty() ) {
511 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
513 if ( !intree.isRooted() ) {
514 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
516 if ( intree.getNumberOfExternalNodes() < number_of_genomes ) {
517 ForesterUtil.fatalError( surfacing.PRG_NAME,
518 "number of external nodes [" + intree.getNumberOfExternalNodes()
519 + "] of input tree [" + intree_file
520 + "] is smaller than the number of genomes the be analyzed ["
521 + number_of_genomes + "]" );
523 final StringBuilder parent_names = new StringBuilder();
524 final int nodes_lacking_name = SurfacingUtil.getNumberOfNodesLackingName( intree, parent_names );
525 if ( nodes_lacking_name > 0 ) {
526 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] has "
527 + nodes_lacking_name + " node(s) lacking a name [parent names:" + parent_names + "]" );
529 preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
530 if ( !intree.isCompletelyBinary() ) {
531 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "input tree [" + intree_file
532 + "] is not completely binary" );
534 intrees[ i++ ] = intree;
539 private static List<Phylogeny> inferSpeciesTrees( final File outfile, final List<DistanceMatrix> distances_list ) {
540 final NeighborJoining nj = NeighborJoining.createInstance();
541 final List<Phylogeny> phylogenies = nj.execute( distances_list );
542 final PhylogenyWriter w = new PhylogenyWriter();
544 w.toNewHampshire( phylogenies, true, true, outfile, ";" );
546 catch ( final IOException e ) {
547 ForesterUtil.fatalError( PRG_NAME, "failed to write to outfile [" + outfile + "]: " + e.getMessage() );
552 private static void log( final String msg, final Writer w ) {
555 w.write( ForesterUtil.LINE_SEPARATOR );
557 catch ( final IOException e ) {
558 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
562 public static void main( final String args[] ) {
563 final long start_time = new Date().getTime();
564 // final StringBuffer log = new StringBuffer();
565 final StringBuilder html_desc = new StringBuilder();
566 ForesterUtil.printProgramInformation( surfacing.PRG_NAME,
567 surfacing.PRG_VERSION,
571 final String nl = ForesterUtil.LINE_SEPARATOR;
572 html_desc.append( "<table>" + nl );
573 html_desc.append( "<tr><td>Produced by:</td><td>" + surfacing.PRG_NAME + "</td></tr>" + nl );
574 html_desc.append( "<tr><td>Version:</td><td>" + surfacing.PRG_VERSION + "</td></tr>" + nl );
575 html_desc.append( "<tr><td>Release Date:</td><td>" + surfacing.PRG_DATE + "</td></tr>" + nl );
576 html_desc.append( "<tr><td>Contact:</td><td>" + surfacing.E_MAIL + "</td></tr>" + nl );
577 html_desc.append( "<tr><td>WWW:</td><td>" + surfacing.WWW + "</td></tr>" + nl );
578 CommandLineArguments cla = null;
580 cla = new CommandLineArguments( args );
582 catch ( final Exception e ) {
583 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
585 if ( cla.isOptionSet( surfacing.HELP_OPTION_1 ) || cla.isOptionSet( surfacing.HELP_OPTION_2 ) ) {
586 surfacing.printHelp();
589 if ( ( args.length < 1 ) ) {
590 surfacing.printHelp();
593 final List<String> allowed_options = new ArrayList<String>();
594 allowed_options.add( surfacing.NOT_IGNORE_DUFS_OPTION );
595 allowed_options.add( surfacing.MAX_E_VALUE_OPTION );
596 allowed_options.add( surfacing.DETAILEDNESS_OPTION );
597 allowed_options.add( surfacing.OUTPUT_FILE_OPTION );
598 allowed_options.add( surfacing.DOMAIN_SIMILARITY_SORT_OPTION );
599 allowed_options.add( surfacing.SPECIES_MATRIX_OPTION );
600 allowed_options.add( surfacing.SCORING_OPTION );
601 allowed_options.add( surfacing.MAX_ALLOWED_OVERLAP_OPTION );
602 allowed_options.add( surfacing.NO_ENGULFING_OVERLAP_OPTION );
603 allowed_options.add( surfacing.DOMAIN_COUNT_SORT_OPTION );
604 allowed_options.add( surfacing.CUTOFF_SCORE_FILE_OPTION );
605 allowed_options.add( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION );
606 allowed_options.add( surfacing.OUTPUT_DIR_OPTION );
607 allowed_options.add( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION );
608 allowed_options.add( surfacing.PFAM_TO_GO_FILE_USE_OPTION );
609 allowed_options.add( surfacing.GO_OBO_FILE_USE_OPTION );
610 allowed_options.add( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION );
611 allowed_options.add( surfacing.GO_NAMESPACE_LIMIT_OPTION );
612 allowed_options.add( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
613 allowed_options.add( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION );
614 allowed_options.add( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS );
615 allowed_options.add( JACKNIFE_OPTION );
616 allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
617 allowed_options.add( JACKNIFE_RATIO_OPTION );
618 allowed_options.add( INPUT_SPECIES_TREE_OPTION );
619 //allowed_options.add( INFER_SPECIES_TREES_OPTION );
620 allowed_options.add( FILTER_POSITIVE_OPTION );
621 allowed_options.add( FILTER_NEGATIVE_OPTION );
622 allowed_options.add( INPUT_FILES_FROM_FILE_OPTION );
623 allowed_options.add( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
624 allowed_options.add( FILTER_NEGATIVE_DOMAINS_OPTION );
625 allowed_options.add( IGNORE_VIRAL_IDS );
626 allowed_options.add( SEQ_EXTRACT_OPTION );
627 allowed_options.add( SECONDARY_FEATURES_PARSIMONY_MAP_FILE );
628 allowed_options.add( PLUS_MINUS_ANALYSIS_OPTION );
629 allowed_options.add( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS );
630 allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS );
631 allowed_options.add( CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY );
632 boolean ignore_dufs = surfacing.IGNORE_DUFS_DEFAULT;
633 boolean ignore_combination_with_same = surfacing.IGNORE_COMBINATION_WITH_SAME_DEFAULLT;
634 double e_value_max = surfacing.MAX_E_VALUE_DEFAULT;
635 int max_allowed_overlap = surfacing.MAX_ALLOWED_OVERLAP_DEFAULT;
636 final String dissallowed_options = cla.validateAllowedOptionsAsString( allowed_options );
637 if ( dissallowed_options.length() > 0 ) {
638 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown option(s): " + dissallowed_options );
640 boolean output_binary_domain_combinationsfor_graph_analysis = false;
641 if ( cla.isOptionSet( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS ) ) {
642 output_binary_domain_combinationsfor_graph_analysis = true;
644 if ( cla.isOptionSet( surfacing.MAX_E_VALUE_OPTION ) ) {
646 e_value_max = cla.getOptionValueAsDouble( surfacing.MAX_E_VALUE_OPTION );
648 catch ( final Exception e ) {
649 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for E-value maximum" );
652 if ( cla.isOptionSet( surfacing.MAX_ALLOWED_OVERLAP_OPTION ) ) {
654 max_allowed_overlap = cla.getOptionValueAsInt( surfacing.MAX_ALLOWED_OVERLAP_OPTION );
656 catch ( final Exception e ) {
657 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for maximal allowed domain overlap" );
660 boolean no_engulfing_overlaps = false;
661 if ( cla.isOptionSet( surfacing.NO_ENGULFING_OVERLAP_OPTION ) ) {
662 no_engulfing_overlaps = true;
664 boolean ignore_virus_like_ids = false;
665 if ( cla.isOptionSet( surfacing.IGNORE_VIRAL_IDS ) ) {
666 ignore_virus_like_ids = true;
668 if ( cla.isOptionSet( surfacing.NOT_IGNORE_DUFS_OPTION ) ) {
671 if ( cla.isOptionSet( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION ) ) {
672 ignore_combination_with_same = true;
674 boolean ignore_domains_without_combs_in_all_spec = IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_DEFAULT;
675 if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION ) ) {
676 ignore_domains_without_combs_in_all_spec = true;
678 boolean ignore_species_specific_domains = IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION_DEFAULT;
679 if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION ) ) {
680 ignore_species_specific_domains = true;
682 File output_file = null;
683 if ( cla.isOptionSet( surfacing.OUTPUT_FILE_OPTION ) ) {
684 if ( !cla.isOptionValueSet( surfacing.OUTPUT_FILE_OPTION ) ) {
685 ForesterUtil.fatalError( surfacing.PRG_NAME,
686 "no value for domain combinations similarities output file: -"
687 + surfacing.OUTPUT_FILE_OPTION + "=<file>" );
689 output_file = new File( cla.getOptionValue( surfacing.OUTPUT_FILE_OPTION ) );
690 SurfacingUtil.checkForOutputFileWriteability( output_file );
692 File cutoff_scores_file = null;
693 Map<String, Double> individual_score_cutoffs = null;
694 if ( cla.isOptionSet( surfacing.CUTOFF_SCORE_FILE_OPTION ) ) {
695 if ( !cla.isOptionValueSet( surfacing.CUTOFF_SCORE_FILE_OPTION ) ) {
696 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for individual domain score cutoffs file: -"
697 + surfacing.CUTOFF_SCORE_FILE_OPTION + "=<file>" );
699 cutoff_scores_file = new File( cla.getOptionValue( surfacing.CUTOFF_SCORE_FILE_OPTION ) );
700 final String error = ForesterUtil.isReadableFile( cutoff_scores_file );
701 if ( !ForesterUtil.isEmpty( error ) ) {
702 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read individual domain score cutoffs file: "
706 final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, " " );
707 individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
709 catch ( final IOException e ) {
710 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from individual score cutoffs file: " + e );
713 BinaryDomainCombination.DomainCombinationType dc_type = BinaryDomainCombination.DomainCombinationType.BASIC;
714 if ( cla.isOptionSet( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS ) ) {
715 dc_type = BinaryDomainCombination.DomainCombinationType.DIRECTED;
717 if ( cla.isOptionSet( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY ) ) {
718 dc_type = BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT;
721 if ( cla.isOptionSet( surfacing.OUTPUT_DIR_OPTION ) ) {
722 if ( !cla.isOptionValueSet( surfacing.OUTPUT_DIR_OPTION ) ) {
723 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for output directory: -"
724 + surfacing.OUTPUT_DIR_OPTION + "=<dir>" );
726 out_dir = new File( cla.getOptionValue( surfacing.OUTPUT_DIR_OPTION ) );
727 if ( out_dir.exists() && ( out_dir.listFiles().length > 0 ) ) {
728 ForesterUtil.fatalError( surfacing.PRG_NAME, "\"" + out_dir + "\" aready exists and is not empty" );
730 if ( !out_dir.exists() ) {
731 final boolean success = out_dir.mkdir();
732 if ( !success || !out_dir.exists() ) {
733 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to create \"" + out_dir + "\"" );
736 if ( !out_dir.canWrite() ) {
737 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot write to \"" + out_dir + "\"" );
740 File positive_filter_file = null;
741 File negative_filter_file = null;
742 File negative_domains_filter_file = null;
743 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) && cla.isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
744 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use both negative and positive protein filter" );
746 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION )
747 && ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) || cla
748 .isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) ) {
750 .fatalError( surfacing.PRG_NAME,
751 "attempt to use both negative or positive protein filter together wirh a negative domains filter" );
753 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) ) {
754 if ( !cla.isOptionValueSet( surfacing.FILTER_NEGATIVE_OPTION ) ) {
755 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for negative filter: -"
756 + surfacing.FILTER_NEGATIVE_OPTION + "=<file>" );
758 negative_filter_file = new File( cla.getOptionValue( surfacing.FILTER_NEGATIVE_OPTION ) );
759 final String msg = ForesterUtil.isReadableFile( negative_filter_file );
760 if ( !ForesterUtil.isEmpty( msg ) ) {
761 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + negative_filter_file + "\": "
765 else if ( cla.isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
766 if ( !cla.isOptionValueSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
767 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for positive filter: -"
768 + surfacing.FILTER_POSITIVE_OPTION + "=<file>" );
770 positive_filter_file = new File( cla.getOptionValue( surfacing.FILTER_POSITIVE_OPTION ) );
771 final String msg = ForesterUtil.isReadableFile( positive_filter_file );
772 if ( !ForesterUtil.isEmpty( msg ) ) {
773 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + positive_filter_file + "\": "
777 else if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) ) {
778 if ( !cla.isOptionValueSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) ) {
779 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for negative domains filter: -"
780 + surfacing.FILTER_NEGATIVE_DOMAINS_OPTION + "=<file>" );
782 negative_domains_filter_file = new File( cla.getOptionValue( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) );
783 final String msg = ForesterUtil.isReadableFile( negative_domains_filter_file );
784 if ( !ForesterUtil.isEmpty( msg ) ) {
785 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + negative_domains_filter_file
789 final List<String> plus_minus_analysis_high_copy_base_species = new ArrayList<String>();
790 final List<String> plus_minus_analysis_high_copy_target_species = new ArrayList<String>();
791 final List<String> plus_minus_analysis_high_low_copy_species = new ArrayList<String>();
792 final List<Object> plus_minus_analysis_numbers = new ArrayList<Object>();
793 processPlusMinusAnalysisOption( cla,
794 plus_minus_analysis_high_copy_base_species,
795 plus_minus_analysis_high_copy_target_species,
796 plus_minus_analysis_high_low_copy_species,
797 plus_minus_analysis_numbers );
798 File input_files_file = null;
799 String[] input_file_names_from_file = null;
800 if ( cla.isOptionSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
801 if ( !cla.isOptionValueSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
802 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input files file: -"
803 + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
805 input_files_file = new File( cla.getOptionValue( surfacing.INPUT_FILES_FROM_FILE_OPTION ) );
806 final String msg = ForesterUtil.isReadableFile( input_files_file );
807 if ( !ForesterUtil.isEmpty( msg ) ) {
808 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + input_files_file + "\": " + msg );
811 input_file_names_from_file = ForesterUtil.file2array( input_files_file );
813 catch ( final IOException e ) {
814 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read from \"" + input_files_file + "\": " + e );
817 if ( ( cla.getNumberOfNames() < 1 )
818 && ( ( input_file_names_from_file == null ) || ( input_file_names_from_file.length < 1 ) ) ) {
819 ForesterUtil.fatalError( surfacing.PRG_NAME,
820 "No hmmpfam output file indicated is input: use comand line directly or "
821 + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
823 DomainSimilarity.DomainSimilarityScoring scoring = SCORING_DEFAULT;
824 if ( cla.isOptionSet( surfacing.SCORING_OPTION ) ) {
825 if ( !cla.isOptionValueSet( surfacing.SCORING_OPTION ) ) {
826 ForesterUtil.fatalError( surfacing.PRG_NAME,
827 "no value for scoring method for domain combinations similarity calculation: -"
828 + surfacing.SCORING_OPTION + "=<"
829 + surfacing.SCORING_DOMAIN_COUNT_BASED + "|"
830 + surfacing.SCORING_PROTEIN_COUNT_BASED + "|"
831 + surfacing.SCORING_COMBINATION_BASED + ">\"" );
833 final String scoring_str = cla.getOptionValue( surfacing.SCORING_OPTION );
834 if ( scoring_str.equals( surfacing.SCORING_DOMAIN_COUNT_BASED ) ) {
835 scoring = DomainSimilarity.DomainSimilarityScoring.DOMAINS;
837 else if ( scoring_str.equals( surfacing.SCORING_COMBINATION_BASED ) ) {
838 scoring = DomainSimilarity.DomainSimilarityScoring.COMBINATIONS;
840 else if ( scoring_str.equals( surfacing.SCORING_PROTEIN_COUNT_BASED ) ) {
841 scoring = DomainSimilarity.DomainSimilarityScoring.PROTEINS;
844 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + scoring_str
845 + "\" for scoring method for domain combinations similarity calculation: \"-"
846 + surfacing.SCORING_OPTION + "=<" + surfacing.SCORING_DOMAIN_COUNT_BASED + "|"
847 + surfacing.SCORING_PROTEIN_COUNT_BASED + "|" + surfacing.SCORING_COMBINATION_BASED + ">\"" );
850 boolean sort_by_species_count_first = false;
851 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION ) ) {
852 sort_by_species_count_first = true;
854 boolean species_matrix = false;
855 if ( cla.isOptionSet( surfacing.SPECIES_MATRIX_OPTION ) ) {
856 species_matrix = true;
858 boolean output_protein_lists_for_all_domains = false;
859 if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS ) ) {
860 output_protein_lists_for_all_domains = true;
862 Detailedness detailedness = DETAILEDNESS_DEFAULT;
863 if ( cla.isOptionSet( surfacing.DETAILEDNESS_OPTION ) ) {
864 if ( !cla.isOptionValueSet( surfacing.DETAILEDNESS_OPTION ) ) {
865 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for -" + surfacing.DETAILEDNESS_OPTION + "=<"
866 + surfacing.DETAILEDNESS_BASIC + "|" + surfacing.DETAILEDNESS_LIST_IDS + "|"
867 + surfacing.DETAILEDNESS_PUNCTILIOUS + ">\"" );
869 final String detness = cla.getOptionValue( surfacing.DETAILEDNESS_OPTION ).toLowerCase();
870 if ( detness.equals( surfacing.DETAILEDNESS_BASIC ) ) {
871 detailedness = DomainSimilarityCalculator.Detailedness.BASIC;
873 else if ( detness.equals( surfacing.DETAILEDNESS_LIST_IDS ) ) {
874 detailedness = DomainSimilarityCalculator.Detailedness.LIST_COMBINING_DOMAIN_FOR_EACH_SPECIES;
876 else if ( detness.equals( surfacing.DETAILEDNESS_PUNCTILIOUS ) ) {
877 detailedness = DomainSimilarityCalculator.Detailedness.PUNCTILIOUS;
880 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + detness + "\" for detailedness: \"-"
881 + surfacing.DETAILEDNESS_OPTION + "=<" + surfacing.DETAILEDNESS_BASIC + "|"
882 + surfacing.DETAILEDNESS_LIST_IDS + "|" + surfacing.DETAILEDNESS_PUNCTILIOUS + ">\"" );
885 String automated_pairwise_comparison_suffix = null;
886 boolean perform_pwc = false;
887 boolean write_pwc_files = false;
888 if ( cla.isOptionSet( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION ) ) {
890 if ( !cla.isOptionValueSet( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION ) ) {
891 write_pwc_files = false;
894 write_pwc_files = true;
895 automated_pairwise_comparison_suffix = "_"
896 + cla.getOptionValue( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
899 String query_domain_ids = null;
900 if ( cla.isOptionSet( surfacing.SEQ_EXTRACT_OPTION ) ) {
901 if ( !cla.isOptionValueSet( surfacing.SEQ_EXTRACT_OPTION ) ) {
903 .fatalError( surfacing.PRG_NAME,
904 "no domain ids given for sequences with given domains to be extracted : -"
905 + surfacing.SEQ_EXTRACT_OPTION
906 + "=<ordered domain sequences, domain ids separated by '~', sequences separated by '#'>" );
908 query_domain_ids = cla.getOptionValue( surfacing.SEQ_EXTRACT_OPTION );
910 DomainSimilarity.DomainSimilaritySortField domain_similarity_sort_field = DOMAIN_SORT_FILD_DEFAULT;
911 DomainSimilarity.DomainSimilaritySortField domain_similarity_sort_field_for_automated_pwc = DOMAIN_SORT_FILD_DEFAULT;
912 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ) ) {
913 if ( !cla.isOptionValueSet( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ) ) {
914 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for domain combinations similarities sorting: -"
915 + surfacing.DOMAIN_SIMILARITY_SORT_OPTION + "=<" + surfacing.DOMAIN_SIMILARITY_SORT_ALPHA + "|"
916 + surfacing.DOMAIN_SIMILARITY_SORT_MAX + "|" + surfacing.DOMAIN_SIMILARITY_SORT_MIN + "|"
917 + surfacing.DOMAIN_SIMILARITY_SORT_MEAN + "|" + surfacing.DOMAIN_SIMILARITY_SORT_DIFF + "|"
918 + surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF + "|"
919 + surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF + "|"
920 + surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT + "|" + surfacing.DOMAIN_SIMILARITY_SORT_SD
923 final String sort_str = cla.getOptionValue( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ).toLowerCase();
924 if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_ALPHA ) ) {
925 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
926 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
928 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MAX ) ) {
929 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX;
930 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
932 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MIN ) ) {
933 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MIN;
934 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
936 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MEAN ) ) {
937 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MEAN;
938 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MEAN;
940 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT ) ) {
941 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.SPECIES_COUNT;
942 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
944 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_SD ) ) {
945 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.SD;
946 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
948 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_DIFF ) ) {
949 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX_DIFFERENCE;
950 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MAX_DIFFERENCE;
952 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF ) ) {
953 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
954 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
956 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF ) ) {
957 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE;
958 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE;
961 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort_str
962 + "\" for domain combinations similarities sorting: \"-"
963 + surfacing.DOMAIN_SIMILARITY_SORT_OPTION + "=<" + surfacing.DOMAIN_SIMILARITY_SORT_ALPHA + "|"
964 + surfacing.DOMAIN_SIMILARITY_SORT_MAX + "|" + surfacing.DOMAIN_SIMILARITY_SORT_MIN + "|"
965 + surfacing.DOMAIN_SIMILARITY_SORT_MEAN + "|" + surfacing.DOMAIN_SIMILARITY_SORT_DIFF + "|"
966 + surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF + "|"
967 + surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF + "|" + "|"
968 + surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT + "|" + surfacing.DOMAIN_SIMILARITY_SORT_SD
972 PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option = DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT;
973 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ) ) {
974 if ( !cla.isOptionValueSet( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ) ) {
975 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for print option: -"
976 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML + "|"
977 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML + "|"
978 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED + ">\"" );
980 final String sort = cla.getOptionValue( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ).toLowerCase();
981 if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML ) ) {
982 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.HTML;
984 else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML ) ) {
985 // domain_similarity_print_option =
986 // DomainSimilarity.PRINT_OPTION.SIMPLE_HTML;
987 ForesterUtil.fatalError( surfacing.PRG_NAME, "simple HTML output not implemented yet :(" );
989 else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED ) ) {
990 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.SIMPLE_TAB_DELIMITED;
993 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort + "\" for print option: -"
994 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML + "|"
995 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML + "|"
996 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED + ">\"" );
999 GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order = DOMAINS_SORT_ORDER_DEFAULT;
1000 if ( cla.isOptionSet( surfacing.DOMAIN_COUNT_SORT_OPTION ) ) {
1001 if ( !cla.isOptionValueSet( surfacing.DOMAIN_COUNT_SORT_OPTION ) ) {
1002 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for sorting of domain counts: -"
1003 + surfacing.DOMAIN_COUNT_SORT_OPTION + "=<" + surfacing.DOMAIN_COUNT_SORT_ALPHA + "|"
1004 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT + "|"
1005 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT + "|"
1006 + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
1008 final String sort = cla.getOptionValue( surfacing.DOMAIN_COUNT_SORT_OPTION ).toLowerCase();
1009 if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_ALPHA ) ) {
1010 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.ALPHABETICAL_KEY_ID;
1012 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT ) ) {
1013 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.KEY_DOMAIN_COUNT;
1015 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT ) ) {
1016 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.KEY_DOMAIN_PROTEINS_COUNT;
1018 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT ) ) {
1019 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.COMBINATIONS_COUNT;
1022 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort
1023 + "\" for sorting of domain counts: \"-" + surfacing.DOMAIN_COUNT_SORT_OPTION + "=<"
1024 + surfacing.DOMAIN_COUNT_SORT_ALPHA + "|" + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT + "|"
1025 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT + "|"
1026 + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
1029 String[][] input_file_properties = null;
1030 if ( input_file_names_from_file != null ) {
1031 input_file_properties = surfacing.processInputFileNames( input_file_names_from_file );
1034 input_file_properties = surfacing.processInputFileNames( cla.getNames() );
1036 final int number_of_genomes = input_file_properties.length;
1037 if ( number_of_genomes < 2 ) {
1038 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot analyze less than two files" );
1040 if ( ( number_of_genomes < 3 ) && perform_pwc ) {
1041 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use : -"
1042 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1043 + "=<suffix> to turn on pairwise analyses with less than three input files" );
1045 checkWriteabilityForPairwiseComparisons( domain_similarity_print_option,
1046 input_file_properties,
1047 automated_pairwise_comparison_suffix,
1049 for( int i = 0; i < number_of_genomes; i++ ) {
1050 File dcc_outfile = new File( input_file_properties[ i ][ 0 ]
1051 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1052 if ( out_dir != null ) {
1053 dcc_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + dcc_outfile );
1055 SurfacingUtil.checkForOutputFileWriteability( dcc_outfile );
1057 File pfam_to_go_file = null;
1058 Map<DomainId, List<GoId>> domain_id_to_go_ids_map = null;
1059 int domain_id_to_go_ids_count = 0;
1060 if ( cla.isOptionSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
1061 if ( !cla.isOptionValueSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
1062 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for Pfam to GO mapping file: -"
1063 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>" );
1065 pfam_to_go_file = new File( cla.getOptionValue( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) );
1066 final String error = ForesterUtil.isReadableFile( pfam_to_go_file );
1067 if ( !ForesterUtil.isEmpty( error ) ) {
1068 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read Pfam to GO mapping file: " + error );
1071 final PfamToGoParser parser = new PfamToGoParser( pfam_to_go_file );
1072 final List<PfamToGoMapping> pfam_to_go_mappings = parser.parse();
1073 domain_id_to_go_ids_map = SurfacingUtil.createDomainIdToGoIdMap( pfam_to_go_mappings );
1074 if ( parser.getMappingCount() < domain_id_to_go_ids_map.size() ) {
1075 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME,
1076 "parser.getMappingCount() < domain_id_to_go_ids_map.size()" );
1078 domain_id_to_go_ids_count = parser.getMappingCount();
1080 catch ( final IOException e ) {
1081 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from Pfam to GO mapping file: " + e );
1084 File go_obo_file = null;
1085 List<GoTerm> go_terms = null;
1086 if ( cla.isOptionSet( surfacing.GO_OBO_FILE_USE_OPTION ) ) {
1087 if ( !cla.isOptionValueSet( surfacing.GO_OBO_FILE_USE_OPTION ) ) {
1088 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for GO OBO file: -"
1089 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>" );
1091 if ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) {
1092 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use GO OBO file (-"
1093 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>) without Pfam to GO mapping file ("
1094 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>)" );
1096 go_obo_file = new File( cla.getOptionValue( surfacing.GO_OBO_FILE_USE_OPTION ) );
1097 final String error = ForesterUtil.isReadableFile( go_obo_file );
1098 if ( !ForesterUtil.isEmpty( error ) ) {
1099 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read GO OBO file: " + error );
1102 final OBOparser parser = new OBOparser( go_obo_file, OBOparser.ReturnType.BASIC_GO_TERM );
1103 go_terms = parser.parse();
1104 if ( parser.getGoTermCount() != go_terms.size() ) {
1106 .unexpectedFatalError( surfacing.PRG_NAME, "parser.getGoTermCount() != go_terms.size()" );
1109 catch ( final IOException e ) {
1110 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from GO OBO file: " + e );
1113 Map<GoId, GoTerm> go_id_to_term_map = null;
1114 if ( ( ( domain_id_to_go_ids_map != null ) && ( domain_id_to_go_ids_map.size() > 0 ) )
1115 && ( ( go_terms != null ) && ( go_terms.size() > 0 ) ) ) {
1116 go_id_to_term_map = GoUtils.createGoIdToGoTermMap( go_terms );
1118 GoNameSpace go_namespace_limit = null;
1119 if ( cla.isOptionSet( surfacing.GO_NAMESPACE_LIMIT_OPTION ) ) {
1120 if ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) {
1121 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use GO namespace limit (-"
1122 + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<namespace>) without Pfam to GO mapping file ("
1123 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>) and GO OBO file (-"
1124 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
1126 if ( !cla.isOptionValueSet( surfacing.GO_NAMESPACE_LIMIT_OPTION ) ) {
1127 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for GO namespace limit: \"-"
1128 + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<"
1129 + surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION + "|"
1130 + surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS + "|"
1131 + surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT + ">\"" );
1133 final String go_namespace_limit_str = cla.getOptionValue( surfacing.GO_NAMESPACE_LIMIT_OPTION )
1135 if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION ) ) {
1136 go_namespace_limit = GoNameSpace.createMolecularFunction();
1138 else if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS ) ) {
1139 go_namespace_limit = GoNameSpace.createBiologicalProcess();
1141 else if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT ) ) {
1142 go_namespace_limit = GoNameSpace.createCellularComponent();
1145 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + go_namespace_limit_str
1146 + "\" for GO namespace limit: \"-" + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<"
1147 + surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION + "|"
1148 + surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS + "|"
1149 + surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT + ">\"" );
1152 if ( ( domain_similarity_sort_field == DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE )
1153 && ( number_of_genomes > 2 ) ) {
1154 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
1156 boolean jacknifed_distances = false;
1157 int jacknife_resamplings = JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT;
1158 double jacknife_ratio = JACKNIFE_RATIO_DEFAULT;
1159 long random_seed = JACKNIFE_RANDOM_SEED_DEFAULT;
1160 if ( cla.isOptionSet( surfacing.JACKNIFE_OPTION ) ) {
1161 if ( ( number_of_genomes < 3 ) || !perform_pwc ) {
1162 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use jacknife resampling analysis (-"
1163 + surfacing.JACKNIFE_OPTION + "[=<number of resamplings>]) without pairwise analyses ("
1164 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1165 + "=<suffix for pairwise comparison output files>)" );
1167 jacknifed_distances = true;
1168 if ( cla.isOptionHasAValue( surfacing.JACKNIFE_OPTION ) ) {
1170 jacknife_resamplings = cla.getOptionValueAsInt( surfacing.JACKNIFE_OPTION );
1172 catch ( final IOException e ) {
1173 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for number of resamplings" );
1175 if ( jacknife_resamplings < 2 ) {
1176 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use less than 2 resamplings" );
1179 if ( cla.isOptionSet( surfacing.JACKNIFE_RATIO_OPTION )
1180 && cla.isOptionHasAValue( surfacing.JACKNIFE_RATIO_OPTION ) ) {
1182 jacknife_ratio = cla.getOptionValueAsDouble( surfacing.JACKNIFE_RATIO_OPTION );
1184 catch ( final IOException e ) {
1185 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for jacknife ratio" );
1187 if ( ( jacknife_ratio <= 0.0 ) || ( jacknife_ratio >= 1.0 ) ) {
1188 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use illegal value for jacknife ratio: "
1192 if ( cla.isOptionSet( surfacing.JACKNIFE_RANDOM_SEED_OPTION )
1193 && cla.isOptionHasAValue( surfacing.JACKNIFE_RANDOM_SEED_OPTION ) ) {
1195 random_seed = cla.getOptionValueAsLong( surfacing.JACKNIFE_RANDOM_SEED_OPTION );
1197 catch ( final IOException e ) {
1198 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for random generator seed" );
1202 // boolean infer_species_trees = false;
1203 // if ( cla.isOptionSet( surfacing.INFER_SPECIES_TREES_OPTION ) ) {
1204 // if ( ( output_file == null ) || ( number_of_genomes < 3 )
1205 // || ForesterUtil.isEmpty( automated_pairwise_comparison_suffix ) ) {
1206 // ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer species trees (-"
1207 // + surfacing.INFER_SPECIES_TREES_OPTION + " without pairwise analyses ("
1208 // + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1209 // + "=<suffix for pairwise comparison output files>)" );
1211 // infer_species_trees = true;
1213 File[] intree_files = null;
1214 Phylogeny[] intrees = null;
1215 if ( cla.isOptionSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
1216 // TODO FIXME if jacknife.... maybe not
1217 if ( number_of_genomes < 3 ) {
1218 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer gains and losses on input species trees (-"
1219 + surfacing.INPUT_SPECIES_TREE_OPTION + " without pairwise analyses ("
1220 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1221 + "=<suffix for pairwise comparison output files>)" );
1223 if ( !cla.isOptionValueSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
1224 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input tree: -"
1225 + surfacing.INPUT_SPECIES_TREE_OPTION + "=<tree file in phyloXML format>" );
1227 final String intrees_str = cla.getOptionValue( surfacing.INPUT_SPECIES_TREE_OPTION );
1228 if ( intrees_str.indexOf( "#" ) > 0 ) {
1229 final String[] intrees_strs = intrees_str.split( "#" );
1230 intree_files = new File[ intrees_strs.length ];
1232 for( final String s : intrees_strs ) {
1233 intree_files[ i++ ] = new File( s.trim() );
1237 intree_files = new File[ 1 ];
1238 intree_files[ 0 ] = new File( intrees_str );
1240 intrees = getIntrees( intree_files, number_of_genomes, input_file_properties );
1242 long random_number_seed_for_fitch_parsimony = 0l;
1243 boolean radomize_fitch_parsimony = false;
1244 if ( cla.isOptionSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
1245 if ( !cla.isOptionValueSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
1246 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for random number seed: -"
1247 + surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION + "=<seed>" );
1250 random_number_seed_for_fitch_parsimony = cla
1251 .getOptionValueAsLong( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
1253 catch ( final IOException e ) {
1254 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1256 radomize_fitch_parsimony = true;
1258 SortedSet<DomainId> filter = null;
1259 if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
1260 || ( negative_domains_filter_file != null ) ) {
1261 filter = new TreeSet<DomainId>();
1262 if ( positive_filter_file != null ) {
1263 processFilter( positive_filter_file, filter );
1265 else if ( negative_filter_file != null ) {
1266 processFilter( negative_filter_file, filter );
1268 else if ( negative_domains_filter_file != null ) {
1269 processFilter( negative_domains_filter_file, filter );
1272 Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps = null;
1273 File[] secondary_features_map_files = null;
1274 final File domain_lengths_analysis_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1275 + DOMAIN_LENGTHS_ANALYSIS_SUFFIX );
1276 if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
1277 SurfacingUtil.checkForOutputFileWriteability( domain_lengths_analysis_outfile );
1279 if ( cla.isOptionSet( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ) ) {
1280 if ( !cla.isOptionValueSet( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ) ) {
1281 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for secondary features map file: -"
1282 + surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE + "=<file>" );
1284 final String[] secondary_features_map_files_strs = cla
1285 .getOptionValue( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ).split( "#" );
1286 secondary_features_map_files = new File[ secondary_features_map_files_strs.length ];
1287 domain_id_to_secondary_features_maps = new Map[ secondary_features_map_files_strs.length ];
1289 for( final String secondary_features_map_files_str : secondary_features_map_files_strs ) {
1290 secondary_features_map_files[ i ] = new File( secondary_features_map_files_str );
1291 final String error = ForesterUtil.isReadableFile( secondary_features_map_files[ i ] );
1292 if ( !ForesterUtil.isEmpty( error ) ) {
1293 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read secondary features map file: " + error );
1296 domain_id_to_secondary_features_maps[ i ] = SurfacingUtil
1297 .createDomainIdToSecondaryFeaturesMap( secondary_features_map_files[ i ] );
1299 catch ( final IOException e ) {
1300 ForesterUtil.fatalError( surfacing.PRG_NAME,
1301 "cannot read secondary features map file: " + e.getMessage() );
1303 catch ( final Exception e ) {
1304 ForesterUtil.fatalError( surfacing.PRG_NAME, "problem with contents of features map file ["
1305 + secondary_features_map_files[ i ] + "]: " + e.getMessage() );
1310 if ( out_dir == null ) {
1311 ForesterUtil.fatalError( surfacing.PRG_NAME, "no output directory indicated (-"
1312 + surfacing.OUTPUT_DIR_OPTION + "=<dir>)" );
1314 if ( output_file == null ) {
1315 ForesterUtil.fatalError( surfacing.PRG_NAME, "no name for (main) output file indicated (-"
1316 + surfacing.OUTPUT_FILE_OPTION + "=<file>)" );
1318 if ( ( domain_id_to_go_ids_map == null ) || domain_id_to_go_ids_map.isEmpty() ) {
1319 ForesterUtil.fatalError( surfacing.PRG_NAME,
1320 "no (acceptable) Pfam to GO id mapping file provided ('pfam2go file') (-"
1321 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>)" );
1323 if ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) {
1324 ForesterUtil.fatalError( surfacing.PRG_NAME,
1325 "no (acceptable) go id to term mapping file provided ('GO OBO file') (-"
1326 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
1328 System.out.println( "Output directory : " + out_dir );
1329 if ( input_file_names_from_file != null ) {
1330 System.out.println( "Input files names from : " + input_files_file + " ["
1331 + input_file_names_from_file.length + " input files]" );
1332 html_desc.append( "<tr><td>Input files names from:</td><td>" + input_files_file + " ["
1333 + input_file_names_from_file.length + " input files]</td></tr>" + nl );
1335 if ( positive_filter_file != null ) {
1336 final int filter_size = filter.size();
1337 System.out.println( "Positive protein filter : " + positive_filter_file + " [" + filter_size
1339 html_desc.append( "<tr><td>Positive protein filter:</td><td>" + positive_filter_file + " [" + filter_size
1340 + " domain ids]</td></tr>" + nl );
1342 if ( negative_filter_file != null ) {
1343 final int filter_size = filter.size();
1344 System.out.println( "Negative protein filter : " + negative_filter_file + " [" + filter_size
1346 html_desc.append( "<tr><td>Negative protein filter:</td><td>" + negative_filter_file + " [" + filter_size
1347 + " domain ids]</td></tr>" + nl );
1349 if ( negative_domains_filter_file != null ) {
1350 final int filter_size = filter.size();
1351 System.out.println( "Negative domain filter : " + negative_domains_filter_file + " [" + filter_size
1353 html_desc.append( "<tr><td>Negative domain filter:</td><td>" + negative_domains_filter_file + " ["
1354 + filter_size + " domain ids]</td></tr>" + nl );
1356 if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
1358 for( final String s : plus_minus_analysis_high_copy_base_species ) {
1359 plus0 += "+" + s + " ";
1362 for( final String s : plus_minus_analysis_high_copy_target_species ) {
1363 plus1 += "*" + s + " ";
1366 for( final String s : plus_minus_analysis_high_low_copy_species ) {
1367 minus += "-" + s + " ";
1369 System.out.println( "Plus-minus analysis : " + plus1 + "&& " + plus0 + "&& " + minus );
1370 html_desc.append( "<tr><td>Plus-minus analysis:</td><td>" + plus1 + "&& " + plus0 + "&& " + minus
1371 + "</td></tr>" + nl );
1373 if ( cutoff_scores_file != null ) {
1374 System.out.println( "Cutoff scores file : " + cutoff_scores_file );
1375 html_desc.append( "<tr><td>Cutoff scores file:</td><td>" + cutoff_scores_file + "</td></tr>" + nl );
1377 if ( e_value_max >= 0.0 ) {
1378 System.out.println( "E-value maximum (inclusive) : " + e_value_max );
1379 html_desc.append( "<tr><td>E-value maximum (inclusive):</td><td>" + e_value_max + "</td></tr>" + nl );
1381 System.out.println( "Ignore DUFs : " + ignore_dufs );
1382 if ( ignore_virus_like_ids ) {
1383 System.out.println( "Ignore virus like ids : " + ignore_virus_like_ids );
1384 html_desc.append( "<tr><td>Ignore virus, phage, transposition related ids:</td><td>"
1385 + ignore_virus_like_ids + "</td></tr>" + nl );
1387 html_desc.append( "<tr><td>Ignore DUFs:</td><td>" + ignore_dufs + "</td></tr>" + nl );
1388 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
1389 System.out.println( "Max allowed domain overlap : " + max_allowed_overlap );
1390 html_desc.append( "<tr><td>Max allowed domain overlap:</td><td>" + max_allowed_overlap + "</td></tr>" + nl );
1392 if ( no_engulfing_overlaps ) {
1393 System.out.println( "Ignore engulfed domains : " + no_engulfing_overlaps );
1394 html_desc.append( "<tr><td>Ignore (lower confidence) engulfed domains:</td><td>" + no_engulfing_overlaps
1395 + "</td></tr>" + nl );
1397 System.out.println( "Ignore singlet domains : " + ignore_domains_without_combs_in_all_spec );
1399 .append( "<tr><td>Ignore singlet domains for domain combination similarity analyses (not for parsimony analyses):</td><td>"
1400 + ignore_domains_without_combs_in_all_spec + "</td></tr>" + nl );
1401 System.out.println( "Ignore species specific doms: " + ignore_species_specific_domains );
1403 .append( "<tr><td>Ignore species specific domains for domain combination similarity analyses (not for parsimony analyses):</td><td>"
1404 + ignore_species_specific_domains + "</td></tr>" + nl );
1405 System.out.println( "Ignore combination with self: " + ignore_combination_with_same );
1406 html_desc.append( "<tr><td>Ignore combination with self for domain combination similarity analyses:</td><td>"
1407 + ignore_combination_with_same + "</td></tr>" + nl );
1409 System.out.println( "Consider directedness : "
1410 + ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) );
1411 html_desc.append( "<tr><td>Consider directedness of binary domain combinations:</td><td>"
1412 + ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) + "</td></tr>" + nl );
1413 if ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) {
1414 System.out.println( "Consider adjacency : "
1415 + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) );
1416 html_desc.append( "<tr><td>Consider djacency of binary domain combinations:</td><td>"
1417 + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) + "</td></tr>"
1420 System.out.print( "Domain counts sort order : " );
1421 switch ( dc_sort_order ) {
1422 case ALPHABETICAL_KEY_ID:
1423 System.out.println( "alphabetical" );
1425 case KEY_DOMAIN_COUNT:
1426 System.out.println( "domain count" );
1428 case KEY_DOMAIN_PROTEINS_COUNT:
1429 System.out.println( "domain proteins count" );
1431 case COMBINATIONS_COUNT:
1432 System.out.println( "domain combinations count" );
1435 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for dc sort order" );
1437 if ( domain_id_to_go_ids_map != null ) {
1438 System.out.println( "Pfam to GO mappings from : " + pfam_to_go_file + " [" + domain_id_to_go_ids_count
1440 html_desc.append( "<tr><td>Pfam to GO mappings from:</td><td>" + pfam_to_go_file + " ["
1441 + domain_id_to_go_ids_count + " mappings]" + "</td></tr>" + nl );
1443 if ( go_terms != null ) {
1444 System.out.println( "GO terms from : " + go_obo_file + " [" + go_terms.size() + " terms]" );
1445 html_desc.append( "<tr><td>GO terms from:</td><td>" + go_obo_file + " [" + go_terms.size() + " terms]"
1446 + "</td></tr>" + nl );
1448 if ( go_namespace_limit != null ) {
1449 System.out.println( "Limit GO terms to : " + go_namespace_limit.toString() );
1450 html_desc.append( "<tr><td>Limit GO terms to</td><td>" + go_namespace_limit + "</td></tr>" + nl );
1452 if ( perform_pwc ) {
1453 System.out.println( "Suffix for PWC files : " + automated_pairwise_comparison_suffix );
1454 html_desc.append( "<tr><td>Suffix for PWC files</td><td>" + automated_pairwise_comparison_suffix
1455 + "</td></tr>" + nl );
1457 if ( out_dir != null ) {
1458 System.out.println( "Output directory : " + out_dir );
1460 if ( query_domain_ids != null ) {
1461 System.out.println( "Query domains (ordered) : " + query_domain_ids );
1462 html_desc.append( "<tr><td></td><td>" + query_domain_ids + "</td></tr>" + nl );
1464 System.out.println( "Write similarities to : " + output_file );
1465 System.out.print( " Scoring method : " );
1466 html_desc.append( "<tr><td>Scoring method:</td><td>" );
1467 switch ( scoring ) {
1469 System.out.println( "domain combinations based" );
1470 html_desc.append( "domain combinations based" + "</td></tr>" + nl );
1473 System.out.println( "domain counts based" );
1474 html_desc.append( "domain counts based" + "</td></tr>" + nl );
1477 System.out.println( "domain proteins counts based" );
1478 html_desc.append( "domain proteins counts based" + "</td></tr>" + nl );
1481 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for scoring" );
1483 System.out.print( " Sort by : " );
1484 html_desc.append( "<tr><td>Sort by:</td><td>" );
1485 switch ( domain_similarity_sort_field ) {
1487 System.out.print( "score minimum" );
1488 html_desc.append( "score minimum" );
1491 System.out.print( "score maximum" );
1492 html_desc.append( "score maximum" );
1495 System.out.print( "score mean" );
1496 html_desc.append( "score mean" );
1499 System.out.print( "score standard deviation" );
1500 html_desc.append( "score standard deviation" );
1503 System.out.print( "species number" );
1504 html_desc.append( "species number" );
1507 System.out.print( "alphabetical domain identifier" );
1508 html_desc.append( "alphabetical domain identifier" );
1510 case MAX_DIFFERENCE:
1511 System.out.print( "(maximal) difference" );
1512 html_desc.append( "(maximal) difference" );
1514 case ABS_MAX_COUNTS_DIFFERENCE:
1515 System.out.print( "absolute (maximal) counts difference" );
1516 html_desc.append( "absolute (maximal) counts difference" );
1518 case MAX_COUNTS_DIFFERENCE:
1519 System.out.print( "(maximal) counts difference" );
1520 html_desc.append( "(maximal) counts difference" );
1523 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for similarities" );
1525 if ( sort_by_species_count_first ) {
1526 System.out.println( " (sort by species count first)" );
1527 html_desc.append( " (sort by species count first)" );
1530 System.out.println();
1532 html_desc.append( "</td></tr>" + nl );
1533 System.out.print( " Detailedness : " );
1534 switch ( detailedness ) {
1536 System.out.println( "basic" );
1538 case LIST_COMBINING_DOMAIN_FOR_EACH_SPECIES:
1539 System.out.println( "list combining domains for each species" );
1542 System.out.println( "punctilious" );
1545 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for detailedness" );
1547 System.out.print( " Print option : " );
1548 switch ( domain_similarity_print_option ) {
1550 System.out.println( "HTML" );
1552 case SIMPLE_TAB_DELIMITED:
1553 System.out.println( "simple tab delimited" );
1556 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for print option" );
1558 System.out.print( " Species matrix : " + species_matrix );
1559 System.out.println();
1560 final File dc_data_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file + DATA_FILE_SUFFIX );
1561 System.out.println( "Domain comb data output : " + dc_data_file );
1562 html_desc.append( "<tr><td>Domain combination data output:</td><td> " + dc_data_file + " </td></tr>" );
1563 System.out.println();
1564 if ( perform_pwc ) {
1565 System.out.println( "Pairwise comparisons: " );
1566 html_desc.append( "<tr><td>Pairwise comparisons:</td><td></td></tr>" );
1567 System.out.print( " Sort by : " );
1568 html_desc.append( "<tr><td>Sort by:</td><td>" );
1569 switch ( domain_similarity_sort_field_for_automated_pwc ) {
1571 System.out.print( "score mean" );
1572 html_desc.append( "score mean" );
1575 System.out.print( "alphabetical domain identifier" );
1576 html_desc.append( "alphabetical domain identifier" );
1578 case MAX_DIFFERENCE:
1579 System.out.print( "difference" );
1580 html_desc.append( "difference" );
1582 case ABS_MAX_COUNTS_DIFFERENCE:
1583 System.out.print( "absolute counts difference" );
1584 html_desc.append( "absolute counts difference" );
1586 case MAX_COUNTS_DIFFERENCE:
1587 System.out.print( "counts difference" );
1588 html_desc.append( "counts difference" );
1592 .unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for similarities" );
1594 System.out.println();
1595 html_desc.append( "</td></tr>" + nl );
1596 if ( jacknifed_distances ) {
1597 html_desc.append( "<tr><td>Jacknife:</td><td>" + jacknife_resamplings + " resamplings</td></tr>" + nl );
1598 html_desc.append( "<tr><td>Jacknife ratio:</td><td>" + ForesterUtil.round( jacknife_ratio, 2 )
1599 + "</td></tr>" + nl );
1600 html_desc.append( "<tr><td>Jacknife random number seed:</td><td>" + random_seed + "</td></tr>" + nl );
1601 System.out.println( " Jacknife : " + jacknife_resamplings + " resamplings" );
1602 System.out.println( " Ratio : " + ForesterUtil.round( jacknife_ratio, 2 ) );
1603 System.out.println( " Random number seed : " + random_seed );
1605 // if ( infer_species_trees ) {
1606 // html_desc.append( "<tr><td>Infer species trees:</td><td>true</td></tr>" + nl );
1607 // System.out.println( " Infer species trees : true" );
1609 if ( ( intrees != null ) && ( intrees.length > 0 ) ) {
1610 for( final File intree_file : intree_files ) {
1611 html_desc.append( "<tr><td>Intree for gain/loss parsimony analysis:</td><td>" + intree_file
1612 + "</td></tr>" + nl );
1613 System.out.println( " Intree for gain/loss pars.: " + intree_file );
1616 if ( radomize_fitch_parsimony ) {
1617 html_desc.append( "<tr><td> Random number seed for Fitch parsimony analysis:</td><td>"
1618 + random_number_seed_for_fitch_parsimony + "</td></tr>" + nl );
1619 System.out.println( " Random number seed : " + random_number_seed_for_fitch_parsimony );
1621 if ( ( domain_id_to_secondary_features_maps != null ) && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
1622 for( int i = 0; i < secondary_features_map_files.length; i++ ) {
1623 html_desc.append( "<tr><td>Secondary features map file:</td><td>"
1624 + secondary_features_map_files[ i ] + "</td></tr>" + nl );
1625 System.out.println( "Secondary features map file : " + secondary_features_map_files[ i ]
1626 + " [mappings for " + domain_id_to_secondary_features_maps[ i ].size() + " domain ids]" );
1628 System.out.println();
1629 System.out.println( "Domain ids to secondary features map:" );
1630 for( final DomainId domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
1631 System.out.print( domain_id.getId() );
1632 System.out.print( " => " );
1633 for( final String sec : domain_id_to_secondary_features_maps[ i ].get( domain_id ) ) {
1634 System.out.print( sec );
1635 System.out.print( " " );
1637 System.out.println();
1642 } // if ( perform_pwc ) {
1643 System.out.println();
1644 html_desc.append( "<tr><td>Command line:</td><td>\n" + cla.getCommandLineArgsAsString() + "\n</td></tr>" + nl );
1645 System.out.println( "Command line : " + cla.getCommandLineArgsAsString() );
1646 BufferedWriter[] query_domains_writer_ary = null;
1647 List<DomainId>[] query_domain_ids_array = null;
1648 if ( query_domain_ids != null ) {
1649 final String[] query_domain_ids_str_array = query_domain_ids.split( "#" );
1650 query_domain_ids_array = new ArrayList[ query_domain_ids_str_array.length ];
1651 query_domains_writer_ary = new BufferedWriter[ query_domain_ids_str_array.length ];
1652 for( int i = 0; i < query_domain_ids_str_array.length; i++ ) {
1653 String query_domain_ids_str = query_domain_ids_str_array[ i ];
1654 final String[] query_domain_ids_str_ary = query_domain_ids_str.split( "~" );
1655 final List<DomainId> query = new ArrayList<DomainId>();
1656 for( final String element : query_domain_ids_str_ary ) {
1657 query.add( new DomainId( element ) );
1659 query_domain_ids_array[ i ] = query;
1660 query_domain_ids_str = query_domain_ids_str.replace( '~', '_' );
1661 String protein_names_writer_str = query_domain_ids_str + surfacing.SEQ_EXTRACT_SUFFIX;
1662 if ( out_dir != null ) {
1663 protein_names_writer_str = out_dir + ForesterUtil.FILE_SEPARATOR + protein_names_writer_str;
1666 query_domains_writer_ary[ i ] = new BufferedWriter( new FileWriter( protein_names_writer_str ) );
1668 catch ( final IOException e ) {
1669 ForesterUtil.fatalError( surfacing.PRG_NAME, "Could not open [" + protein_names_writer_str + "]: "
1670 + e.getLocalizedMessage() );
1674 SortedMap<Species, List<Protein>> protein_lists_per_species = null; // This will only be created if needed.
1675 boolean need_protein_lists_per_species = false;
1676 if ( ( plus_minus_analysis_high_copy_base_species.size() > 0 ) || output_protein_lists_for_all_domains ) {
1677 need_protein_lists_per_species = true;
1679 if ( need_protein_lists_per_species ) {
1680 protein_lists_per_species = new TreeMap<Species, List<Protein>>();
1682 final List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
1683 final SortedSet<DomainId> all_domains_encountered = new TreeSet<DomainId>();
1684 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered = new TreeSet<BinaryDomainCombination>();
1685 List<BinaryDomainCombination> all_bin_domain_combinations_gained_fitch = null;
1686 List<BinaryDomainCombination> all_bin_domain_combinations_lost_fitch = null;
1687 if ( ( intrees != null ) && ( intrees.length == 1 ) ) {
1688 all_bin_domain_combinations_gained_fitch = new ArrayList<BinaryDomainCombination>();
1689 all_bin_domain_combinations_lost_fitch = new ArrayList<BinaryDomainCombination>();
1691 final DomainLengthsTable domain_lengths_table = new DomainLengthsTable();
1692 final File per_genome_domain_promiscuity_statistics_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR
1693 + output_file + D_PROMISCUITY_FILE_SUFFIX );
1694 BufferedWriter per_genome_domain_promiscuity_statistics_writer = null;
1696 per_genome_domain_promiscuity_statistics_writer = new BufferedWriter( new FileWriter( per_genome_domain_promiscuity_statistics_file ) );
1697 per_genome_domain_promiscuity_statistics_writer.write( "Species:\t" );
1698 per_genome_domain_promiscuity_statistics_writer.write( "Mean:\t" );
1699 per_genome_domain_promiscuity_statistics_writer.write( "SD:\t" );
1700 per_genome_domain_promiscuity_statistics_writer.write( "Median:\t" );
1701 per_genome_domain_promiscuity_statistics_writer.write( "Min:\t" );
1702 per_genome_domain_promiscuity_statistics_writer.write( "Max:\t" );
1703 per_genome_domain_promiscuity_statistics_writer.write( "N:\t" );
1704 per_genome_domain_promiscuity_statistics_writer.write( "Max Promiscuous Domains:"
1705 + ForesterUtil.LINE_SEPARATOR );
1707 catch ( final IOException e2 ) {
1708 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1710 final File log_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file + LOG_FILE_SUFFIX );
1711 BufferedWriter log_writer = null;
1713 log_writer = new BufferedWriter( new FileWriter( log_file ) );
1715 catch ( final IOException e2 ) {
1716 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1718 BufferedWriter dc_data_writer = null;
1720 dc_data_writer = new BufferedWriter( new FileWriter( dc_data_file ) );
1721 dc_data_writer.write( DATA_FILE_DESC );
1722 dc_data_writer.write( ForesterUtil.LINE_SEPARATOR );
1724 catch ( final IOException e2 ) {
1725 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1727 final DescriptiveStatistics protein_coverage_stats = new BasicDescriptiveStatistics();
1728 final DescriptiveStatistics all_genomes_domains_per_potein_stats = new BasicDescriptiveStatistics();
1729 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo = new TreeMap<Integer, Integer>();
1730 final SortedSet<String> domains_which_are_always_single = new TreeSet<String>();
1731 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not = new TreeSet<String>();
1732 final SortedSet<String> domains_which_never_single = new TreeSet<String>();
1733 BufferedWriter domains_per_potein_stats_writer = null;
1735 domains_per_potein_stats_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
1736 + output_file + "__domains_per_potein_stats.txt" ) );
1737 domains_per_potein_stats_writer.write( "Genome" );
1738 domains_per_potein_stats_writer.write( "\t" );
1739 domains_per_potein_stats_writer.write( "Mean" );
1740 domains_per_potein_stats_writer.write( "\t" );
1741 domains_per_potein_stats_writer.write( "SD" );
1742 domains_per_potein_stats_writer.write( "\t" );
1743 domains_per_potein_stats_writer.write( "Median" );
1744 domains_per_potein_stats_writer.write( "\t" );
1745 domains_per_potein_stats_writer.write( "N" );
1746 domains_per_potein_stats_writer.write( "\t" );
1747 domains_per_potein_stats_writer.write( "Min" );
1748 domains_per_potein_stats_writer.write( "\t" );
1749 domains_per_potein_stats_writer.write( "Max" );
1750 domains_per_potein_stats_writer.write( "\n" );
1752 catch ( final IOException e3 ) {
1753 e3.printStackTrace();
1756 for( int i = 0; i < number_of_genomes; ++i ) {
1757 System.out.println();
1758 System.out.println( ( i + 1 ) + "/" + number_of_genomes );
1759 log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
1760 System.out.println( "Processing : " + input_file_properties[ i ][ 0 ] );
1761 log( "Genome : " + input_file_properties[ i ][ 0 ], log_writer );
1762 HmmscanPerDomainTableParser parser = null;
1763 INDIVIDUAL_SCORE_CUTOFF ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF.NONE;
1764 if ( individual_score_cutoffs != null ) {
1765 ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF_DEFAULT;
1767 if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
1768 || ( negative_domains_filter_file != null ) ) {
1769 HmmscanPerDomainTableParser.FilterType filter_type = HmmscanPerDomainTableParser.FilterType.NONE;
1770 if ( positive_filter_file != null ) {
1771 filter_type = HmmscanPerDomainTableParser.FilterType.POSITIVE_PROTEIN;
1773 else if ( negative_filter_file != null ) {
1774 filter_type = HmmscanPerDomainTableParser.FilterType.NEGATIVE_PROTEIN;
1776 else if ( negative_domains_filter_file != null ) {
1777 filter_type = HmmscanPerDomainTableParser.FilterType.NEGATIVE_DOMAIN;
1779 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
1780 input_file_properties[ i ][ 1 ],
1787 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
1788 input_file_properties[ i ][ 1 ],
1792 if ( e_value_max >= 0.0 ) {
1793 parser.setEValueMaximum( e_value_max );
1795 parser.setIgnoreDufs( ignore_dufs );
1796 parser.setIgnoreVirusLikeIds( ignore_virus_like_ids );
1797 parser.setIgnoreEngulfedDomains( no_engulfing_overlaps );
1798 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
1799 parser.setMaxAllowedOverlap( max_allowed_overlap );
1801 parser.setReturnType( HmmscanPerDomainTableParser.ReturnType.UNORDERED_PROTEIN_DOMAIN_COLLECTION_PER_PROTEIN );
1802 if ( individual_score_cutoffs != null ) {
1803 parser.setIndividualScoreCutoffs( individual_score_cutoffs );
1805 List<Protein> protein_list = null;
1807 protein_list = parser.parse();
1809 catch ( final IOException e ) {
1810 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1812 catch ( final Exception e ) {
1813 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, e.getMessage(), e );
1816 System.out.println( "Domains ignored due to negative domain filter: " );
1817 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToNegativeDomainFilterCountsMap() );
1818 System.out.println( "Domains ignored due to virus like id: " );
1819 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToVirusLikeIdCountsMap() );
1821 final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
1822 protein_coverage_stats.addValue( coverage );
1823 System.out.println( "Number of proteins encountered : " + parser.getProteinsEncountered() );
1824 log( "Number of proteins encountered : " + parser.getProteinsEncountered(), log_writer );
1825 System.out.println( "Number of proteins stored : " + protein_list.size() );
1826 log( "Number of proteins stored : " + protein_list.size(), log_writer );
1827 System.out.println( "Coverage : "
1828 + ForesterUtil.roundToInt( 100.0 * coverage ) + "%" );
1829 log( "Coverage : " + ForesterUtil.roundToInt( 100.0 * coverage )
1830 + "%", log_writer );
1831 System.out.println( "Domains encountered : " + parser.getDomainsEncountered() );
1832 log( "Domains encountered : " + parser.getDomainsEncountered(), log_writer );
1833 System.out.println( "Domains stored : " + parser.getDomainsStored() );
1834 log( "Domains stored : " + parser.getDomainsStored(), log_writer );
1835 System.out.println( "Distinct domains stored : "
1836 + parser.getDomainsStoredSet().size() );
1837 log( "Distinct domains stored : " + parser.getDomainsStoredSet().size(), log_writer );
1838 System.out.println( "Domains ignored due to individual score cutoffs: "
1839 + parser.getDomainsIgnoredDueToIndividualScoreCutoff() );
1840 log( "Domains ignored due to individual score cutoffs: "
1841 + parser.getDomainsIgnoredDueToIndividualScoreCutoff(),
1843 System.out.println( "Domains ignored due to E-value : "
1844 + parser.getDomainsIgnoredDueToEval() );
1845 log( "Domains ignored due to E-value : " + parser.getDomainsIgnoredDueToEval(), log_writer );
1846 System.out.println( "Domains ignored due to DUF designation : "
1847 + parser.getDomainsIgnoredDueToDuf() );
1848 log( "Domains ignored due to DUF designation : " + parser.getDomainsIgnoredDueToDuf(), log_writer );
1849 if ( ignore_virus_like_ids ) {
1850 System.out.println( "Domains ignored due virus like ids : "
1851 + parser.getDomainsIgnoredDueToVirusLikeIds() );
1852 log( "Domains ignored due virus like ids : " + parser.getDomainsIgnoredDueToVirusLikeIds(),
1855 System.out.println( "Domains ignored due negative domain filter : "
1856 + parser.getDomainsIgnoredDueToNegativeDomainFilter() );
1857 log( "Domains ignored due negative domain filter : "
1858 + parser.getDomainsIgnoredDueToNegativeDomainFilter(),
1860 System.out.println( "Domains ignored due to overlap : "
1861 + parser.getDomainsIgnoredDueToOverlap() );
1862 log( "Domains ignored due to overlap : " + parser.getDomainsIgnoredDueToOverlap(),
1864 if ( negative_filter_file != null ) {
1865 System.out.println( "Proteins ignored due to negative filter : "
1866 + parser.getProteinsIgnoredDueToFilter() );
1867 log( "Proteins ignored due to negative filter : " + parser.getProteinsIgnoredDueToFilter(),
1870 if ( positive_filter_file != null ) {
1871 System.out.println( "Proteins ignored due to positive filter : "
1872 + parser.getProteinsIgnoredDueToFilter() );
1873 log( "Proteins ignored due to positive filter : " + parser.getProteinsIgnoredDueToFilter(),
1876 System.out.println( "Time for processing : " + parser.getTime() + "ms" );
1877 log( "", log_writer );
1878 html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
1879 + input_file_properties[ i ][ 1 ] + "]" + ":</td><td>domains analyzed: "
1880 + parser.getDomainsStored() + "; domains ignored: [ind score cutoffs: "
1881 + parser.getDomainsIgnoredDueToIndividualScoreCutoff() + "] [E-value cutoff: "
1882 + parser.getDomainsIgnoredDueToEval() + "] [DUF: " + parser.getDomainsIgnoredDueToDuf()
1883 + "] [virus like ids: " + parser.getDomainsIgnoredDueToVirusLikeIds()
1884 + "] [negative domain filter: " + parser.getDomainsIgnoredDueToNegativeDomainFilter()
1885 + "] [overlap: " + parser.getDomainsIgnoredDueToOverlap() + "]" );
1886 if ( negative_filter_file != null ) {
1887 html_desc.append( "; proteins ignored due to negative filter: "
1888 + parser.getProteinsIgnoredDueToFilter() );
1890 if ( positive_filter_file != null ) {
1891 html_desc.append( "; proteins ignored due to positive filter: "
1892 + parser.getProteinsIgnoredDueToFilter() );
1894 html_desc.append( "</td></tr>" + nl );
1895 // domain_partner_counts_array[ i ] =
1896 // Methods.getDomainPartnerCounts( protein_domain_collections_array[
1898 // false, input_file_properties[ i ][ 1 ] );
1901 for( final Protein protein : protein_list ) {
1902 dc_data_writer.write( SurfacingUtil.proteinToDomainCombinations( protein, count + "", "\t" )
1907 catch ( final IOException e ) {
1908 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
1910 SurfacingUtil.domainsPerProteinsStatistics( input_file_properties[ i ][ 1 ],
1912 all_genomes_domains_per_potein_stats,
1913 all_genomes_domains_per_potein_histo,
1914 domains_which_are_always_single,
1915 domains_which_are_sometimes_single_sometimes_not,
1916 domains_which_never_single,
1917 domains_per_potein_stats_writer );
1918 gwcd_list.add( BasicGenomeWideCombinableDomains
1919 .createInstance( protein_list,
1920 ignore_combination_with_same,
1921 new BasicSpecies( input_file_properties[ i ][ 1 ] ),
1922 domain_id_to_go_ids_map,
1924 domain_lengths_table.addLengths( protein_list );
1925 if ( gwcd_list.get( i ).getSize() > 0 ) {
1926 SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
1928 per_genome_domain_promiscuity_statistics_writer,
1932 if ( output_binary_domain_combinationsfor_graph_analysis ) {
1933 SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
1939 SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
1940 SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
1941 all_bin_domain_combinations_encountered );
1943 if ( query_domains_writer_ary != null ) {
1944 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
1946 SurfacingUtil.extractProteinNames( protein_list,
1947 query_domain_ids_array[ j ],
1948 query_domains_writer_ary[ j ],
1950 query_domains_writer_ary[ j ].flush();
1952 catch ( final IOException e ) {
1953 e.printStackTrace();
1957 if ( need_protein_lists_per_species ) {
1958 protein_lists_per_species.put( new BasicSpecies( input_file_properties[ i ][ 1 ] ), protein_list );
1963 catch ( final IOException e2 ) {
1964 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
1967 } // for( int i = 0; i < number_of_genomes; ++i ) {
1968 ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
1969 + per_genome_domain_promiscuity_statistics_file );
1972 domains_per_potein_stats_writer.write( "ALL" );
1973 domains_per_potein_stats_writer.write( "\t" );
1974 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.arithmeticMean() + "" );
1975 domains_per_potein_stats_writer.write( "\t" );
1976 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.sampleStandardDeviation() + "" );
1977 domains_per_potein_stats_writer.write( "\t" );
1978 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.median() + "" );
1979 domains_per_potein_stats_writer.write( "\t" );
1980 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getN() + "" );
1981 domains_per_potein_stats_writer.write( "\t" );
1982 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMin() + "" );
1983 domains_per_potein_stats_writer.write( "\t" );
1984 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMax() + "" );
1985 domains_per_potein_stats_writer.write( "\n" );
1986 domains_per_potein_stats_writer.close();
1987 printOutPercentageOfMultidomainProteins( all_genomes_domains_per_potein_histo, log_writer );
1988 ForesterUtil.map2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1989 + "__all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
1990 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1991 + "__domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
1992 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1993 + "__domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
1994 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1995 + "__domains_always_combined.txt" ), domains_which_never_single, "\n" );
1996 ForesterUtil.programMessage( PRG_NAME,
1997 "Average of proteins with a least one domain assigned: "
1998 + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
1999 + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
2000 ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: " + 100
2001 * protein_coverage_stats.getMin() + "%-" + 100 * protein_coverage_stats.getMax() + "%" );
2002 log( "Average of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.arithmeticMean() )
2003 + "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
2004 log( "Range of prot with a least one dom assigned : " + 100 * protein_coverage_stats.getMin() + "%-"
2005 + 100 * protein_coverage_stats.getMax() + "%", log_writer );
2007 catch ( final IOException e2 ) {
2008 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2010 if ( query_domains_writer_ary != null ) {
2011 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
2013 query_domains_writer_ary[ j ].close();
2015 catch ( final IOException e ) {
2016 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
2021 per_genome_domain_promiscuity_statistics_writer.close();
2022 dc_data_writer.close();
2025 catch ( final IOException e2 ) {
2026 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2028 if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
2030 SurfacingUtil.executeDomainLengthAnalysis( input_file_properties,
2032 domain_lengths_table,
2033 domain_lengths_analysis_outfile );
2035 catch ( final IOException e1 ) {
2036 ForesterUtil.fatalError( surfacing.PRG_NAME, e1.toString() );
2038 System.out.println();
2039 ForesterUtil.programMessage( PRG_NAME, "Wrote domain length data to: " + domain_lengths_analysis_outfile );
2040 System.out.println();
2042 final long analysis_start_time = new Date().getTime();
2043 PairwiseDomainSimilarityCalculator pw_calc = null;
2044 // double[] values_for_all_scores_histogram = null;
2045 final DomainSimilarityCalculator calc = new BasicDomainSimilarityCalculator( domain_similarity_sort_field,
2046 sort_by_species_count_first,
2047 number_of_genomes == 2 );
2048 switch ( scoring ) {
2050 pw_calc = new CombinationsBasedPairwiseDomainSimilarityCalculator();
2053 pw_calc = new DomainCountsBasedPairwiseSimilarityCalculator();
2056 pw_calc = new ProteinCountsBasedPairwiseDomainSimilarityCalculator();
2059 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for scoring" );
2061 DomainSimilarityCalculator.GoAnnotationOutput go_annotation_output = DomainSimilarityCalculator.GoAnnotationOutput.NONE;
2062 if ( domain_id_to_go_ids_map != null ) {
2063 go_annotation_output = DomainSimilarityCalculator.GoAnnotationOutput.ALL;
2065 final SortedSet<DomainSimilarity> similarities = calc
2066 .calculateSimilarities( pw_calc,
2068 ignore_domains_without_combs_in_all_spec,
2069 ignore_species_specific_domains );
2070 SurfacingUtil.decoratePrintableDomainSimilarities( similarities,
2072 go_annotation_output,
2074 go_namespace_limit );
2075 DescriptiveStatistics pw_stats = null;
2077 String my_outfile = output_file.toString();
2078 Map<Character, Writer> split_writers = null;
2079 Writer writer = null;
2080 if ( similarities.size() > MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING ) {
2081 if ( my_outfile.endsWith( ".html" ) ) {
2082 my_outfile = my_outfile.substring( 0, my_outfile.length() - 5 );
2084 split_writers = new HashMap<Character, Writer>();
2085 createSplitWriters( out_dir, my_outfile, split_writers );
2087 else if ( !my_outfile.endsWith( ".html" ) ) {
2088 my_outfile += ".html";
2089 writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) );
2091 List<Species> species_order = null;
2092 if ( species_matrix ) {
2093 species_order = new ArrayList<Species>();
2094 for( int i = 0; i < number_of_genomes; i++ ) {
2095 species_order.add( new BasicSpecies( input_file_properties[ i ][ 1 ] ) );
2098 html_desc.append( "<tr><td>Sum of all distinct binary combinations:</td><td>"
2099 + all_bin_domain_combinations_encountered.size() + "</td></tr>" + nl );
2100 html_desc.append( "<tr><td>Sum of all distinct domains:</td><td>" + all_domains_encountered.size()
2101 + "</td></tr>" + nl );
2102 html_desc.append( "<tr><td>Analysis date/time:</td><td>"
2103 + new java.text.SimpleDateFormat( "yyyy.MM.dd HH:mm:ss" ).format( new java.util.Date() )
2104 + "</td></tr>" + nl );
2105 html_desc.append( "</table>" + nl );
2106 pw_stats = SurfacingUtil
2107 .writeDomainSimilaritiesToFile( html_desc,
2108 new StringBuilder( number_of_genomes + " genomes" ),
2112 number_of_genomes == 2,
2114 domain_similarity_print_option,
2115 domain_similarity_sort_field,
2118 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote main output (includes domain similarities) to: \""
2119 + ( out_dir == null ? my_outfile : out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) + "\"" );
2121 catch ( final IOException e ) {
2122 ForesterUtil.fatalError( surfacing.PRG_NAME, "Failed to write similarites to: \"" + output_file + "\" ["
2123 + e.getMessage() + "]" );
2125 System.out.println();
2126 // values_for_all_scores_histogram = pw_stats.getDataAsDoubleArray();
2127 final Species[] species = new Species[ number_of_genomes ];
2128 for( int i = 0; i < number_of_genomes; ++i ) {
2129 species[ i ] = new BasicSpecies( input_file_properties[ i ][ 1 ] );
2131 List<Phylogeny> inferred_trees = null;
2132 if ( ( number_of_genomes > 2 ) && perform_pwc ) {
2133 final PairwiseGenomeComparator pwgc = new PairwiseGenomeComparator();
2134 pwgc.performPairwiseComparisons( html_desc,
2135 sort_by_species_count_first,
2137 ignore_domains_without_combs_in_all_spec,
2138 ignore_species_specific_domains,
2139 domain_similarity_sort_field_for_automated_pwc,
2140 domain_similarity_print_option,
2142 domain_id_to_go_ids_map,
2149 automated_pairwise_comparison_suffix,
2151 surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX,
2155 String matrix_output_file = new String( output_file.toString() );
2156 if ( matrix_output_file.indexOf( '.' ) > 1 ) {
2157 matrix_output_file = matrix_output_file.substring( 0, matrix_output_file.indexOf( '.' ) );
2159 if ( out_dir != null ) {
2160 matrix_output_file = out_dir + ForesterUtil.FILE_SEPARATOR + matrix_output_file;
2161 output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
2163 SurfacingUtil.writeMatrixToFile( new File( matrix_output_file
2164 + surfacing.MATRIX_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX ), pwgc.getDomainDistanceScoresMeans() );
2166 .writeMatrixToFile( new File( matrix_output_file
2167 + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
2168 pwgc.getSharedBinaryCombinationsBasedDistances() );
2169 SurfacingUtil.writeMatrixToFile( new File( matrix_output_file
2170 + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
2171 pwgc.getSharedDomainsBasedDistances() );
2172 final Phylogeny nj_gd = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2173 + surfacing.NJ_TREE_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX ), pwgc.getDomainDistanceScoresMeans()
2175 final Phylogeny nj_bc = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2176 + surfacing.NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ), pwgc
2177 .getSharedBinaryCombinationsBasedDistances().get( 0 ) );
2178 final Phylogeny nj_d = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2179 + surfacing.NJ_TREE_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ), pwgc
2180 .getSharedDomainsBasedDistances().get( 0 ) );
2181 inferred_trees = new ArrayList<Phylogeny>();
2182 inferred_trees.add( nj_gd );
2183 inferred_trees.add( nj_bc );
2184 inferred_trees.add( nj_d );
2185 if ( jacknifed_distances ) {
2186 pwgc.performPairwiseComparisonsJacknifed( species,
2190 jacknife_resamplings,
2194 .writeMatrixToFile( new File( matrix_output_file
2196 + ForesterUtil.round( jacknife_ratio, 2 )
2198 + jacknife_resamplings
2199 + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
2200 pwgc.getSharedBinaryCombinationsBasedDistances() );
2202 .writeMatrixToFile( new File( matrix_output_file + "_" + ForesterUtil.round( jacknife_ratio, 2 )
2203 + "_" + jacknife_resamplings
2204 + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
2205 pwgc.getSharedDomainsBasedDistances() );
2206 // if ( infer_species_trees ) {
2207 // inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
2208 // + INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc
2209 // .getSharedBinaryCombinationsBasedDistances() );
2210 // inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
2211 // + INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc.getSharedDomainsBasedDistances() );
2214 } // if ( ( output_file != null ) && ( number_of_genomes > 2 ) && !isEmpty( automated_pairwise_comparison_suffix ) )
2215 if ( ( out_dir != null ) && ( !perform_pwc ) ) {
2216 output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
2218 writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
2219 if ( ( ( intrees != null ) && ( intrees.length > 0 ) ) && ( number_of_genomes > 2 ) ) {
2220 final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
2222 max_allowed_overlap,
2223 no_engulfing_overlaps,
2227 if ( radomize_fitch_parsimony ) {
2228 s += random_number_seed_for_fitch_parsimony + "_";
2231 for( final Phylogeny intree : intrees ) {
2232 final String outfile_name = ForesterUtil.removeSuffix( output_file.toString() ) + s
2233 + ForesterUtil.removeSuffix( intree_files[ i ].toString() );
2234 final DomainParsimonyCalculator domain_parsimony = DomainParsimonyCalculator.createInstance( intree,
2236 SurfacingUtil.executeParsimonyAnalysis( random_number_seed_for_fitch_parsimony,
2237 radomize_fitch_parsimony,
2241 domain_id_to_go_ids_map,
2244 parameters_sb.toString(),
2245 domain_id_to_secondary_features_maps,
2246 positive_filter_file == null ? null : filter,
2247 output_binary_domain_combinationsfor_graph_analysis,
2248 all_bin_domain_combinations_gained_fitch,
2249 all_bin_domain_combinations_lost_fitch,
2251 // Listing of all domain combinations gained is only done if only one input tree is used.
2252 if ( ( domain_id_to_secondary_features_maps != null )
2253 && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
2255 for( final Map<DomainId, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
2256 final Map<Species, MappingResults> mapping_results_map = new TreeMap<Species, MappingResults>();
2257 final DomainParsimonyCalculator secondary_features_parsimony = DomainParsimonyCalculator
2258 .createInstance( intree, gwcd_list, domain_id_to_secondary_features_map );
2260 .executeParsimonyAnalysisForSecondaryFeatures( outfile_name
2262 + secondary_features_map_files[ j++ ],
2263 secondary_features_parsimony,
2265 parameters_sb.toString(),
2266 mapping_results_map );
2268 System.out.println();
2269 System.out.println( "Mapping to secondary features:" );
2270 for( final Species spec : mapping_results_map.keySet() ) {
2271 final MappingResults mapping_results = mapping_results_map.get( spec );
2272 final int total_domains = mapping_results.getSumOfFailures()
2273 + mapping_results.getSumOfSuccesses();
2274 System.out.print( spec + ":" );
2275 System.out.print( " mapped domains = " + mapping_results.getSumOfSuccesses() );
2276 System.out.print( ", not mapped domains = " + mapping_results.getSumOfFailures() );
2277 if ( total_domains > 0 ) {
2278 System.out.println( ", mapped ratio = "
2279 + ( 100 * mapping_results.getSumOfSuccesses() / total_domains ) + "%" );
2282 System.out.println( ", mapped ratio = n/a (total domains = 0 )" );
2289 } // for( final Phylogeny intree : intrees ) {
2291 if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
2292 executePlusMinusAnalysis( output_file,
2293 plus_minus_analysis_high_copy_base_species,
2294 plus_minus_analysis_high_copy_target_species,
2295 plus_minus_analysis_high_low_copy_species,
2297 protein_lists_per_species,
2298 domain_id_to_go_ids_map,
2300 plus_minus_analysis_numbers );
2302 if ( output_protein_lists_for_all_domains ) {
2303 writeProteinListsForAllSpecies( out_dir, protein_lists_per_species, gwcd_list );
2305 // if ( ( intrees != null ) && ( intrees.length > 0 ) && ( inferred_trees != null ) && ( inferred_trees.size() > 0 ) ) {
2306 // final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
2308 // max_allowed_overlap,
2309 // no_engulfing_overlaps,
2310 // cutoff_scores_file );
2312 // if ( radomize_fitch_parsimony ) {
2313 // s += random_number_seed_for_fitch_parsimony + "_";
2316 // for( final Phylogeny inferred_tree : inferred_trees ) {
2317 // if ( !inferred_tree.isRooted() ) {
2318 // intrees[ 0 ].getRoot().getName();
2321 // final String outfile_name = ForesterUtil.removeSuffix( inferred_tree.getName() ) + s;
2322 // final DomainParsimonyCalculator domain_parsimony = DomainParsimonyCalculator
2323 // .createInstance( inferred_tree, gwcd_list );
2324 // SurfacingUtil.executeParsimonyAnalysis( random_number_seed_for_fitch_parsimony,
2325 // radomize_fitch_parsimony,
2327 // domain_parsimony,
2329 // domain_id_to_go_ids_map,
2330 // go_id_to_term_map,
2331 // go_namespace_limit,
2332 // parameters_sb.toString() );
2336 if ( all_bin_domain_combinations_gained_fitch != null ) {
2338 executeFitchGainsAnalysis( new File( output_file
2339 + surfacing.OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
2340 all_bin_domain_combinations_gained_fitch,
2341 all_domains_encountered.size(),
2342 all_bin_domain_combinations_encountered,
2345 catch ( final IOException e ) {
2346 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
2349 if ( all_bin_domain_combinations_lost_fitch != null ) {
2351 executeFitchGainsAnalysis( new File( output_file
2352 + surfacing.OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
2353 all_bin_domain_combinations_lost_fitch,
2354 all_domains_encountered.size(),
2355 all_bin_domain_combinations_encountered,
2358 catch ( final IOException e ) {
2359 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
2362 final Runtime rt = java.lang.Runtime.getRuntime();
2363 final long free_memory = rt.freeMemory() / 1000000;
2364 final long total_memory = rt.totalMemory() / 1000000;
2365 System.out.println();
2366 System.out.println( "Time for analysis : " + ( new Date().getTime() - analysis_start_time ) + "ms" );
2367 System.out.println( "Total running time: " + ( new Date().getTime() - start_time ) + "ms " );
2368 System.out.println( "Free memory : " + free_memory + "MB, total memory: " + total_memory + "MB" );
2369 System.out.println();
2370 System.out.println( "If this application is useful to you, please cite:" );
2371 System.out.println( surfacing.WWW );
2372 System.out.println();
2373 ForesterUtil.programMessage( PRG_NAME, "OK" );
2374 System.out.println();
2377 private static void createSplitWriters( final File out_dir,
2378 final String my_outfile,
2379 final Map<Character, Writer> split_writers ) throws IOException {
2380 split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2381 + "_domains_A.html" ) ) );
2382 split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2383 + "_domains_B.html" ) ) );
2384 split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2385 + "_domains_C.html" ) ) );
2386 split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2387 + "_domains_D.html" ) ) );
2388 split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2389 + "_domains_E.html" ) ) );
2390 split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2391 + "_domains_F.html" ) ) );
2392 split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2393 + "_domains_G.html" ) ) );
2394 split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2395 + "_domains_H.html" ) ) );
2396 split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2397 + "_domains_I.html" ) ) );
2398 split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2399 + "_domains_J.html" ) ) );
2400 split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2401 + "_domains_K.html" ) ) );
2402 split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2403 + "_domains_L.html" ) ) );
2404 split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2405 + "_domains_M.html" ) ) );
2406 split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2407 + "_domains_N.html" ) ) );
2408 split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2409 + "_domains_O.html" ) ) );
2410 split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2411 + "_domains_P.html" ) ) );
2412 split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2413 + "_domains_Q.html" ) ) );
2414 split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2415 + "_domains_R.html" ) ) );
2416 split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2417 + "_domains_S.html" ) ) );
2418 split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2419 + "_domains_T.html" ) ) );
2420 split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2421 + "_domains_U.html" ) ) );
2422 split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2423 + "_domains_V.html" ) ) );
2424 split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2425 + "_domains_W.html" ) ) );
2426 split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2427 + "_domains_X.html" ) ) );
2428 split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2429 + "_domains_Y.html" ) ) );
2430 split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2431 + "_domains_Z.html" ) ) );
2432 split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2433 + "_domains_0.html" ) ) );
2436 private static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
2437 final Writer log_writer ) {
2439 for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
2440 sum += entry.getValue();
2442 final double percentage = 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) / sum;
2443 ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
2444 log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
2447 private static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
2448 final String[][] input_file_properties ) {
2449 final String[] genomes = new String[ input_file_properties.length ];
2450 for( int i = 0; i < input_file_properties.length; ++i ) {
2451 if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
2452 ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
2453 + "] is not unique in input tree " + intree.getName() );
2455 genomes[ i ] = input_file_properties[ i ][ 1 ];
2458 final PhylogenyNodeIterator it = intree.iteratorPostorder();
2459 while ( it.hasNext() ) {
2460 final PhylogenyNode n = it.next();
2461 if ( ForesterUtil.isEmpty( n.getName() ) ) {
2462 if ( n.getNodeData().isHasTaxonomy()
2463 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
2464 n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
2466 else if ( n.getNodeData().isHasTaxonomy()
2467 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
2468 n.setName( n.getNodeData().getTaxonomy().getScientificName() );
2470 else if ( n.getNodeData().isHasTaxonomy()
2471 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
2472 n.setName( n.getNodeData().getTaxonomy().getCommonName() );
2476 .fatalError( surfacing.PRG_NAME,
2477 "node with no name, scientific name, common name, or taxonomy code present" );
2482 final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
2483 if ( igns.size() > 0 ) {
2484 System.out.println( "Not using the following " + igns.size() + " nodes:" );
2485 for( int i = 0; i < igns.size(); ++i ) {
2486 System.out.println( " " + i + ": " + igns.get( i ) );
2488 System.out.println( "--" );
2490 for( int i = 0; i < input_file_properties.length; ++i ) {
2492 intree.getNode( input_file_properties[ i ][ 1 ] );
2494 catch ( final IllegalArgumentException e ) {
2495 ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
2496 + "] not present/not unique in input tree" );
2501 // public static StringBuffer stringCombinableDomainsMapToStringBuffer(
2502 // final SortedMap<String, CombinableDomains> map ) {
2503 // final StringBuffer sb = new StringBuffer();
2504 // for( final Iterator<String> iter = map.keySet().iterator();
2505 // iter.hasNext(); ) {
2506 // final Object key = iter.next();
2507 // sb.append( ForesterUtil.pad( new StringBuffer( key.toString() ), 18, ' ',
2509 // final CombinableDomains domain_combination = map.get( key );
2510 // sb.append( ForesterUtil.pad( new StringBuffer( "" +
2511 // domain_combination.getNumberOfCombiningDomains() ), 8,
2513 // sb.append( domain_combination.toStringBuffer() );
2514 // sb.append( ForesterUtil.getLineSeparator() );
2518 private static void printHelp() {
2519 System.out.println();
2520 System.out.println( "Usage:" );
2521 System.out.println();
2522 System.out.println( "% java -Xms256m -Xmx512m -cp forester.jar org.forester.applications." + surfacing.PRG_NAME
2523 + " [options] <phylogen(y|ies) infile> [external node name 1] [name 2] ... [name n]" );
2524 System.out.println();
2525 System.out.println( " Note: This software might need a significant amount of memory (heap space);" );
2527 .println( " hence use \"-Xms128m -Xmx512m\" (or more) to prevent a \"java.lang.OutOfMemoryError\"." );
2528 System.out.println();
2529 System.out.println( " Options: " );
2530 System.out.println( surfacing.DETAILEDNESS_OPTION + ": level of detail for similarities output file (default:"
2531 + DETAILEDNESS_DEFAULT + ")" );
2532 System.out.println( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION
2533 + ": to ignore combinations with self (default: not to ignore)" );
2535 .println( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION
2536 + ": to ignore domains without combinations in any species (for similarity calc purposes, not for parsimony analyses) (default: not to ignore)" );
2538 .println( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION
2539 + ": to ignore domains specific to one species (for similarity calc purposes, not for parsimony analyses) (default: not to ignore)" );
2540 System.out.println( surfacing.NOT_IGNORE_DUFS_OPTION
2541 + ": to _not_ ignore DUFs (domains with unknown function) (default: ignore DUFs)" );
2543 .println( surfacing.IGNORE_VIRAL_IDS
2544 + ": to ignore domains with ids containing 'vir', 'retro', 'transpos', 'phage', or starting with 'rv' or 'gag_'" );
2545 System.out.println( surfacing.DOMAIN_SIMILARITY_SORT_OPTION + ": sorting for similarities (default: "
2546 + DOMAIN_SORT_FILD_DEFAULT + ")" );
2547 System.out.println( surfacing.OUTPUT_FILE_OPTION + ": name for (main) output file (mandatory)" );
2548 System.out.println( surfacing.MAX_E_VALUE_OPTION + ": max (inclusive) E-value" );
2549 System.out.println( surfacing.MAX_ALLOWED_OVERLAP_OPTION + ": maximal allowed domain overlap" );
2550 System.out.println( surfacing.NO_ENGULFING_OVERLAP_OPTION + ": to ignore engulfed lower confidence domains" );
2551 System.out.println( surfacing.SPECIES_MATRIX_OPTION + ": species matrix" );
2552 System.out.println( surfacing.SCORING_OPTION + ": scoring (default:" + SCORING_DEFAULT + ")" );
2553 System.out.println( surfacing.DOMAIN_COUNT_SORT_OPTION + ": sorting for domain counts (default:"
2554 + DOMAINS_SORT_ORDER_DEFAULT + ")" );
2555 System.out.println( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION + ": domain similarity print option (default:"
2556 + DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT + ")" );
2557 System.out.println( surfacing.CUTOFF_SCORE_FILE_OPTION + ": cutoff score file" );
2558 System.out.println( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION
2559 + ": sort by species count first" );
2560 System.out.println( surfacing.OUTPUT_DIR_OPTION + ": output directory" );
2561 System.out.println( surfacing.PFAM_TO_GO_FILE_USE_OPTION + ": Pfam to GO mapping file" );
2562 System.out.println( surfacing.GO_OBO_FILE_USE_OPTION + ": GO terms file (OBO format)" );
2563 System.out.println( surfacing.GO_NAMESPACE_LIMIT_OPTION + ": limit GO term to one GO namespace" );
2564 System.out.println( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
2565 + "[=<suffix for pairwise comparison output files>]: to perform pairwise comparison based analyses" );
2566 System.out.println( surfacing.INPUT_SPECIES_TREE_OPTION
2567 + ": species tree, to perform (Dollo, Fitch) parismony analyses" );
2569 .println( JACKNIFE_OPTION
2570 + ": perform jacknife resampling for domain and binary domain combination based distance matrices [default resamplings: "
2571 + JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT + "]" );
2572 System.out.println( JACKNIFE_RATIO_OPTION + ": ratio for jacknife resampling [default: "
2573 + JACKNIFE_RATIO_DEFAULT + "]" );
2574 System.out.println( JACKNIFE_RANDOM_SEED_OPTION
2575 + ": seed for random number generator for jacknife resampling [default: "
2576 + JACKNIFE_RANDOM_SEED_DEFAULT + "]" );
2577 // System.out.println( surfacing.INFER_SPECIES_TREES_OPTION
2578 // + ": to infer NJ species trees based on shared domains/binary domain combinations" );
2580 .println( surfacing.INPUT_SPECIES_TREE_OPTION
2581 + "=<treefiles in phyloXML format, separated by #>: to infer domain/binary domain combination gains/losses on given species trees" );
2582 System.out.println( surfacing.FILTER_POSITIVE_OPTION
2583 + "=<file>: to filter out proteins not containing at least one domain listed in <file>" );
2584 System.out.println( surfacing.FILTER_NEGATIVE_OPTION
2585 + "=<file>: to filter out proteins containing at least one domain listed in <file>" );
2586 System.out.println( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION
2587 + "=<file>: to filter out (ignore) domains listed in <file>" );
2588 System.out.println( surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>: to read input files from <file>" );
2590 .println( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION
2591 + "=<seed>: seed for random number generator for Fitch Parsimony analysis (type: long, default: no randomization - given a choice, prefer absence" );
2592 System.out.println( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS
2593 + ": to consider directedness in binary combinations: e.g. A-B != B-A" );
2594 System.out.println( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY
2595 + ": to consider directedness and adjacency in binary combinations" );
2597 .println( surfacing.SEQ_EXTRACT_OPTION
2598 + "=<domain ids (Pfam names)>: to extract sequence names of sequences containing matching domains and/or domain-sequences (order N to C) (domain separator: '~', domain sequences speparator: '#', e.g. 'NACHT#BIR~CARD')" );
2599 System.out.println( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE
2600 + "=<file>: to perfom parsimony analysis on secondary features" );
2601 System.out.println( surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>: to presence/absence genome analysis" );
2602 System.out.println( surfacing.DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS
2603 + ": to output binary domain combinations for (downstream) graph analysis" );
2604 System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS + ": to output all proteins per domain" );
2605 System.out.println();
2606 System.out.println();
2607 System.out.println( "Example: java -Xms128m -Xmx512m -cp path/to/forester.jar"
2608 + " org.forester.application.surfacing -detail=punctilious -o=TEST.html -pwc=TEST"
2609 + " -cos=Pfam_ls_22_TC2 -p2g=pfam2go -obo=gene_ontology_edit.obo "
2610 + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo "
2611 + "-ds_output=detailed_html -scoring=domains -sort=alpha -" + JACKNIFE_OPTION
2612 + "=50 human mouse brafl strpu" );
2613 System.out.println();
2616 private static void processFilter( final File filter_file, final SortedSet<DomainId> filter ) {
2617 SortedSet<String> filter_str = null;
2619 filter_str = ForesterUtil.file2set( filter_file );
2621 catch ( final IOException e ) {
2622 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2624 if ( filter_str != null ) {
2625 for( final String string : filter_str ) {
2626 filter.add( new DomainId( string ) );
2630 System.out.println( "Filter:" );
2631 for( final DomainId domainId : filter ) {
2632 System.out.println( domainId.getId() );
2637 private static String[][] processInputFileNames( final String[] names ) {
2638 final String[][] input_file_properties = new String[ names.length ][];
2639 for( int i = 0; i < names.length; ++i ) {
2640 if ( names[ i ].indexOf( SEPARATOR_FOR_INPUT_VALUES ) < 0 ) {
2641 input_file_properties[ i ] = new String[ 2 ];
2642 input_file_properties[ i ][ 0 ] = names[ i ];
2643 input_file_properties[ i ][ 1 ] = names[ i ];
2646 input_file_properties[ i ] = names[ i ].split( surfacing.SEPARATOR_FOR_INPUT_VALUES + "" );
2647 if ( input_file_properties[ i ].length != 3 ) {
2649 .fatalError( surfacing.PRG_NAME,
2650 "properties for the input files (hmmpfam output) are expected "
2651 + "to be in the following format \"<hmmpfam output file>#<species>\" (or just one word, which is both the filename and the species id), instead received \""
2652 + names[ i ] + "\"" );
2655 final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
2656 if ( !ForesterUtil.isEmpty( error ) ) {
2657 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
2660 return input_file_properties;
2663 private static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
2664 final List<String> high_copy_base,
2665 final List<String> high_copy_target,
2666 final List<String> low_copy,
2667 final List<Object> numbers ) {
2668 if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
2669 if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
2670 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for 'plus-minus' file: -"
2671 + surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>" );
2673 final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
2674 final String msg = ForesterUtil.isReadableFile( plus_minus_file );
2675 if ( !ForesterUtil.isEmpty( msg ) ) {
2676 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
2678 processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
2682 // First numbers is minimal difference, second is factor.
2683 private static void processPlusMinusFile( final File plus_minus_file,
2684 final List<String> high_copy_base,
2685 final List<String> high_copy_target,
2686 final List<String> low_copy,
2687 final List<Object> numbers ) {
2688 Set<String> species_set = null;
2689 int min_diff = PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
2690 double factor = PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
2692 species_set = ForesterUtil.file2set( plus_minus_file );
2694 catch ( final IOException e ) {
2695 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2697 if ( species_set != null ) {
2698 for( final String species : species_set ) {
2699 final String species_trimmed = species.substring( 1 );
2700 if ( species.startsWith( "+" ) ) {
2701 if ( low_copy.contains( species_trimmed ) ) {
2702 ForesterUtil.fatalError( surfacing.PRG_NAME,
2703 "species/genome names can not appear with both '+' and '-' suffix, as appears the case for: \""
2704 + species_trimmed + "\"" );
2706 high_copy_base.add( species_trimmed );
2708 else if ( species.startsWith( "*" ) ) {
2709 if ( low_copy.contains( species_trimmed ) ) {
2710 ForesterUtil.fatalError( surfacing.PRG_NAME,
2711 "species/genome names can not appear with both '*' and '-' suffix, as appears the case for: \""
2712 + species_trimmed + "\"" );
2714 high_copy_target.add( species_trimmed );
2716 else if ( species.startsWith( "-" ) ) {
2717 if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
2718 ForesterUtil.fatalError( surfacing.PRG_NAME,
2719 "species/genome names can not appear with both '+' or '*' and '-' suffix, as appears the case for: \""
2720 + species_trimmed + "\"" );
2722 low_copy.add( species_trimmed );
2724 else if ( species.startsWith( "$D" ) ) {
2726 min_diff = Integer.parseInt( species.substring( 3 ) );
2728 catch ( final NumberFormatException e ) {
2729 ForesterUtil.fatalError( surfacing.PRG_NAME,
2730 "could not parse integer value for minimal difference from: \""
2731 + species.substring( 3 ) + "\"" );
2734 else if ( species.startsWith( "$F" ) ) {
2736 factor = Double.parseDouble( species.substring( 3 ) );
2738 catch ( final NumberFormatException e ) {
2739 ForesterUtil.fatalError( surfacing.PRG_NAME, "could not parse double value for factor from: \""
2740 + species.substring( 3 ) + "\"" );
2743 else if ( species.startsWith( "#" ) ) {
2748 .fatalError( surfacing.PRG_NAME,
2749 "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' minimal Difference (default is 1), '$F=<double>' factor (default is 1.0), double), or '#' (ignore) suffix, encountered: \""
2752 numbers.add( new Integer( min_diff + "" ) );
2753 numbers.add( new Double( factor + "" ) );
2757 ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
2761 private static void writePresentToNexus( final File output_file,
2762 final File positive_filter_file,
2763 final SortedSet<DomainId> filter,
2764 final List<GenomeWideCombinableDomains> gwcd_list ) {
2767 .writeMatrixToFile( DomainParsimonyCalculator
2768 .createMatrixOfDomainPresenceOrAbsence( gwcd_list, positive_filter_file == null ? null
2769 : filter ), output_file + DOMAINS_PRESENT_NEXUS, Format.NEXUS_BINARY );
2770 SurfacingUtil.writeMatrixToFile( DomainParsimonyCalculator
2771 .createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ), output_file
2772 + BDC_PRESENT_NEXUS, Format.NEXUS_BINARY );
2774 catch ( final Exception e ) {
2775 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2779 private static void writeProteinListsForAllSpecies( final File output_dir,
2780 final SortedMap<Species, List<Protein>> protein_lists_per_species,
2781 final List<GenomeWideCombinableDomains> gwcd_list ) {
2782 final SortedSet<DomainId> all_domains = new TreeSet<DomainId>();
2783 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
2784 all_domains.addAll( gwcd.getAllDomainIds() );
2786 for( final DomainId domain : all_domains ) {
2787 final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + SEQ_EXTRACT_SUFFIX );
2788 SurfacingUtil.checkForOutputFileWriteability( out );
2790 final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
2791 SurfacingUtil.extractProteinNames( protein_lists_per_species, domain, proteins_file_writer, "\t" );
2792 proteins_file_writer.close();
2794 catch ( final IOException e ) {
2795 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2797 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );