// FORESTER -- software libraries and applications
// for evolutionary biology research and applications.
//
// Copyright (C) 2008-2009 Christian M. Zmasek
// Copyright (C) 2008-2009 Burnham Institute for Medical Research
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
//
// Contact: phylosoft @ gmail . com
// WWW: https://sites.google.com/site/cmzmasek/home/software/forester
27 package org.forester.application;
29 import java.io.BufferedWriter;
31 import java.io.FileWriter;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.util.ArrayList;
35 import java.util.Date;
36 import java.util.HashMap;
37 import java.util.HashSet;
38 import java.util.Iterator;
39 import java.util.List;
41 import java.util.Map.Entry;
43 import java.util.SortedMap;
44 import java.util.SortedSet;
45 import java.util.TreeMap;
46 import java.util.TreeSet;
48 import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
49 import org.forester.go.GoId;
50 import org.forester.go.GoNameSpace;
51 import org.forester.go.GoTerm;
52 import org.forester.go.GoUtils;
53 import org.forester.go.OBOparser;
54 import org.forester.go.PfamToGoMapping;
55 import org.forester.go.PfamToGoParser;
56 import org.forester.io.parsers.HmmscanPerDomainTableParser;
57 import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
58 import org.forester.io.parsers.util.ParserUtils;
59 import org.forester.phylogeny.Phylogeny;
60 import org.forester.phylogeny.PhylogenyMethods;
61 import org.forester.phylogeny.PhylogenyNode;
62 import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
63 import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
64 import org.forester.protein.BasicProtein;
65 import org.forester.protein.BinaryDomainCombination;
66 import org.forester.protein.Domain;
67 import org.forester.protein.DomainId;
68 import org.forester.protein.Protein;
69 import org.forester.species.BasicSpecies;
70 import org.forester.species.Species;
71 import org.forester.surfacing.BasicDomainSimilarityCalculator;
72 import org.forester.surfacing.BasicGenomeWideCombinableDomains;
73 import org.forester.surfacing.CombinationsBasedPairwiseDomainSimilarityCalculator;
74 import org.forester.surfacing.DomainCountsBasedPairwiseSimilarityCalculator;
75 import org.forester.surfacing.DomainCountsDifferenceUtil;
76 import org.forester.surfacing.DomainLengthsTable;
77 import org.forester.surfacing.DomainParsimonyCalculator;
78 import org.forester.surfacing.DomainSimilarity;
79 import org.forester.surfacing.DomainSimilarity.DomainSimilarityScoring;
80 import org.forester.surfacing.DomainSimilarity.DomainSimilaritySortField;
81 import org.forester.surfacing.DomainSimilarityCalculator;
82 import org.forester.surfacing.DomainSimilarityCalculator.Detailedness;
83 import org.forester.surfacing.GenomeWideCombinableDomains;
84 import org.forester.surfacing.GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder;
85 import org.forester.surfacing.MappingResults;
86 import org.forester.surfacing.PairwiseDomainSimilarityCalculator;
87 import org.forester.surfacing.PairwiseGenomeComparator;
88 import org.forester.surfacing.PrintableDomainSimilarity;
89 import org.forester.surfacing.PrintableDomainSimilarity.PRINT_OPTION;
90 import org.forester.surfacing.ProteinCountsBasedPairwiseDomainSimilarityCalculator;
91 import org.forester.surfacing.SurfacingUtil;
92 import org.forester.util.BasicDescriptiveStatistics;
93 import org.forester.util.BasicTable;
94 import org.forester.util.BasicTableParser;
95 import org.forester.util.CommandLineArguments;
96 import org.forester.util.DescriptiveStatistics;
97 import org.forester.util.ForesterConstants;
98 import org.forester.util.ForesterUtil;
100 public class surfacing {
102 private static final int MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING = 1000;
103 public final static String DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS = "graph_analysis_out";
104 public final static String DOMAIN_COMBINITONS_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_dc.dot";
105 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_BC_OUTPUTFILE_SUFFIX_FOR_GRAPH_ANALYSIS = "_fitch_present_dc.dot";
106 public final static String DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX = ".dcc";
108 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_DOMAINS = "_dollo_gl_d";
109 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_BINARY_COMBINATIONS = "_dollo_gl_dc";
110 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_DOMAINS = "_fitch_gl_d";
111 public final static String PARSIMONY_OUTPUT_GL_SUFFIX_FITCH_BINARY_COMBINATIONS = "_fitch_gl_dc";
113 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_DOMAINS = "_dollo_glc_d";
114 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_BINARY_COMBINATIONS = "_dollo_glc_dc";
115 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_DOMAINS = "_fitch_glc_d";
116 public final static String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_FITCH_BINARY_COMBINATIONS = "_fitch_glc_dc";
118 public final static String PARSIMONY_OUTPUT_FITCH_GAINS_BC = "_fitch_gains_dc";
119 public final static String PARSIMONY_OUTPUT_FITCH_GAINS_HTML_BC = "_fitch_gains_dc.html";
120 public final static String PARSIMONY_OUTPUT_FITCH_LOSSES_BC = "_fitch_losses_dc";
121 public final static String PARSIMONY_OUTPUT_FITCH_LOSSES_HTML_BC = "_fitch_losses_dc.html";
122 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_BC = "_fitch_present_dc";
123 public final static String PARSIMONY_OUTPUT_FITCH_PRESENT_HTML_BC = "_fitch_present_dc.html";
124 public final static String PARSIMONY_OUTPUT_DOLLO_GAINS_D = "_dollo_gains_d";
125 public final static String PARSIMONY_OUTPUT_DOLLO_GAINS_HTML_D = "_dollo_gains_d.html";
126 public final static String PARSIMONY_OUTPUT_DOLLO_LOSSES_D = "_dollo_losses_d";
127 public final static String PARSIMONY_OUTPUT_DOLLO_LOSSES_HTML_D = "_dollo_losses_d.html";
128 public final static String PARSIMONY_OUTPUT_DOLLO_PRESENT_D = "_dollo_present_d";
129 public final static String PARSIMONY_OUTPUT_DOLLO_PRESENT_HTML_D = "_dollo_present_d.html";
130 public final static String DOMAINS_PRESENT_NEXUS = "_dom.nex";
131 public final static String BDC_PRESENT_NEXUS = "_dc.nex";
133 public final static String PRG_NAME = "surfacing";
134 public static final String DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_d_dollo"
135 + ForesterConstants.PHYLO_XML_SUFFIX;
136 public static final String DOMAINS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH = "_d_fitch"
137 + ForesterConstants.PHYLO_XML_SUFFIX;
138 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_dc_dollo"
139 + ForesterConstants.PHYLO_XML_SUFFIX;
140 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH = "_dc_fitch"
141 + ForesterConstants.PHYLO_XML_SUFFIX;
142 public static final String NEXUS_EXTERNAL_DOMAINS = "_dom.nex";
143 public static final String NEXUS_EXTERNAL_DOMAIN_COMBINATIONS = "_dc.nex";
144 public static final String NEXUS_SECONDARY_FEATURES = "_secondary_features.nex";
145 public static final String PARSIMONY_OUTPUT_GL_SUFFIX_DOLLO_SECONDARY_FEATURES = "_dollo_gl_secondary_features";
146 public static final String PARSIMONY_OUTPUT_GL_COUNTS_SUFFIX_DOLLO_SECONDARY_FEATURES = "_dollo_glc_secondary_features";
147 public static final String PARSIMONY_OUTPUT_DOLLO_GAINS_SECONDARY_FEATURES = "_dollo_gains_secondary_features";
148 public static final String PARSIMONY_OUTPUT_DOLLO_LOSSES_SECONDARY_FEATURES = "_dollo_losses_secondary_features";
149 public static final String PARSIMONY_OUTPUT_DOLLO_PRESENT_SECONDARY_FEATURES = "_dollo_present_secondary_features";
150 public static final String SECONDARY_FEATURES_PARSIMONY_TREE_OUTPUT_SUFFIX_DOLLO = "_secondary_features_dollo"
151 + ForesterConstants.PHYLO_XML_SUFFIX;
152 public static final String PARSIMONY_OUTPUT_DOLLO_ALL_GOID_D_ALL_NAMESPACES = "_dollo_goid_d";
153 public static final String PARSIMONY_OUTPUT_FITCH_ALL_GOID_BC_ALL_NAMESPACES = "_fitch_goid_dc";
154 final static private String HELP_OPTION_1 = "help";
155 final static private String HELP_OPTION_2 = "h";
156 final static private String OUTPUT_DIR_OPTION = "out_dir";
157 final static private String SCORING_OPTION = "scoring";
158 private static final DomainSimilarityScoring SCORING_DEFAULT = DomainSimilarity.DomainSimilarityScoring.COMBINATIONS;
159 final static private String SCORING_DOMAIN_COUNT_BASED = "domains";
160 final static private String SCORING_PROTEIN_COUNT_BASED = "proteins";
161 final static private String SCORING_COMBINATION_BASED = "combinations";
162 final static private String DETAILEDNESS_OPTION = "detail";
163 private final static Detailedness DETAILEDNESS_DEFAULT = DomainSimilarityCalculator.Detailedness.PUNCTILIOUS;
164 final static private String SPECIES_MATRIX_OPTION = "smatrix";
165 final static private String DETAILEDNESS_BASIC = "basic";
166 final static private String DETAILEDNESS_LIST_IDS = "list_ids";
167 final static private String DETAILEDNESS_PUNCTILIOUS = "punctilious";
168 final static private String DOMAIN_SIMILARITY_SORT_OPTION = "sort";
169 private static final DomainSimilaritySortField DOMAIN_SORT_FILD_DEFAULT = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
170 final static private String DOMAIN_SIMILARITY_SORT_MIN = "min";
171 final static private String DOMAIN_SIMILARITY_SORT_MAX = "max";
172 final static private String DOMAIN_SIMILARITY_SORT_SD = "sd";
173 final static private String DOMAIN_SIMILARITY_SORT_MEAN = "mean";
174 final static private String DOMAIN_SIMILARITY_SORT_DIFF = "diff";
175 final static private String DOMAIN_SIMILARITY_SORT_COUNTS_DIFF = "count_diff";
176 final static private String DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF = "abs_count_diff";
177 final static private String DOMAIN_SIMILARITY_SORT_SPECIES_COUNT = "species";
178 final static private String DOMAIN_SIMILARITY_SORT_ALPHA = "alpha";
179 final static private String DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION = "species_first";
180 final static private String DOMAIN_COUNT_SORT_OPTION = "dc_sort";
181 private static final GenomeWideCombinableDomainsSortOrder DOMAINS_SORT_ORDER_DEFAULT = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.ALPHABETICAL_KEY_ID;
182 final static private String DOMAIN_COUNT_SORT_ALPHA = "alpha";
183 final static private String DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT = "dom";
184 final static private String DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT = "prot";
185 final static private String DOMAIN_COUNT_SORT_COMBINATIONS_COUNT = "comb";
186 final static private String CUTOFF_SCORE_FILE_OPTION = "cos";
187 final static private String NOT_IGNORE_DUFS_OPTION = "dufs";
188 final static private String MAX_E_VALUE_OPTION = "e";
189 final static private String MAX_ALLOWED_OVERLAP_OPTION = "mo";
190 final static private String NO_ENGULFING_OVERLAP_OPTION = "no_eo";
191 final static private String IGNORE_COMBINATION_WITH_SAME_OPTION = "ignore_self_comb";
192 final static private String PAIRWISE_DOMAIN_COMPARISONS_PREFIX = "pwc_";
193 final static private String PAIRWISE_DOMAIN_COMPARISONS_OPTION = "pwc";
194 final static private String OUTPUT_FILE_OPTION = "o";
195 final static private String PFAM_TO_GO_FILE_USE_OPTION = "p2g";
196 final static private String GO_OBO_FILE_USE_OPTION = "obo";
197 final static private String GO_NAMESPACE_LIMIT_OPTION = "go_namespace";
198 final static private String GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION = "molecular_function";
199 final static private String GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS = "biological_process";
200 final static private String GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT = "cellular_component";
201 final static private String SECONDARY_FEATURES_PARSIMONY_MAP_FILE = "secondary";
202 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED = "simple_tab";
203 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML = "simple_html";
204 final static private String DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML = "detailed_html";
205 final static private String DOMAIN_SIMILARITY_PRINT_OPTION = "ds_output";
206 private static final PRINT_OPTION DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT = PrintableDomainSimilarity.PRINT_OPTION.HTML;
207 final static private String IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION = "ignore_singlet_domains";
208 final static private String IGNORE_VIRAL_IDS = "ignore_viral_ids";
209 final static private boolean IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_DEFAULT = false;
210 final static private String IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION = "ignore_species_specific_domains";
211 final static private boolean IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION_DEFAULT = false;
212 final static private String MATRIX_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX = "_mean_score.pwd";
213 final static private String MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX = "_domains.pwd";
214 final static private String MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX = "_bin_combinations.pwd";
215 final static private String NJ_TREE_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX = "_mean_score_NJ"
216 + ForesterConstants.PHYLO_XML_SUFFIX;
217 final static private String NJ_TREE_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX = "_domains_NJ"
218 + ForesterConstants.PHYLO_XML_SUFFIX;
219 final static private String NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX = "_bin_combinations_NJ"
220 + ForesterConstants.PHYLO_XML_SUFFIX;
221 final static private String JACKNIFE_OPTION = "jack";
222 final static private String JACKNIFE_RANDOM_SEED_OPTION = "seed";
223 final static private String JACKNIFE_RATIO_OPTION = "jack_ratio";
224 private static final int JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT = 100;
225 final static private long JACKNIFE_RANDOM_SEED_DEFAULT = 19;
226 final static private double JACKNIFE_RATIO_DEFAULT = 0.5;
227 //final static private String INFER_SPECIES_TREES_OPTION = "species_tree_inference";
228 final static private String FILTER_POSITIVE_OPTION = "pos_filter";
229 final static private String FILTER_NEGATIVE_OPTION = "neg_filter";
230 final static private String FILTER_NEGATIVE_DOMAINS_OPTION = "neg_dom_filter";
231 final static private String INPUT_FILES_FROM_FILE_OPTION = "input";
232 final static private String INPUT_SPECIES_TREE_OPTION = "species_tree";
233 final static private String SEQ_EXTRACT_OPTION = "prot_extract";
234 final static private char SEPARATOR_FOR_INPUT_VALUES = '#';
235 final static private String PRG_VERSION = "2.252";
236 final static private String PRG_DATE = "2012.08.01";
237 final static private String E_MAIL = "czmasek@burnham.org";
238 final static private String WWW = "www.phylosoft.org/forester/applications/surfacing";
239 final static private boolean IGNORE_DUFS_DEFAULT = true;
240 final static private boolean IGNORE_COMBINATION_WITH_SAME_DEFAULLT = false;
241 final static private double MAX_E_VALUE_DEFAULT = -1;
242 final static private int MAX_ALLOWED_OVERLAP_DEFAULT = -1;
243 private static final String RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION = "random_seed";
244 private static final String CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS = "consider_bdc_direction";
245 private static final String CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY = "consider_bdc_adj";
246 private static final String SEQ_EXTRACT_SUFFIX = ".prot";
247 private static final String PLUS_MINUS_ANALYSIS_OPTION = "plus_minus";
248 private static final String PLUS_MINUS_DOM_SUFFIX = "_plus_minus_dom.txt";
249 private static final String PLUS_MINUS_DOM_SUFFIX_HTML = "_plus_minus_dom.html";
250 private static final String PLUS_MINUS_DC_SUFFIX_HTML = "_plus_minus_dc.html";
251 private static final int PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT = 0;
252 private static final double PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT = 1.0;
253 private static final String PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_all.txt";
254 private static final String PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_passing.txt";
255 private static final String OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS = "all_prot";
256 final static private String OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION = "all_prot_e";
257 private static final boolean VERBOSE = false;
258 private static final String OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_gains_counts";
259 private static final String OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_losses_counts";
260 private static final String DOMAIN_LENGTHS_ANALYSIS_SUFFIX = "_domain_lengths_analysis";
261 private static final boolean PERFORM_DOMAIN_LENGTH_ANALYSIS = true;
262 public static final String ALL_PFAMS_ENCOUNTERED_SUFFIX = "_all_encountered_pfams";
263 public static final String ALL_PFAMS_ENCOUNTERED_WITH_GO_ANNOTATION_SUFFIX = "_all_encountered_pfams_with_go_annotation";
264 public static final String ENCOUNTERED_PFAMS_SUMMARY_SUFFIX = "_encountered_pfams_summary";
265 public static final String ALL_PFAMS_GAINED_AS_DOMAINS_SUFFIX = "_all_pfams_gained_as_domains";
266 public static final String ALL_PFAMS_LOST_AS_DOMAINS_SUFFIX = "_all_pfams_lost_as_domains";
267 public static final String ALL_PFAMS_GAINED_AS_DC_SUFFIX = "_all_pfams_gained_as_dc";
268 public static final String ALL_PFAMS_LOST_AS_DC_SUFFIX = "_all_pfams_lost_as_dc";
269 public static final String BASE_DIRECTORY_PER_NODE_DOMAIN_GAIN_LOSS_FILES = "PER_NODE_EVENTS";
270 public static final String BASE_DIRECTORY_PER_SUBTREE_DOMAIN_GAIN_LOSS_FILES = "PER_SUBTREE_EVENTS";
271 public static final String D_PROMISCUITY_FILE_SUFFIX = "_domain_promiscuities";
272 private static final String LOG_FILE_SUFFIX = "_log.txt";
273 private static final String DATA_FILE_SUFFIX = "_domain_combination_data.txt";
274 private static final String DATA_FILE_DESC = "#SPECIES\tPRTEIN_ID\tN_TERM_DOMAIN\tC_TERM_DOMAIN\tN_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tC_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tN_TERM_DOMAIN_COUNTS_PER_PROTEIN\tC_TERM_DOMAIN_COUNTS_PER_PROTEIN";
275 private static final INDIVIDUAL_SCORE_CUTOFF INDIVIDUAL_SCORE_CUTOFF_DEFAULT = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE; //TODO look at me! change?
276 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_counts.txt";
277 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists.txt";
278 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping.txt";
279 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique.txt";
280 public static final String LIMIT_SPEC_FOR_PROT_EX = null; // e.g. "HUMAN"; set to null for not using this feature (default).
281 public static final String BINARY_DOMAIN_COMBINATIONS_PARSIMONY_TREE_OUTPUT_SUFFIX_FITCH_MAPPED = "_dc_MAPPED_secondary_features_fitch"
282 + ForesterConstants.PHYLO_XML_SUFFIX;
283 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_counts_MAPPED.txt";
284 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_MAPPED.txt";
285 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_MAPPED.txt";
286 public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique_MAPPED.txt";
287 private static final boolean PERFORM_DC_REGAIN_PROTEINS_STATS = true;
288 private static final boolean DA_ANALYSIS = true;
290 private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
291 final String[][] input_file_properties,
292 final String automated_pairwise_comparison_suffix,
293 final File outdir ) {
294 for( int i = 0; i < input_file_properties.length; ++i ) {
295 for( int j = 0; j < i; ++j ) {
296 final String species_i = input_file_properties[ i ][ 1 ];
297 final String species_j = input_file_properties[ j ][ 1 ];
298 String pairwise_similarities_output_file_str = PAIRWISE_DOMAIN_COMPARISONS_PREFIX + species_i + "_"
299 + species_j + automated_pairwise_comparison_suffix;
300 switch ( domain_similarity_print_option ) {
302 if ( !pairwise_similarities_output_file_str.endsWith( ".html" ) ) {
303 pairwise_similarities_output_file_str += ".html";
307 final String error = ForesterUtil
308 .isWritableFile( new File( outdir == null ? pairwise_similarities_output_file_str : outdir
309 + ForesterUtil.FILE_SEPARATOR + pairwise_similarities_output_file_str ) );
310 if ( !ForesterUtil.isEmpty( error ) ) {
311 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
317 private static StringBuilder createParametersAsString( final boolean ignore_dufs,
318 final double e_value_max,
319 final int max_allowed_overlap,
320 final boolean no_engulfing_overlaps,
321 final File cutoff_scores_file,
322 final BinaryDomainCombination.DomainCombinationType dc_type ) {
323 final StringBuilder parameters_sb = new StringBuilder();
324 parameters_sb.append( "E-value: " + e_value_max );
325 if ( cutoff_scores_file != null ) {
326 parameters_sb.append( ", Cutoff-scores-file: " + cutoff_scores_file );
329 parameters_sb.append( ", Cutoff-scores-file: not-set" );
331 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
332 parameters_sb.append( ", Max-overlap: " + max_allowed_overlap );
335 parameters_sb.append( ", Max-overlap: not-set" );
337 if ( no_engulfing_overlaps ) {
338 parameters_sb.append( ", Engulfing-overlaps: not-allowed" );
341 parameters_sb.append( ", Engulfing-overlaps: allowed" );
344 parameters_sb.append( ", Ignore-dufs: true" );
347 parameters_sb.append( ", Ignore-dufs: false" );
349 parameters_sb.append( ", DC type (if applicable): " + dc_type );
350 return parameters_sb;
354 * Warning: This side-effects 'all_bin_domain_combinations_encountered'!
358 * @param all_bin_domain_combinations_changed
359 * @param sum_of_all_domains_encountered
360 * @param all_bin_domain_combinations_encountered
361 * @param is_gains_analysis
362 * @param protein_length_stats_by_dc
363 * @throws IOException
365 private static void executeFitchGainsAnalysis( final File output_file,
366 final List<BinaryDomainCombination> all_bin_domain_combinations_changed,
367 final int sum_of_all_domains_encountered,
368 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered,
369 final boolean is_gains_analysis ) throws IOException {
370 SurfacingUtil.checkForOutputFileWriteability( output_file );
371 final Writer out = ForesterUtil.createBufferedWriter( output_file );
372 final SortedMap<Object, Integer> bdc_to_counts = ForesterUtil
373 .listToSortedCountsMap( all_bin_domain_combinations_changed );
374 final SortedSet<DomainId> all_domains_in_combination_changed_more_than_once = new TreeSet<DomainId>();
375 final SortedSet<DomainId> all_domains_in_combination_changed_only_once = new TreeSet<DomainId>();
378 for( final Object bdc_object : bdc_to_counts.keySet() ) {
379 final BinaryDomainCombination bdc = ( BinaryDomainCombination ) bdc_object;
380 final int count = bdc_to_counts.get( bdc_object );
382 ForesterUtil.unexpectedFatalError( PRG_NAME, "count < 1 " );
384 out.write( bdc + "\t" + count + ForesterUtil.LINE_SEPARATOR );
386 all_domains_in_combination_changed_more_than_once.add( bdc.getId0() );
387 all_domains_in_combination_changed_more_than_once.add( bdc.getId1() );
390 else if ( count == 1 ) {
391 all_domains_in_combination_changed_only_once.add( bdc.getId0() );
392 all_domains_in_combination_changed_only_once.add( bdc.getId1() );
396 final int all = all_bin_domain_combinations_encountered.size();
398 if ( !is_gains_analysis ) {
399 all_bin_domain_combinations_encountered.removeAll( all_bin_domain_combinations_changed );
400 never_lost = all_bin_domain_combinations_encountered.size();
401 for( final BinaryDomainCombination bdc : all_bin_domain_combinations_encountered ) {
402 out.write( bdc + "\t" + "0" + ForesterUtil.LINE_SEPARATOR );
405 if ( is_gains_analysis ) {
406 out.write( "Sum of all distinct domain combinations appearing once : " + one
407 + ForesterUtil.LINE_SEPARATOR );
408 out.write( "Sum of all distinct domain combinations appearing more than once : " + above_one
409 + ForesterUtil.LINE_SEPARATOR );
410 out.write( "Sum of all distinct domains in combinations apppearing only once : "
411 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
412 out.write( "Sum of all distinct domains in combinations apppearing more than once: "
413 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
416 out.write( "Sum of all distinct domain combinations never lost : " + never_lost
417 + ForesterUtil.LINE_SEPARATOR );
418 out.write( "Sum of all distinct domain combinations lost once : " + one
419 + ForesterUtil.LINE_SEPARATOR );
420 out.write( "Sum of all distinct domain combinations lost more than once : " + above_one
421 + ForesterUtil.LINE_SEPARATOR );
422 out.write( "Sum of all distinct domains in combinations lost only once : "
423 + all_domains_in_combination_changed_only_once.size() + ForesterUtil.LINE_SEPARATOR );
424 out.write( "Sum of all distinct domains in combinations lost more than once: "
425 + all_domains_in_combination_changed_more_than_once.size() + ForesterUtil.LINE_SEPARATOR );
427 out.write( "All binary combinations : " + all
428 + ForesterUtil.LINE_SEPARATOR );
429 out.write( "All domains : "
430 + sum_of_all_domains_encountered );
432 ForesterUtil.programMessage( surfacing.PRG_NAME,
433 "Wrote fitch domain combination dynamics counts analysis to \"" + output_file
437 private static void executePlusMinusAnalysis( final File output_file,
438 final List<String> plus_minus_analysis_high_copy_base,
439 final List<String> plus_minus_analysis_high_copy_target,
440 final List<String> plus_minus_analysis_low_copy,
441 final List<GenomeWideCombinableDomains> gwcd_list,
442 final SortedMap<Species, List<Protein>> protein_lists_per_species,
443 final Map<DomainId, List<GoId>> domain_id_to_go_ids_map,
444 final Map<GoId, GoTerm> go_id_to_term_map,
445 final List<Object> plus_minus_analysis_numbers ) {
446 final Set<String> all_spec = new HashSet<String>();
447 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
448 all_spec.add( gwcd.getSpecies().getSpeciesId() );
450 final File html_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX_HTML );
451 final File plain_out_dom = new File( output_file + PLUS_MINUS_DOM_SUFFIX );
452 final File html_out_dc = new File( output_file + PLUS_MINUS_DC_SUFFIX_HTML );
453 final File all_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX );
454 final File passing_domains_go_ids_out_dom = new File( output_file + PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX );
455 final File proteins_file_base = new File( output_file + "" );
456 final int min_diff = ( ( Integer ) plus_minus_analysis_numbers.get( 0 ) ).intValue();
457 final double factor = ( ( Double ) plus_minus_analysis_numbers.get( 1 ) ).doubleValue();
459 DomainCountsDifferenceUtil.calculateCopyNumberDifferences( gwcd_list,
460 protein_lists_per_species,
461 plus_minus_analysis_high_copy_base,
462 plus_minus_analysis_high_copy_target,
463 plus_minus_analysis_low_copy,
469 domain_id_to_go_ids_map,
471 all_domains_go_ids_out_dom,
472 passing_domains_go_ids_out_dom,
473 proteins_file_base );
475 catch ( final IOException e ) {
476 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
478 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
479 + html_out_dom + "\"" );
480 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \""
481 + plain_out_dom + "\"" );
482 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis results to \"" + html_out_dc
484 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based passing GO ids to \""
485 + passing_domains_go_ids_out_dom + "\"" );
486 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote plus minus domain analysis based all GO ids to \""
487 + all_domains_go_ids_out_dom + "\"" );
490 private static Phylogeny[] getIntrees( final File[] intree_files,
491 final int number_of_genomes,
492 final String[][] input_file_properties ) {
493 final Phylogeny[] intrees = new Phylogeny[ intree_files.length ];
495 for( final File intree_file : intree_files ) {
496 Phylogeny intree = null;
497 final String error = ForesterUtil.isReadableFile( intree_file );
498 if ( !ForesterUtil.isEmpty( error ) ) {
499 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read input tree file [" + intree_file + "]: "
503 final Phylogeny[] p_array = ParserBasedPhylogenyFactory.getInstance()
504 .create( intree_file, ParserUtils.createParserDependingOnFileType( intree_file, true ) );
505 if ( p_array.length < 1 ) {
506 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
507 + "] does not contain any phylogeny in phyloXML format" );
509 else if ( p_array.length > 1 ) {
510 ForesterUtil.fatalError( surfacing.PRG_NAME, "file [" + intree_file
511 + "] contains more than one phylogeny in phyloXML format" );
513 intree = p_array[ 0 ];
515 catch ( final Exception e ) {
516 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read input tree from file [" + intree_file
519 if ( ( intree == null ) || intree.isEmpty() ) {
520 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is empty" );
522 if ( !intree.isRooted() ) {
523 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] is not rooted" );
525 if ( intree.getNumberOfExternalNodes() < number_of_genomes ) {
526 ForesterUtil.fatalError( surfacing.PRG_NAME,
527 "number of external nodes [" + intree.getNumberOfExternalNodes()
528 + "] of input tree [" + intree_file
529 + "] is smaller than the number of genomes the be analyzed ["
530 + number_of_genomes + "]" );
532 final StringBuilder parent_names = new StringBuilder();
533 final int nodes_lacking_name = SurfacingUtil.getNumberOfNodesLackingName( intree, parent_names );
534 if ( nodes_lacking_name > 0 ) {
535 ForesterUtil.fatalError( surfacing.PRG_NAME, "input tree [" + intree_file + "] has "
536 + nodes_lacking_name + " node(s) lacking a name [parent names:" + parent_names + "]" );
538 preparePhylogenyForParsimonyAnalyses( intree, input_file_properties );
539 if ( !intree.isCompletelyBinary() ) {
540 ForesterUtil.printWarningMessage( surfacing.PRG_NAME, "input tree [" + intree_file
541 + "] is not completely binary" );
543 intrees[ i++ ] = intree;
548 private static void log( final String msg, final Writer w ) {
551 w.write( ForesterUtil.LINE_SEPARATOR );
553 catch ( final IOException e ) {
554 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
558 public static void main( final String args[] ) {
559 final long start_time = new Date().getTime();
560 // final StringBuffer log = new StringBuffer();
561 final StringBuilder html_desc = new StringBuilder();
562 ForesterUtil.printProgramInformation( surfacing.PRG_NAME,
563 surfacing.PRG_VERSION,
567 final String nl = ForesterUtil.LINE_SEPARATOR;
568 html_desc.append( "<table>" + nl );
569 html_desc.append( "<tr><td>Produced by:</td><td>" + surfacing.PRG_NAME + "</td></tr>" + nl );
570 html_desc.append( "<tr><td>Version:</td><td>" + surfacing.PRG_VERSION + "</td></tr>" + nl );
571 html_desc.append( "<tr><td>Release Date:</td><td>" + surfacing.PRG_DATE + "</td></tr>" + nl );
572 html_desc.append( "<tr><td>Contact:</td><td>" + surfacing.E_MAIL + "</td></tr>" + nl );
573 html_desc.append( "<tr><td>WWW:</td><td>" + surfacing.WWW + "</td></tr>" + nl );
574 CommandLineArguments cla = null;
576 cla = new CommandLineArguments( args );
578 catch ( final Exception e ) {
579 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
581 if ( cla.isOptionSet( surfacing.HELP_OPTION_1 ) || cla.isOptionSet( surfacing.HELP_OPTION_2 ) ) {
582 surfacing.printHelp();
585 if ( ( args.length < 1 ) ) {
586 surfacing.printHelp();
589 final List<String> allowed_options = new ArrayList<String>();
590 allowed_options.add( surfacing.NOT_IGNORE_DUFS_OPTION );
591 allowed_options.add( surfacing.MAX_E_VALUE_OPTION );
592 allowed_options.add( surfacing.DETAILEDNESS_OPTION );
593 allowed_options.add( surfacing.OUTPUT_FILE_OPTION );
594 allowed_options.add( surfacing.DOMAIN_SIMILARITY_SORT_OPTION );
595 allowed_options.add( surfacing.SPECIES_MATRIX_OPTION );
596 allowed_options.add( surfacing.SCORING_OPTION );
597 allowed_options.add( surfacing.MAX_ALLOWED_OVERLAP_OPTION );
598 allowed_options.add( surfacing.NO_ENGULFING_OVERLAP_OPTION );
599 allowed_options.add( surfacing.DOMAIN_COUNT_SORT_OPTION );
600 allowed_options.add( surfacing.CUTOFF_SCORE_FILE_OPTION );
601 allowed_options.add( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION );
602 allowed_options.add( surfacing.OUTPUT_DIR_OPTION );
603 allowed_options.add( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION );
604 allowed_options.add( surfacing.PFAM_TO_GO_FILE_USE_OPTION );
605 allowed_options.add( surfacing.GO_OBO_FILE_USE_OPTION );
606 allowed_options.add( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION );
607 allowed_options.add( surfacing.GO_NAMESPACE_LIMIT_OPTION );
608 allowed_options.add( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
609 allowed_options.add( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION );
610 allowed_options.add( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS );
611 allowed_options.add( JACKNIFE_OPTION );
612 allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
613 allowed_options.add( JACKNIFE_RATIO_OPTION );
614 allowed_options.add( INPUT_SPECIES_TREE_OPTION );
615 //allowed_options.add( INFER_SPECIES_TREES_OPTION );
616 allowed_options.add( FILTER_POSITIVE_OPTION );
617 allowed_options.add( FILTER_NEGATIVE_OPTION );
618 allowed_options.add( INPUT_FILES_FROM_FILE_OPTION );
619 allowed_options.add( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
620 allowed_options.add( FILTER_NEGATIVE_DOMAINS_OPTION );
621 allowed_options.add( IGNORE_VIRAL_IDS );
622 allowed_options.add( SEQ_EXTRACT_OPTION );
623 allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
624 allowed_options.add( SECONDARY_FEATURES_PARSIMONY_MAP_FILE );
625 allowed_options.add( PLUS_MINUS_ANALYSIS_OPTION );
626 allowed_options.add( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS );
627 allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS );
628 allowed_options.add( CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY );
629 boolean ignore_dufs = surfacing.IGNORE_DUFS_DEFAULT;
630 boolean ignore_combination_with_same = surfacing.IGNORE_COMBINATION_WITH_SAME_DEFAULLT;
631 double e_value_max = surfacing.MAX_E_VALUE_DEFAULT;
632 int max_allowed_overlap = surfacing.MAX_ALLOWED_OVERLAP_DEFAULT;
633 final String dissallowed_options = cla.validateAllowedOptionsAsString( allowed_options );
634 if ( dissallowed_options.length() > 0 ) {
635 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown option(s): " + dissallowed_options );
637 boolean output_binary_domain_combinationsfor_graph_analysis = false;
638 if ( cla.isOptionSet( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS ) ) {
639 output_binary_domain_combinationsfor_graph_analysis = true;
641 if ( cla.isOptionSet( surfacing.MAX_E_VALUE_OPTION ) ) {
643 e_value_max = cla.getOptionValueAsDouble( surfacing.MAX_E_VALUE_OPTION );
645 catch ( final Exception e ) {
646 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for E-value maximum" );
649 if ( cla.isOptionSet( surfacing.MAX_ALLOWED_OVERLAP_OPTION ) ) {
651 max_allowed_overlap = cla.getOptionValueAsInt( surfacing.MAX_ALLOWED_OVERLAP_OPTION );
653 catch ( final Exception e ) {
654 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for maximal allowed domain overlap" );
657 boolean no_engulfing_overlaps = false;
658 if ( cla.isOptionSet( surfacing.NO_ENGULFING_OVERLAP_OPTION ) ) {
659 no_engulfing_overlaps = true;
661 boolean ignore_virus_like_ids = false;
662 if ( cla.isOptionSet( surfacing.IGNORE_VIRAL_IDS ) ) {
663 ignore_virus_like_ids = true;
665 if ( cla.isOptionSet( surfacing.NOT_IGNORE_DUFS_OPTION ) ) {
668 if ( cla.isOptionSet( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION ) ) {
669 ignore_combination_with_same = true;
671 boolean ignore_domains_without_combs_in_all_spec = IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_DEFAULT;
672 if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION ) ) {
673 ignore_domains_without_combs_in_all_spec = true;
675 boolean ignore_species_specific_domains = IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION_DEFAULT;
676 if ( cla.isOptionSet( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION ) ) {
677 ignore_species_specific_domains = true;
679 File output_file = null;
680 if ( cla.isOptionSet( surfacing.OUTPUT_FILE_OPTION ) ) {
681 if ( !cla.isOptionValueSet( surfacing.OUTPUT_FILE_OPTION ) ) {
682 ForesterUtil.fatalError( surfacing.PRG_NAME,
683 "no value for domain combinations similarities output file: -"
684 + surfacing.OUTPUT_FILE_OPTION + "=<file>" );
686 output_file = new File( cla.getOptionValue( surfacing.OUTPUT_FILE_OPTION ) );
687 SurfacingUtil.checkForOutputFileWriteability( output_file );
689 File cutoff_scores_file = null;
690 Map<String, Double> individual_score_cutoffs = null;
691 if ( cla.isOptionSet( surfacing.CUTOFF_SCORE_FILE_OPTION ) ) {
692 if ( !cla.isOptionValueSet( surfacing.CUTOFF_SCORE_FILE_OPTION ) ) {
693 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for individual domain score cutoffs file: -"
694 + surfacing.CUTOFF_SCORE_FILE_OPTION + "=<file>" );
696 cutoff_scores_file = new File( cla.getOptionValue( surfacing.CUTOFF_SCORE_FILE_OPTION ) );
697 final String error = ForesterUtil.isReadableFile( cutoff_scores_file );
698 if ( !ForesterUtil.isEmpty( error ) ) {
699 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read individual domain score cutoffs file: "
703 final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, ' ' );
704 individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
706 catch ( final IOException e ) {
707 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from individual score cutoffs file: " + e );
710 BinaryDomainCombination.DomainCombinationType dc_type = BinaryDomainCombination.DomainCombinationType.BASIC;
711 if ( cla.isOptionSet( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS ) ) {
712 dc_type = BinaryDomainCombination.DomainCombinationType.DIRECTED;
714 if ( cla.isOptionSet( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY ) ) {
715 dc_type = BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT;
718 if ( cla.isOptionSet( surfacing.OUTPUT_DIR_OPTION ) ) {
719 if ( !cla.isOptionValueSet( surfacing.OUTPUT_DIR_OPTION ) ) {
720 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for output directory: -"
721 + surfacing.OUTPUT_DIR_OPTION + "=<dir>" );
723 out_dir = new File( cla.getOptionValue( surfacing.OUTPUT_DIR_OPTION ) );
724 if ( out_dir.exists() && ( out_dir.listFiles().length > 0 ) ) {
725 ForesterUtil.fatalError( surfacing.PRG_NAME, "\"" + out_dir + "\" aready exists and is not empty" );
727 if ( !out_dir.exists() ) {
728 final boolean success = out_dir.mkdir();
729 if ( !success || !out_dir.exists() ) {
730 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to create \"" + out_dir + "\"" );
733 if ( !out_dir.canWrite() ) {
734 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot write to \"" + out_dir + "\"" );
737 File positive_filter_file = null;
738 File negative_filter_file = null;
739 File negative_domains_filter_file = null;
740 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) && cla.isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
741 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use both negative and positive protein filter" );
743 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION )
744 && ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) || cla
745 .isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) ) {
747 .fatalError( surfacing.PRG_NAME,
748 "attempt to use both negative or positive protein filter together wirh a negative domains filter" );
750 if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_OPTION ) ) {
751 if ( !cla.isOptionValueSet( surfacing.FILTER_NEGATIVE_OPTION ) ) {
752 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for negative filter: -"
753 + surfacing.FILTER_NEGATIVE_OPTION + "=<file>" );
755 negative_filter_file = new File( cla.getOptionValue( surfacing.FILTER_NEGATIVE_OPTION ) );
756 final String msg = ForesterUtil.isReadableFile( negative_filter_file );
757 if ( !ForesterUtil.isEmpty( msg ) ) {
758 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + negative_filter_file + "\": "
762 else if ( cla.isOptionSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
763 if ( !cla.isOptionValueSet( surfacing.FILTER_POSITIVE_OPTION ) ) {
764 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for positive filter: -"
765 + surfacing.FILTER_POSITIVE_OPTION + "=<file>" );
767 positive_filter_file = new File( cla.getOptionValue( surfacing.FILTER_POSITIVE_OPTION ) );
768 final String msg = ForesterUtil.isReadableFile( positive_filter_file );
769 if ( !ForesterUtil.isEmpty( msg ) ) {
770 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + positive_filter_file + "\": "
774 else if ( cla.isOptionSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) ) {
775 if ( !cla.isOptionValueSet( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) ) {
776 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for negative domains filter: -"
777 + surfacing.FILTER_NEGATIVE_DOMAINS_OPTION + "=<file>" );
779 negative_domains_filter_file = new File( cla.getOptionValue( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION ) );
780 final String msg = ForesterUtil.isReadableFile( negative_domains_filter_file );
781 if ( !ForesterUtil.isEmpty( msg ) ) {
782 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + negative_domains_filter_file
786 final List<String> plus_minus_analysis_high_copy_base_species = new ArrayList<String>();
787 final List<String> plus_minus_analysis_high_copy_target_species = new ArrayList<String>();
788 final List<String> plus_minus_analysis_high_low_copy_species = new ArrayList<String>();
789 final List<Object> plus_minus_analysis_numbers = new ArrayList<Object>();
790 processPlusMinusAnalysisOption( cla,
791 plus_minus_analysis_high_copy_base_species,
792 plus_minus_analysis_high_copy_target_species,
793 plus_minus_analysis_high_low_copy_species,
794 plus_minus_analysis_numbers );
795 File input_files_file = null;
796 String[] input_file_names_from_file = null;
797 if ( cla.isOptionSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
798 if ( !cla.isOptionValueSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
799 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input files file: -"
800 + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
802 input_files_file = new File( cla.getOptionValue( surfacing.INPUT_FILES_FROM_FILE_OPTION ) );
803 final String msg = ForesterUtil.isReadableFile( input_files_file );
804 if ( !ForesterUtil.isEmpty( msg ) ) {
805 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + input_files_file + "\": " + msg );
808 input_file_names_from_file = ForesterUtil.file2array( input_files_file );
810 catch ( final IOException e ) {
811 ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read from \"" + input_files_file + "\": " + e );
814 if ( ( cla.getNumberOfNames() < 1 )
815 && ( ( input_file_names_from_file == null ) || ( input_file_names_from_file.length < 1 ) ) ) {
816 ForesterUtil.fatalError( surfacing.PRG_NAME,
817 "No hmmpfam output file indicated is input: use comand line directly or "
818 + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
820 DomainSimilarity.DomainSimilarityScoring scoring = SCORING_DEFAULT;
821 if ( cla.isOptionSet( surfacing.SCORING_OPTION ) ) {
822 if ( !cla.isOptionValueSet( surfacing.SCORING_OPTION ) ) {
823 ForesterUtil.fatalError( surfacing.PRG_NAME,
824 "no value for scoring method for domain combinations similarity calculation: -"
825 + surfacing.SCORING_OPTION + "=<"
826 + surfacing.SCORING_DOMAIN_COUNT_BASED + "|"
827 + surfacing.SCORING_PROTEIN_COUNT_BASED + "|"
828 + surfacing.SCORING_COMBINATION_BASED + ">\"" );
830 final String scoring_str = cla.getOptionValue( surfacing.SCORING_OPTION );
831 if ( scoring_str.equals( surfacing.SCORING_DOMAIN_COUNT_BASED ) ) {
832 scoring = DomainSimilarity.DomainSimilarityScoring.DOMAINS;
834 else if ( scoring_str.equals( surfacing.SCORING_COMBINATION_BASED ) ) {
835 scoring = DomainSimilarity.DomainSimilarityScoring.COMBINATIONS;
837 else if ( scoring_str.equals( surfacing.SCORING_PROTEIN_COUNT_BASED ) ) {
838 scoring = DomainSimilarity.DomainSimilarityScoring.PROTEINS;
841 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + scoring_str
842 + "\" for scoring method for domain combinations similarity calculation: \"-"
843 + surfacing.SCORING_OPTION + "=<" + surfacing.SCORING_DOMAIN_COUNT_BASED + "|"
844 + surfacing.SCORING_PROTEIN_COUNT_BASED + "|" + surfacing.SCORING_COMBINATION_BASED + ">\"" );
847 boolean sort_by_species_count_first = false;
848 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION ) ) {
849 sort_by_species_count_first = true;
851 boolean species_matrix = false;
852 if ( cla.isOptionSet( surfacing.SPECIES_MATRIX_OPTION ) ) {
853 species_matrix = true;
855 boolean output_protein_lists_for_all_domains = false;
856 double output_list_of_all_proteins_per_domain_e_value_max = -1;
857 if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS ) ) {
858 output_protein_lists_for_all_domains = true;
860 if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION ) ) {
862 output_list_of_all_proteins_per_domain_e_value_max = cla
863 .getOptionValueAsDouble( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
865 catch ( final Exception e ) {
866 ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for per domain E-value maximum" );
871 Detailedness detailedness = DETAILEDNESS_DEFAULT;
872 if ( cla.isOptionSet( surfacing.DETAILEDNESS_OPTION ) ) {
873 if ( !cla.isOptionValueSet( surfacing.DETAILEDNESS_OPTION ) ) {
874 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for -" + surfacing.DETAILEDNESS_OPTION + "=<"
875 + surfacing.DETAILEDNESS_BASIC + "|" + surfacing.DETAILEDNESS_LIST_IDS + "|"
876 + surfacing.DETAILEDNESS_PUNCTILIOUS + ">\"" );
878 final String detness = cla.getOptionValue( surfacing.DETAILEDNESS_OPTION ).toLowerCase();
879 if ( detness.equals( surfacing.DETAILEDNESS_BASIC ) ) {
880 detailedness = DomainSimilarityCalculator.Detailedness.BASIC;
882 else if ( detness.equals( surfacing.DETAILEDNESS_LIST_IDS ) ) {
883 detailedness = DomainSimilarityCalculator.Detailedness.LIST_COMBINING_DOMAIN_FOR_EACH_SPECIES;
885 else if ( detness.equals( surfacing.DETAILEDNESS_PUNCTILIOUS ) ) {
886 detailedness = DomainSimilarityCalculator.Detailedness.PUNCTILIOUS;
889 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + detness + "\" for detailedness: \"-"
890 + surfacing.DETAILEDNESS_OPTION + "=<" + surfacing.DETAILEDNESS_BASIC + "|"
891 + surfacing.DETAILEDNESS_LIST_IDS + "|" + surfacing.DETAILEDNESS_PUNCTILIOUS + ">\"" );
894 String automated_pairwise_comparison_suffix = null;
895 boolean perform_pwc = false;
896 boolean write_pwc_files = false;
897 if ( cla.isOptionSet( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION ) ) {
899 if ( !cla.isOptionValueSet( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION ) ) {
900 write_pwc_files = false;
903 write_pwc_files = true;
904 automated_pairwise_comparison_suffix = "_"
905 + cla.getOptionValue( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION );
908 String query_domain_ids = null;
909 if ( cla.isOptionSet( surfacing.SEQ_EXTRACT_OPTION ) ) {
910 if ( !cla.isOptionValueSet( surfacing.SEQ_EXTRACT_OPTION ) ) {
912 .fatalError( surfacing.PRG_NAME,
913 "no domain ids given for sequences with given domains to be extracted : -"
914 + surfacing.SEQ_EXTRACT_OPTION
915 + "=<ordered domain sequences, domain ids separated by '~', sequences separated by '#'>" );
917 query_domain_ids = cla.getOptionValue( surfacing.SEQ_EXTRACT_OPTION );
919 DomainSimilarity.DomainSimilaritySortField domain_similarity_sort_field = DOMAIN_SORT_FILD_DEFAULT;
920 DomainSimilarity.DomainSimilaritySortField domain_similarity_sort_field_for_automated_pwc = DOMAIN_SORT_FILD_DEFAULT;
921 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ) ) {
922 if ( !cla.isOptionValueSet( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ) ) {
923 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for domain combinations similarities sorting: -"
924 + surfacing.DOMAIN_SIMILARITY_SORT_OPTION + "=<" + surfacing.DOMAIN_SIMILARITY_SORT_ALPHA + "|"
925 + surfacing.DOMAIN_SIMILARITY_SORT_MAX + "|" + surfacing.DOMAIN_SIMILARITY_SORT_MIN + "|"
926 + surfacing.DOMAIN_SIMILARITY_SORT_MEAN + "|" + surfacing.DOMAIN_SIMILARITY_SORT_DIFF + "|"
927 + surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF + "|"
928 + surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF + "|"
929 + surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT + "|" + surfacing.DOMAIN_SIMILARITY_SORT_SD
932 final String sort_str = cla.getOptionValue( surfacing.DOMAIN_SIMILARITY_SORT_OPTION ).toLowerCase();
933 if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_ALPHA ) ) {
934 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
935 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
937 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MAX ) ) {
938 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX;
939 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
941 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MIN ) ) {
942 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MIN;
943 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
945 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_MEAN ) ) {
946 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MEAN;
947 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MEAN;
949 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT ) ) {
950 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.SPECIES_COUNT;
951 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
953 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_SD ) ) {
954 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.SD;
955 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.DOMAIN_ID;
957 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_DIFF ) ) {
958 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX_DIFFERENCE;
959 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MAX_DIFFERENCE;
961 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF ) ) {
962 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
963 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
965 else if ( sort_str.equals( surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF ) ) {
966 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE;
967 domain_similarity_sort_field_for_automated_pwc = DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE;
970 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort_str
971 + "\" for domain combinations similarities sorting: \"-"
972 + surfacing.DOMAIN_SIMILARITY_SORT_OPTION + "=<" + surfacing.DOMAIN_SIMILARITY_SORT_ALPHA + "|"
973 + surfacing.DOMAIN_SIMILARITY_SORT_MAX + "|" + surfacing.DOMAIN_SIMILARITY_SORT_MIN + "|"
974 + surfacing.DOMAIN_SIMILARITY_SORT_MEAN + "|" + surfacing.DOMAIN_SIMILARITY_SORT_DIFF + "|"
975 + surfacing.DOMAIN_SIMILARITY_SORT_ABS_COUNTS_DIFF + "|"
976 + surfacing.DOMAIN_SIMILARITY_SORT_COUNTS_DIFF + "|" + "|"
977 + surfacing.DOMAIN_SIMILARITY_SORT_SPECIES_COUNT + "|" + surfacing.DOMAIN_SIMILARITY_SORT_SD
981 PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option = DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT;
982 if ( cla.isOptionSet( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ) ) {
983 if ( !cla.isOptionValueSet( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ) ) {
984 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for print option: -"
985 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML + "|"
986 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML + "|"
987 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED + ">\"" );
989 final String sort = cla.getOptionValue( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION ).toLowerCase();
990 if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML ) ) {
991 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.HTML;
993 else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML ) ) {
994 // domain_similarity_print_option =
995 // DomainSimilarity.PRINT_OPTION.SIMPLE_HTML;
996 ForesterUtil.fatalError( surfacing.PRG_NAME, "simple HTML output not implemented yet :(" );
998 else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED ) ) {
999 domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.SIMPLE_TAB_DELIMITED;
1002 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort + "\" for print option: -"
1003 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_DETAILED_HTML + "|"
1004 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML + "|"
1005 + surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED + ">\"" );
1008 GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder dc_sort_order = DOMAINS_SORT_ORDER_DEFAULT;
1009 if ( cla.isOptionSet( surfacing.DOMAIN_COUNT_SORT_OPTION ) ) {
1010 if ( !cla.isOptionValueSet( surfacing.DOMAIN_COUNT_SORT_OPTION ) ) {
1011 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for sorting of domain counts: -"
1012 + surfacing.DOMAIN_COUNT_SORT_OPTION + "=<" + surfacing.DOMAIN_COUNT_SORT_ALPHA + "|"
1013 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT + "|"
1014 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT + "|"
1015 + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
1017 final String sort = cla.getOptionValue( surfacing.DOMAIN_COUNT_SORT_OPTION ).toLowerCase();
1018 if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_ALPHA ) ) {
1019 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.ALPHABETICAL_KEY_ID;
1021 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT ) ) {
1022 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.KEY_DOMAIN_COUNT;
1024 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT ) ) {
1025 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.KEY_DOMAIN_PROTEINS_COUNT;
1027 else if ( sort.equals( surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT ) ) {
1028 dc_sort_order = GenomeWideCombinableDomains.GenomeWideCombinableDomainsSortOrder.COMBINATIONS_COUNT;
1031 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + sort
1032 + "\" for sorting of domain counts: \"-" + surfacing.DOMAIN_COUNT_SORT_OPTION + "=<"
1033 + surfacing.DOMAIN_COUNT_SORT_ALPHA + "|" + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_COUNT + "|"
1034 + surfacing.DOMAIN_COUNT_SORT_KEY_DOMAIN_PROTEINS_COUNT + "|"
1035 + surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
1038 String[][] input_file_properties = null;
1039 if ( input_file_names_from_file != null ) {
1040 input_file_properties = surfacing.processInputFileNames( input_file_names_from_file );
1043 input_file_properties = surfacing.processInputFileNames( cla.getNames() );
1045 final int number_of_genomes = input_file_properties.length;
1046 if ( number_of_genomes < 2 ) {
1047 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot analyze less than two files" );
1049 if ( ( number_of_genomes < 3 ) && perform_pwc ) {
1050 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use : -"
1051 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1052 + "=<suffix> to turn on pairwise analyses with less than three input files" );
1054 checkWriteabilityForPairwiseComparisons( domain_similarity_print_option,
1055 input_file_properties,
1056 automated_pairwise_comparison_suffix,
1058 for( int i = 0; i < number_of_genomes; i++ ) {
1059 File dcc_outfile = new File( input_file_properties[ i ][ 0 ]
1060 + surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
1061 if ( out_dir != null ) {
1062 dcc_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + dcc_outfile );
1064 SurfacingUtil.checkForOutputFileWriteability( dcc_outfile );
1066 File pfam_to_go_file = null;
1067 Map<DomainId, List<GoId>> domain_id_to_go_ids_map = null;
1068 int domain_id_to_go_ids_count = 0;
1069 if ( cla.isOptionSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
1070 if ( !cla.isOptionValueSet( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) ) {
1071 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for Pfam to GO mapping file: -"
1072 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>" );
1074 pfam_to_go_file = new File( cla.getOptionValue( surfacing.PFAM_TO_GO_FILE_USE_OPTION ) );
1075 final String error = ForesterUtil.isReadableFile( pfam_to_go_file );
1076 if ( !ForesterUtil.isEmpty( error ) ) {
1077 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read Pfam to GO mapping file: " + error );
1080 final PfamToGoParser parser = new PfamToGoParser( pfam_to_go_file );
1081 final List<PfamToGoMapping> pfam_to_go_mappings = parser.parse();
1082 domain_id_to_go_ids_map = SurfacingUtil.createDomainIdToGoIdMap( pfam_to_go_mappings );
1083 if ( parser.getMappingCount() < domain_id_to_go_ids_map.size() ) {
1084 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME,
1085 "parser.getMappingCount() < domain_id_to_go_ids_map.size()" );
1087 domain_id_to_go_ids_count = parser.getMappingCount();
1089 catch ( final IOException e ) {
1090 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from Pfam to GO mapping file: " + e );
1093 File go_obo_file = null;
1094 List<GoTerm> go_terms = null;
1095 if ( cla.isOptionSet( surfacing.GO_OBO_FILE_USE_OPTION ) ) {
1096 if ( !cla.isOptionValueSet( surfacing.GO_OBO_FILE_USE_OPTION ) ) {
1097 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for GO OBO file: -"
1098 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>" );
1100 if ( ( domain_id_to_go_ids_map == null ) || ( domain_id_to_go_ids_map.size() < 1 ) ) {
1101 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use GO OBO file (-"
1102 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>) without Pfam to GO mapping file ("
1103 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>)" );
1105 go_obo_file = new File( cla.getOptionValue( surfacing.GO_OBO_FILE_USE_OPTION ) );
1106 final String error = ForesterUtil.isReadableFile( go_obo_file );
1107 if ( !ForesterUtil.isEmpty( error ) ) {
1108 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read GO OBO file: " + error );
1111 final OBOparser parser = new OBOparser( go_obo_file, OBOparser.ReturnType.BASIC_GO_TERM );
1112 go_terms = parser.parse();
1113 if ( parser.getGoTermCount() != go_terms.size() ) {
1115 .unexpectedFatalError( surfacing.PRG_NAME, "parser.getGoTermCount() != go_terms.size()" );
1118 catch ( final IOException e ) {
1119 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read from GO OBO file: " + e );
1122 Map<GoId, GoTerm> go_id_to_term_map = null;
1123 if ( ( ( domain_id_to_go_ids_map != null ) && ( domain_id_to_go_ids_map.size() > 0 ) )
1124 && ( ( go_terms != null ) && ( go_terms.size() > 0 ) ) ) {
1125 go_id_to_term_map = GoUtils.createGoIdToGoTermMap( go_terms );
1127 GoNameSpace go_namespace_limit = null;
1128 if ( cla.isOptionSet( surfacing.GO_NAMESPACE_LIMIT_OPTION ) ) {
1129 if ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) {
1130 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use GO namespace limit (-"
1131 + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<namespace>) without Pfam to GO mapping file ("
1132 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>) and GO OBO file (-"
1133 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
1135 if ( !cla.isOptionValueSet( surfacing.GO_NAMESPACE_LIMIT_OPTION ) ) {
1136 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for GO namespace limit: \"-"
1137 + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<"
1138 + surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION + "|"
1139 + surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS + "|"
1140 + surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT + ">\"" );
1142 final String go_namespace_limit_str = cla.getOptionValue( surfacing.GO_NAMESPACE_LIMIT_OPTION )
1144 if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION ) ) {
1145 go_namespace_limit = GoNameSpace.createMolecularFunction();
1147 else if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS ) ) {
1148 go_namespace_limit = GoNameSpace.createBiologicalProcess();
1150 else if ( go_namespace_limit_str.equals( surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT ) ) {
1151 go_namespace_limit = GoNameSpace.createCellularComponent();
1154 ForesterUtil.fatalError( surfacing.PRG_NAME, "unknown value \"" + go_namespace_limit_str
1155 + "\" for GO namespace limit: \"-" + surfacing.GO_NAMESPACE_LIMIT_OPTION + "=<"
1156 + surfacing.GO_NAMESPACE_LIMIT_OPTION_MOLECULAR_FUNCTION + "|"
1157 + surfacing.GO_NAMESPACE_LIMIT_OPTION_BIOLOGICAL_PROCESS + "|"
1158 + surfacing.GO_NAMESPACE_LIMIT_OPTION_CELLULAR_COMPONENT + ">\"" );
1161 if ( ( domain_similarity_sort_field == DomainSimilarity.DomainSimilaritySortField.MAX_COUNTS_DIFFERENCE )
1162 && ( number_of_genomes > 2 ) ) {
1163 domain_similarity_sort_field = DomainSimilarity.DomainSimilaritySortField.ABS_MAX_COUNTS_DIFFERENCE;
1165 boolean jacknifed_distances = false;
1166 int jacknife_resamplings = JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT;
1167 double jacknife_ratio = JACKNIFE_RATIO_DEFAULT;
1168 long random_seed = JACKNIFE_RANDOM_SEED_DEFAULT;
1169 if ( cla.isOptionSet( surfacing.JACKNIFE_OPTION ) ) {
1170 if ( ( number_of_genomes < 3 ) || !perform_pwc ) {
1171 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot use jacknife resampling analysis (-"
1172 + surfacing.JACKNIFE_OPTION + "[=<number of resamplings>]) without pairwise analyses ("
1173 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1174 + "=<suffix for pairwise comparison output files>)" );
1176 jacknifed_distances = true;
1177 if ( cla.isOptionHasAValue( surfacing.JACKNIFE_OPTION ) ) {
1179 jacknife_resamplings = cla.getOptionValueAsInt( surfacing.JACKNIFE_OPTION );
1181 catch ( final IOException e ) {
1182 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for number of resamplings" );
1184 if ( jacknife_resamplings < 2 ) {
1185 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use less than 2 resamplings" );
1188 if ( cla.isOptionSet( surfacing.JACKNIFE_RATIO_OPTION )
1189 && cla.isOptionHasAValue( surfacing.JACKNIFE_RATIO_OPTION ) ) {
1191 jacknife_ratio = cla.getOptionValueAsDouble( surfacing.JACKNIFE_RATIO_OPTION );
1193 catch ( final IOException e ) {
1194 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for jacknife ratio" );
1196 if ( ( jacknife_ratio <= 0.0 ) || ( jacknife_ratio >= 1.0 ) ) {
1197 ForesterUtil.fatalError( surfacing.PRG_NAME, "attempt to use illegal value for jacknife ratio: "
1201 if ( cla.isOptionSet( surfacing.JACKNIFE_RANDOM_SEED_OPTION )
1202 && cla.isOptionHasAValue( surfacing.JACKNIFE_RANDOM_SEED_OPTION ) ) {
1204 random_seed = cla.getOptionValueAsLong( surfacing.JACKNIFE_RANDOM_SEED_OPTION );
1206 catch ( final IOException e ) {
1207 ForesterUtil.fatalError( surfacing.PRG_NAME, "illegal format for random generator seed" );
1211 // boolean infer_species_trees = false;
1212 // if ( cla.isOptionSet( surfacing.INFER_SPECIES_TREES_OPTION ) ) {
1213 // if ( ( output_file == null ) || ( number_of_genomes < 3 )
1214 // || ForesterUtil.isEmpty( automated_pairwise_comparison_suffix ) ) {
1215 // ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer species trees (-"
1216 // + surfacing.INFER_SPECIES_TREES_OPTION + " without pairwise analyses ("
1217 // + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1218 // + "=<suffix for pairwise comparison output files>)" );
1220 // infer_species_trees = true;
1222 File[] intree_files = null;
1223 Phylogeny[] intrees = null;
1224 if ( cla.isOptionSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
1225 // TODO FIXME if jacknife.... maybe not
1226 if ( number_of_genomes < 3 ) {
1227 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer gains and losses on input species trees (-"
1228 + surfacing.INPUT_SPECIES_TREE_OPTION + " without pairwise analyses ("
1229 + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
1230 + "=<suffix for pairwise comparison output files>)" );
1232 if ( !cla.isOptionValueSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
1233 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input tree: -"
1234 + surfacing.INPUT_SPECIES_TREE_OPTION + "=<tree file in phyloXML format>" );
1236 final String intrees_str = cla.getOptionValue( surfacing.INPUT_SPECIES_TREE_OPTION );
1237 if ( intrees_str.indexOf( "#" ) > 0 ) {
1238 final String[] intrees_strs = intrees_str.split( "#" );
1239 intree_files = new File[ intrees_strs.length ];
1241 for( final String s : intrees_strs ) {
1242 intree_files[ i++ ] = new File( s.trim() );
1246 intree_files = new File[ 1 ];
1247 intree_files[ 0 ] = new File( intrees_str );
1249 intrees = getIntrees( intree_files, number_of_genomes, input_file_properties );
1251 long random_number_seed_for_fitch_parsimony = 0l;
1252 boolean radomize_fitch_parsimony = false;
1253 if ( cla.isOptionSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
1254 if ( !cla.isOptionValueSet( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION ) ) {
1255 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for random number seed: -"
1256 + surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION + "=<seed>" );
1259 random_number_seed_for_fitch_parsimony = cla
1260 .getOptionValueAsLong( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
1262 catch ( final IOException e ) {
1263 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1265 radomize_fitch_parsimony = true;
1267 SortedSet<DomainId> filter = null;
1268 if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
1269 || ( negative_domains_filter_file != null ) ) {
1270 filter = new TreeSet<DomainId>();
1271 if ( positive_filter_file != null ) {
1272 processFilter( positive_filter_file, filter );
1274 else if ( negative_filter_file != null ) {
1275 processFilter( negative_filter_file, filter );
1277 else if ( negative_domains_filter_file != null ) {
1278 processFilter( negative_domains_filter_file, filter );
1281 Map<DomainId, Set<String>>[] domain_id_to_secondary_features_maps = null;
1282 File[] secondary_features_map_files = null;
1283 final File domain_lengths_analysis_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
1284 + DOMAIN_LENGTHS_ANALYSIS_SUFFIX );
1285 if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
1286 SurfacingUtil.checkForOutputFileWriteability( domain_lengths_analysis_outfile );
1288 if ( cla.isOptionSet( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ) ) {
1289 if ( !cla.isOptionValueSet( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ) ) {
1290 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for secondary features map file: -"
1291 + surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE + "=<file>" );
1293 final String[] secondary_features_map_files_strs = cla
1294 .getOptionValue( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE ).split( "#" );
1295 secondary_features_map_files = new File[ secondary_features_map_files_strs.length ];
1296 domain_id_to_secondary_features_maps = new Map[ secondary_features_map_files_strs.length ];
1298 for( final String secondary_features_map_files_str : secondary_features_map_files_strs ) {
1299 secondary_features_map_files[ i ] = new File( secondary_features_map_files_str );
1300 final String error = ForesterUtil.isReadableFile( secondary_features_map_files[ i ] );
1301 if ( !ForesterUtil.isEmpty( error ) ) {
1302 ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot read secondary features map file: " + error );
1305 domain_id_to_secondary_features_maps[ i ] = SurfacingUtil
1306 .createDomainIdToSecondaryFeaturesMap( secondary_features_map_files[ i ] );
1308 catch ( final IOException e ) {
1309 ForesterUtil.fatalError( surfacing.PRG_NAME,
1310 "cannot read secondary features map file: " + e.getMessage() );
1312 catch ( final Exception e ) {
1313 ForesterUtil.fatalError( surfacing.PRG_NAME, "problem with contents of features map file ["
1314 + secondary_features_map_files[ i ] + "]: " + e.getMessage() );
1319 if ( out_dir == null ) {
1320 ForesterUtil.fatalError( surfacing.PRG_NAME, "no output directory indicated (-"
1321 + surfacing.OUTPUT_DIR_OPTION + "=<dir>)" );
1323 if ( output_file == null ) {
1324 ForesterUtil.fatalError( surfacing.PRG_NAME, "no name for (main) output file indicated (-"
1325 + surfacing.OUTPUT_FILE_OPTION + "=<file>)" );
1327 if ( ( domain_id_to_go_ids_map == null ) || domain_id_to_go_ids_map.isEmpty() ) {
1328 ForesterUtil.fatalError( surfacing.PRG_NAME,
1329 "no (acceptable) Pfam to GO id mapping file provided ('pfam2go file') (-"
1330 + surfacing.PFAM_TO_GO_FILE_USE_OPTION + "=<file>)" );
1332 if ( ( go_id_to_term_map == null ) || go_id_to_term_map.isEmpty() ) {
1333 ForesterUtil.fatalError( surfacing.PRG_NAME,
1334 "no (acceptable) go id to term mapping file provided ('GO OBO file') (-"
1335 + surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
1337 System.out.println( "Output directory : " + out_dir );
1338 if ( input_file_names_from_file != null ) {
1339 System.out.println( "Input files names from : " + input_files_file + " ["
1340 + input_file_names_from_file.length + " input files]" );
1341 html_desc.append( "<tr><td>Input files names from:</td><td>" + input_files_file + " ["
1342 + input_file_names_from_file.length + " input files]</td></tr>" + nl );
1344 if ( positive_filter_file != null ) {
1345 final int filter_size = filter.size();
1346 System.out.println( "Positive protein filter : " + positive_filter_file + " [" + filter_size
1348 html_desc.append( "<tr><td>Positive protein filter:</td><td>" + positive_filter_file + " [" + filter_size
1349 + " domain ids]</td></tr>" + nl );
1351 if ( negative_filter_file != null ) {
1352 final int filter_size = filter.size();
1353 System.out.println( "Negative protein filter : " + negative_filter_file + " [" + filter_size
1355 html_desc.append( "<tr><td>Negative protein filter:</td><td>" + negative_filter_file + " [" + filter_size
1356 + " domain ids]</td></tr>" + nl );
1358 if ( negative_domains_filter_file != null ) {
1359 final int filter_size = filter.size();
1360 System.out.println( "Negative domain filter : " + negative_domains_filter_file + " [" + filter_size
1362 html_desc.append( "<tr><td>Negative domain filter:</td><td>" + negative_domains_filter_file + " ["
1363 + filter_size + " domain ids]</td></tr>" + nl );
1365 if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
1367 for( final String s : plus_minus_analysis_high_copy_base_species ) {
1368 plus0 += "+" + s + " ";
1371 for( final String s : plus_minus_analysis_high_copy_target_species ) {
1372 plus1 += "*" + s + " ";
1375 for( final String s : plus_minus_analysis_high_low_copy_species ) {
1376 minus += "-" + s + " ";
1378 System.out.println( "Plus-minus analysis : " + plus1 + "&& " + plus0 + "&& " + minus );
1379 html_desc.append( "<tr><td>Plus-minus analysis:</td><td>" + plus1 + "&& " + plus0 + "&& " + minus
1380 + "</td></tr>" + nl );
1382 if ( cutoff_scores_file != null ) {
1383 System.out.println( "Cutoff scores file : " + cutoff_scores_file );
1384 html_desc.append( "<tr><td>Cutoff scores file:</td><td>" + cutoff_scores_file + "</td></tr>" + nl );
1386 if ( e_value_max >= 0.0 ) {
1387 System.out.println( "E-value maximum (inclusive) : " + e_value_max );
1388 html_desc.append( "<tr><td>E-value maximum (inclusive):</td><td>" + e_value_max + "</td></tr>" + nl );
1390 if ( output_protein_lists_for_all_domains ) {
1391 System.out.println( "Domain E-value max : " + output_list_of_all_proteins_per_domain_e_value_max );
1392 html_desc.append( "<tr><td>Protein lists: E-value maximum per domain (inclusive):</td><td>"
1393 + output_list_of_all_proteins_per_domain_e_value_max + "</td></tr>" + nl );
1395 System.out.println( "Ignore DUFs : " + ignore_dufs );
1396 if ( ignore_virus_like_ids ) {
1397 System.out.println( "Ignore virus like ids : " + ignore_virus_like_ids );
1398 html_desc.append( "<tr><td>Ignore virus, phage, transposition related ids:</td><td>"
1399 + ignore_virus_like_ids + "</td></tr>" + nl );
1401 html_desc.append( "<tr><td>Ignore DUFs:</td><td>" + ignore_dufs + "</td></tr>" + nl );
1402 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
1403 System.out.println( "Max allowed domain overlap : " + max_allowed_overlap );
1404 html_desc.append( "<tr><td>Max allowed domain overlap:</td><td>" + max_allowed_overlap + "</td></tr>" + nl );
1406 if ( no_engulfing_overlaps ) {
1407 System.out.println( "Ignore engulfed domains : " + no_engulfing_overlaps );
1408 html_desc.append( "<tr><td>Ignore (lower confidence) engulfed domains:</td><td>" + no_engulfing_overlaps
1409 + "</td></tr>" + nl );
1411 System.out.println( "Ignore singlet domains : " + ignore_domains_without_combs_in_all_spec );
1413 .append( "<tr><td>Ignore singlet domains for domain combination similarity analyses (not for parsimony analyses):</td><td>"
1414 + ignore_domains_without_combs_in_all_spec + "</td></tr>" + nl );
1415 System.out.println( "Ignore species specific doms: " + ignore_species_specific_domains );
1417 .append( "<tr><td>Ignore species specific domains for domain combination similarity analyses (not for parsimony analyses):</td><td>"
1418 + ignore_species_specific_domains + "</td></tr>" + nl );
1419 System.out.println( "Ignore combination with self: " + ignore_combination_with_same );
1420 html_desc.append( "<tr><td>Ignore combination with self for domain combination similarity analyses:</td><td>"
1421 + ignore_combination_with_same + "</td></tr>" + nl );
1422 System.out.println( "Consider directedness : "
1423 + ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) );
1424 html_desc.append( "<tr><td>Consider directedness of binary domain combinations:</td><td>"
1425 + ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) + "</td></tr>" + nl );
1426 if ( dc_type != BinaryDomainCombination.DomainCombinationType.BASIC ) {
1427 System.out.println( "Consider adjacency : "
1428 + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) );
1429 html_desc.append( "<tr><td>Consider djacency of binary domain combinations:</td><td>"
1430 + ( dc_type == BinaryDomainCombination.DomainCombinationType.DIRECTED_ADJACTANT ) + "</td></tr>"
1433 System.out.print( "Domain counts sort order : " );
1434 switch ( dc_sort_order ) {
1435 case ALPHABETICAL_KEY_ID:
1436 System.out.println( "alphabetical" );
1438 case KEY_DOMAIN_COUNT:
1439 System.out.println( "domain count" );
1441 case KEY_DOMAIN_PROTEINS_COUNT:
1442 System.out.println( "domain proteins count" );
1444 case COMBINATIONS_COUNT:
1445 System.out.println( "domain combinations count" );
1448 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for dc sort order" );
1450 if ( domain_id_to_go_ids_map != null ) {
1451 System.out.println( "Pfam to GO mappings from : " + pfam_to_go_file + " [" + domain_id_to_go_ids_count
1453 html_desc.append( "<tr><td>Pfam to GO mappings from:</td><td>" + pfam_to_go_file + " ["
1454 + domain_id_to_go_ids_count + " mappings]" + "</td></tr>" + nl );
1456 if ( go_terms != null ) {
1457 System.out.println( "GO terms from : " + go_obo_file + " [" + go_terms.size() + " terms]" );
1458 html_desc.append( "<tr><td>GO terms from:</td><td>" + go_obo_file + " [" + go_terms.size() + " terms]"
1459 + "</td></tr>" + nl );
1461 if ( go_namespace_limit != null ) {
1462 System.out.println( "Limit GO terms to : " + go_namespace_limit.toString() );
1463 html_desc.append( "<tr><td>Limit GO terms to</td><td>" + go_namespace_limit + "</td></tr>" + nl );
1465 if ( perform_pwc ) {
1466 System.out.println( "Suffix for PWC files : " + automated_pairwise_comparison_suffix );
1467 html_desc.append( "<tr><td>Suffix for PWC files</td><td>" + automated_pairwise_comparison_suffix
1468 + "</td></tr>" + nl );
1470 if ( out_dir != null ) {
1471 System.out.println( "Output directory : " + out_dir );
1473 if ( query_domain_ids != null ) {
1474 System.out.println( "Query domains (ordered) : " + query_domain_ids );
1475 html_desc.append( "<tr><td></td><td>" + query_domain_ids + "</td></tr>" + nl );
1477 System.out.println( "Write similarities to : " + output_file );
1478 System.out.print( " Scoring method : " );
1479 html_desc.append( "<tr><td>Scoring method:</td><td>" );
1480 switch ( scoring ) {
1482 System.out.println( "domain combinations based" );
1483 html_desc.append( "domain combinations based" + "</td></tr>" + nl );
1486 System.out.println( "domain counts based" );
1487 html_desc.append( "domain counts based" + "</td></tr>" + nl );
1490 System.out.println( "domain proteins counts based" );
1491 html_desc.append( "domain proteins counts based" + "</td></tr>" + nl );
1494 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for scoring" );
1496 System.out.print( " Sort by : " );
1497 html_desc.append( "<tr><td>Sort by:</td><td>" );
1498 switch ( domain_similarity_sort_field ) {
1500 System.out.print( "score minimum" );
1501 html_desc.append( "score minimum" );
1504 System.out.print( "score maximum" );
1505 html_desc.append( "score maximum" );
1508 System.out.print( "score mean" );
1509 html_desc.append( "score mean" );
1512 System.out.print( "score standard deviation" );
1513 html_desc.append( "score standard deviation" );
1516 System.out.print( "species number" );
1517 html_desc.append( "species number" );
1520 System.out.print( "alphabetical domain identifier" );
1521 html_desc.append( "alphabetical domain identifier" );
1523 case MAX_DIFFERENCE:
1524 System.out.print( "(maximal) difference" );
1525 html_desc.append( "(maximal) difference" );
1527 case ABS_MAX_COUNTS_DIFFERENCE:
1528 System.out.print( "absolute (maximal) counts difference" );
1529 html_desc.append( "absolute (maximal) counts difference" );
1531 case MAX_COUNTS_DIFFERENCE:
1532 System.out.print( "(maximal) counts difference" );
1533 html_desc.append( "(maximal) counts difference" );
1536 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for similarities" );
1538 if ( sort_by_species_count_first ) {
1539 System.out.println( " (sort by species count first)" );
1540 html_desc.append( " (sort by species count first)" );
1543 System.out.println();
1545 html_desc.append( "</td></tr>" + nl );
1546 System.out.print( " Detailedness : " );
1547 switch ( detailedness ) {
1549 System.out.println( "basic" );
1551 case LIST_COMBINING_DOMAIN_FOR_EACH_SPECIES:
1552 System.out.println( "list combining domains for each species" );
1555 System.out.println( "punctilious" );
1558 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for detailedness" );
1560 System.out.print( " Print option : " );
1561 switch ( domain_similarity_print_option ) {
1563 System.out.println( "HTML" );
1565 case SIMPLE_TAB_DELIMITED:
1566 System.out.println( "simple tab delimited" );
1569 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for print option" );
1571 System.out.print( " Species matrix : " + species_matrix );
1572 System.out.println();
1573 final File dc_data_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file + DATA_FILE_SUFFIX );
1574 System.out.println( "Domain comb data output : " + dc_data_file );
1575 html_desc.append( "<tr><td>Domain combination data output:</td><td> " + dc_data_file + " </td></tr>" );
1576 System.out.println();
1577 if ( perform_pwc ) {
1578 System.out.println( "Pairwise comparisons: " );
1579 html_desc.append( "<tr><td>Pairwise comparisons:</td><td></td></tr>" );
1580 System.out.print( " Sort by : " );
1581 html_desc.append( "<tr><td>Sort by:</td><td>" );
1582 switch ( domain_similarity_sort_field_for_automated_pwc ) {
1584 System.out.print( "score mean" );
1585 html_desc.append( "score mean" );
1588 System.out.print( "alphabetical domain identifier" );
1589 html_desc.append( "alphabetical domain identifier" );
1591 case MAX_DIFFERENCE:
1592 System.out.print( "difference" );
1593 html_desc.append( "difference" );
1595 case ABS_MAX_COUNTS_DIFFERENCE:
1596 System.out.print( "absolute counts difference" );
1597 html_desc.append( "absolute counts difference" );
1599 case MAX_COUNTS_DIFFERENCE:
1600 System.out.print( "counts difference" );
1601 html_desc.append( "counts difference" );
1605 .unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for similarities" );
1607 System.out.println();
1608 html_desc.append( "</td></tr>" + nl );
1609 if ( jacknifed_distances ) {
1610 html_desc.append( "<tr><td>Jacknife:</td><td>" + jacknife_resamplings + " resamplings</td></tr>" + nl );
1611 html_desc.append( "<tr><td>Jacknife ratio:</td><td>" + ForesterUtil.round( jacknife_ratio, 2 )
1612 + "</td></tr>" + nl );
1613 html_desc.append( "<tr><td>Jacknife random number seed:</td><td>" + random_seed + "</td></tr>" + nl );
1614 System.out.println( " Jacknife : " + jacknife_resamplings + " resamplings" );
1615 System.out.println( " Ratio : " + ForesterUtil.round( jacknife_ratio, 2 ) );
1616 System.out.println( " Random number seed : " + random_seed );
1618 // if ( infer_species_trees ) {
1619 // html_desc.append( "<tr><td>Infer species trees:</td><td>true</td></tr>" + nl );
1620 // System.out.println( " Infer species trees : true" );
1622 if ( ( intrees != null ) && ( intrees.length > 0 ) ) {
1623 for( final File intree_file : intree_files ) {
1624 html_desc.append( "<tr><td>Intree for gain/loss parsimony analysis:</td><td>" + intree_file
1625 + "</td></tr>" + nl );
1626 System.out.println( " Intree for gain/loss pars.: " + intree_file );
1629 if ( radomize_fitch_parsimony ) {
1630 html_desc.append( "<tr><td> Random number seed for Fitch parsimony analysis:</td><td>"
1631 + random_number_seed_for_fitch_parsimony + "</td></tr>" + nl );
1632 System.out.println( " Random number seed : " + random_number_seed_for_fitch_parsimony );
1634 if ( ( domain_id_to_secondary_features_maps != null ) && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
1635 for( int i = 0; i < secondary_features_map_files.length; i++ ) {
1636 html_desc.append( "<tr><td>Secondary features map file:</td><td>"
1637 + secondary_features_map_files[ i ] + "</td></tr>" + nl );
1638 System.out.println( "Secondary features map file : " + secondary_features_map_files[ i ]
1639 + " [mappings for " + domain_id_to_secondary_features_maps[ i ].size() + " domain ids]" );
1641 System.out.println();
1642 System.out.println( "Domain ids to secondary features map:" );
1643 for( final DomainId domain_id : domain_id_to_secondary_features_maps[ i ].keySet() ) {
1644 System.out.print( domain_id.getId() );
1645 System.out.print( " => " );
1646 for( final String sec : domain_id_to_secondary_features_maps[ i ].get( domain_id ) ) {
1647 System.out.print( sec );
1648 System.out.print( " " );
1650 System.out.println();
1655 } // if ( perform_pwc ) {
1656 System.out.println();
1657 html_desc.append( "<tr><td>Command line:</td><td>\n" + cla.getCommandLineArgsAsString() + "\n</td></tr>" + nl );
1658 System.out.println( "Command line : " + cla.getCommandLineArgsAsString() );
1659 BufferedWriter[] query_domains_writer_ary = null;
1660 List<DomainId>[] query_domain_ids_array = null;
1661 if ( query_domain_ids != null ) {
1662 final String[] query_domain_ids_str_array = query_domain_ids.split( "#" );
1663 query_domain_ids_array = new ArrayList[ query_domain_ids_str_array.length ];
1664 query_domains_writer_ary = new BufferedWriter[ query_domain_ids_str_array.length ];
1665 for( int i = 0; i < query_domain_ids_str_array.length; i++ ) {
1666 String query_domain_ids_str = query_domain_ids_str_array[ i ];
1667 final String[] query_domain_ids_str_ary = query_domain_ids_str.split( "~" );
1668 final List<DomainId> query = new ArrayList<DomainId>();
1669 for( final String element : query_domain_ids_str_ary ) {
1670 query.add( new DomainId( element ) );
1672 query_domain_ids_array[ i ] = query;
1673 query_domain_ids_str = query_domain_ids_str.replace( '~', '_' );
1674 String protein_names_writer_str = query_domain_ids_str + surfacing.SEQ_EXTRACT_SUFFIX;
1675 if ( out_dir != null ) {
1676 protein_names_writer_str = out_dir + ForesterUtil.FILE_SEPARATOR + protein_names_writer_str;
1679 query_domains_writer_ary[ i ] = new BufferedWriter( new FileWriter( protein_names_writer_str ) );
1681 catch ( final IOException e ) {
1682 ForesterUtil.fatalError( surfacing.PRG_NAME, "Could not open [" + protein_names_writer_str + "]: "
1683 + e.getLocalizedMessage() );
1687 SortedMap<Species, List<Protein>> protein_lists_per_species = null; //This will only be created if neede.
1688 boolean need_protein_lists_per_species = false;
1689 if ( ( plus_minus_analysis_high_copy_base_species.size() > 0 ) || output_protein_lists_for_all_domains ) {
1690 need_protein_lists_per_species = true;
1692 if ( need_protein_lists_per_species ) {
1693 protein_lists_per_species = new TreeMap<Species, List<Protein>>();
1695 final List<GenomeWideCombinableDomains> gwcd_list = new ArrayList<GenomeWideCombinableDomains>( number_of_genomes );
1696 final SortedSet<DomainId> all_domains_encountered = new TreeSet<DomainId>();
1697 final SortedSet<BinaryDomainCombination> all_bin_domain_combinations_encountered = new TreeSet<BinaryDomainCombination>();
1698 List<BinaryDomainCombination> all_bin_domain_combinations_gained_fitch = null;
1699 List<BinaryDomainCombination> all_bin_domain_combinations_lost_fitch = null;
1700 if ( ( intrees != null ) && ( intrees.length == 1 ) ) {
1701 all_bin_domain_combinations_gained_fitch = new ArrayList<BinaryDomainCombination>();
1702 all_bin_domain_combinations_lost_fitch = new ArrayList<BinaryDomainCombination>();
1704 final DomainLengthsTable domain_lengths_table = new DomainLengthsTable();
1705 final File per_genome_domain_promiscuity_statistics_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR
1706 + output_file + D_PROMISCUITY_FILE_SUFFIX );
1707 BufferedWriter per_genome_domain_promiscuity_statistics_writer = null;
1709 per_genome_domain_promiscuity_statistics_writer = new BufferedWriter( new FileWriter( per_genome_domain_promiscuity_statistics_file ) );
1710 per_genome_domain_promiscuity_statistics_writer.write( "Species:\t" );
1711 per_genome_domain_promiscuity_statistics_writer.write( "Mean:\t" );
1712 per_genome_domain_promiscuity_statistics_writer.write( "SD:\t" );
1713 per_genome_domain_promiscuity_statistics_writer.write( "Median:\t" );
1714 per_genome_domain_promiscuity_statistics_writer.write( "Min:\t" );
1715 per_genome_domain_promiscuity_statistics_writer.write( "Max:\t" );
1716 per_genome_domain_promiscuity_statistics_writer.write( "N:\t" );
1717 per_genome_domain_promiscuity_statistics_writer.write( "Max Promiscuous Domains:"
1718 + ForesterUtil.LINE_SEPARATOR );
1720 catch ( final IOException e2 ) {
1721 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1723 final File log_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file + LOG_FILE_SUFFIX );
1724 BufferedWriter log_writer = null;
1726 log_writer = new BufferedWriter( new FileWriter( log_file ) );
1728 catch ( final IOException e2 ) {
1729 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1731 BufferedWriter dc_data_writer = null;
1733 dc_data_writer = new BufferedWriter( new FileWriter( dc_data_file ) );
1734 dc_data_writer.write( DATA_FILE_DESC );
1735 dc_data_writer.write( ForesterUtil.LINE_SEPARATOR );
1737 catch ( final IOException e2 ) {
1738 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getMessage() );
1740 final DescriptiveStatistics protein_coverage_stats = new BasicDescriptiveStatistics();
1741 final DescriptiveStatistics all_genomes_domains_per_potein_stats = new BasicDescriptiveStatistics();
1742 final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo = new TreeMap<Integer, Integer>();
1743 final SortedSet<String> domains_which_are_always_single = new TreeSet<String>();
1744 final SortedSet<String> domains_which_are_sometimes_single_sometimes_not = new TreeSet<String>();
1745 final SortedSet<String> domains_which_never_single = new TreeSet<String>();
1746 BufferedWriter domains_per_potein_stats_writer = null;
1748 domains_per_potein_stats_writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR
1749 + output_file + "__domains_per_potein_stats.txt" ) );
1750 domains_per_potein_stats_writer.write( "Genome" );
1751 domains_per_potein_stats_writer.write( "\t" );
1752 domains_per_potein_stats_writer.write( "Mean" );
1753 domains_per_potein_stats_writer.write( "\t" );
1754 domains_per_potein_stats_writer.write( "SD" );
1755 domains_per_potein_stats_writer.write( "\t" );
1756 domains_per_potein_stats_writer.write( "Median" );
1757 domains_per_potein_stats_writer.write( "\t" );
1758 domains_per_potein_stats_writer.write( "N" );
1759 domains_per_potein_stats_writer.write( "\t" );
1760 domains_per_potein_stats_writer.write( "Min" );
1761 domains_per_potein_stats_writer.write( "\t" );
1762 domains_per_potein_stats_writer.write( "Max" );
1763 domains_per_potein_stats_writer.write( "\n" );
1765 catch ( final IOException e3 ) {
1766 e3.printStackTrace();
1768 Map<String, DescriptiveStatistics> protein_length_stats_by_dc = null;
1769 Map<String, DescriptiveStatistics> domain_number_stats_by_dc = null;
1770 final Map<String, DescriptiveStatistics> domain_length_stats_by_domain = new HashMap<String, DescriptiveStatistics>();
1771 if ( PERFORM_DC_REGAIN_PROTEINS_STATS ) {
1772 protein_length_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
1773 domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
1776 final SortedMap<String, Set<String>> distinct_domain_architecutures_per_genome = new TreeMap<String, Set<String>>();
1777 final SortedMap<String, Integer> distinct_domain_architecuture_counts = new TreeMap<String, Integer>();
1778 for( int i = 0; i < number_of_genomes; ++i ) {
1779 System.out.println();
1780 System.out.println( ( i + 1 ) + "/" + number_of_genomes );
1781 log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
1782 System.out.println( "Processing : " + input_file_properties[ i ][ 0 ] );
1783 log( "Genome : " + input_file_properties[ i ][ 0 ], log_writer );
1784 HmmscanPerDomainTableParser parser = null;
1785 INDIVIDUAL_SCORE_CUTOFF ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF.NONE;
1786 if ( individual_score_cutoffs != null ) {
1787 ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF_DEFAULT;
1789 if ( ( positive_filter_file != null ) || ( negative_filter_file != null )
1790 || ( negative_domains_filter_file != null ) ) {
1791 HmmscanPerDomainTableParser.FilterType filter_type = HmmscanPerDomainTableParser.FilterType.NONE;
1792 if ( positive_filter_file != null ) {
1793 filter_type = HmmscanPerDomainTableParser.FilterType.POSITIVE_PROTEIN;
1795 else if ( negative_filter_file != null ) {
1796 filter_type = HmmscanPerDomainTableParser.FilterType.NEGATIVE_PROTEIN;
1798 else if ( negative_domains_filter_file != null ) {
1799 filter_type = HmmscanPerDomainTableParser.FilterType.NEGATIVE_DOMAIN;
1801 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
1802 input_file_properties[ i ][ 1 ],
1809 parser = new HmmscanPerDomainTableParser( new File( input_file_properties[ i ][ 0 ] ),
1810 input_file_properties[ i ][ 1 ],
1814 if ( e_value_max >= 0.0 ) {
1815 parser.setEValueMaximum( e_value_max );
1817 parser.setIgnoreDufs( ignore_dufs );
1818 parser.setIgnoreVirusLikeIds( ignore_virus_like_ids );
1819 parser.setIgnoreEngulfedDomains( no_engulfing_overlaps );
1820 if ( max_allowed_overlap != surfacing.MAX_ALLOWED_OVERLAP_DEFAULT ) {
1821 parser.setMaxAllowedOverlap( max_allowed_overlap );
1823 parser.setReturnType( HmmscanPerDomainTableParser.ReturnType.UNORDERED_PROTEIN_DOMAIN_COLLECTION_PER_PROTEIN );
1824 if ( individual_score_cutoffs != null ) {
1825 parser.setIndividualScoreCutoffs( individual_score_cutoffs );
1827 List<Protein> protein_list = null;
1829 protein_list = parser.parse();
1831 catch ( final IOException e ) {
1832 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
1834 catch ( final Exception e ) {
1835 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, e.getMessage(), e );
1838 System.out.println( "Domains ignored due to negative domain filter: " );
1839 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToNegativeDomainFilterCountsMap() );
1840 System.out.println( "Domains ignored due to virus like id: " );
1841 ForesterUtil.printCountingMap( parser.getDomainsIgnoredDueToVirusLikeIdCountsMap() );
1843 final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
1844 protein_coverage_stats.addValue( coverage );
1845 int distinct_das = -1;
1846 if ( DA_ANALYSIS ) {
1847 final String genome = input_file_properties[ i ][ 0 ];
1848 distinct_das = storeDomainArchitectures( genome,
1849 distinct_domain_architecutures_per_genome,
1851 distinct_domain_architecuture_counts );
1853 System.out.println( "Number of proteins encountered : " + parser.getProteinsEncountered() );
1854 log( "Number of proteins encountered : " + parser.getProteinsEncountered(), log_writer );
1855 System.out.println( "Number of proteins stored : " + protein_list.size() );
1856 log( "Number of proteins stored : " + protein_list.size(), log_writer );
1857 System.out.println( "Coverage : "
1858 + ForesterUtil.roundToInt( 100.0 * coverage ) + "%" );
1859 log( "Coverage : " + ForesterUtil.roundToInt( 100.0 * coverage )
1860 + "%", log_writer );
1861 System.out.println( "Domains encountered : " + parser.getDomainsEncountered() );
1862 log( "Domains encountered : " + parser.getDomainsEncountered(), log_writer );
1863 System.out.println( "Domains stored : " + parser.getDomainsStored() );
1864 log( "Domains stored : " + parser.getDomainsStored(), log_writer );
1865 System.out.println( "Distinct domains stored : "
1866 + parser.getDomainsStoredSet().size() );
1867 log( "Distinct domains stored : " + parser.getDomainsStoredSet().size(), log_writer );
1868 System.out.println( "Domains ignored due to individual score cutoffs: "
1869 + parser.getDomainsIgnoredDueToIndividualScoreCutoff() );
1870 log( "Domains ignored due to individual score cutoffs: "
1871 + parser.getDomainsIgnoredDueToIndividualScoreCutoff(),
1873 System.out.println( "Domains ignored due to E-value : "
1874 + parser.getDomainsIgnoredDueToEval() );
1875 log( "Domains ignored due to E-value : " + parser.getDomainsIgnoredDueToEval(), log_writer );
1876 System.out.println( "Domains ignored due to DUF designation : "
1877 + parser.getDomainsIgnoredDueToDuf() );
1878 log( "Domains ignored due to DUF designation : " + parser.getDomainsIgnoredDueToDuf(), log_writer );
1879 if ( ignore_virus_like_ids ) {
1880 System.out.println( "Domains ignored due virus like ids : "
1881 + parser.getDomainsIgnoredDueToVirusLikeIds() );
1882 log( "Domains ignored due virus like ids : " + parser.getDomainsIgnoredDueToVirusLikeIds(),
1885 System.out.println( "Domains ignored due negative domain filter : "
1886 + parser.getDomainsIgnoredDueToNegativeDomainFilter() );
1887 log( "Domains ignored due negative domain filter : "
1888 + parser.getDomainsIgnoredDueToNegativeDomainFilter(),
1890 System.out.println( "Domains ignored due to overlap : "
1891 + parser.getDomainsIgnoredDueToOverlap() );
1892 log( "Domains ignored due to overlap : " + parser.getDomainsIgnoredDueToOverlap(),
1894 if ( negative_filter_file != null ) {
1895 System.out.println( "Proteins ignored due to negative filter : "
1896 + parser.getProteinsIgnoredDueToFilter() );
1897 log( "Proteins ignored due to negative filter : " + parser.getProteinsIgnoredDueToFilter(),
1900 if ( positive_filter_file != null ) {
1901 System.out.println( "Proteins ignored due to positive filter : "
1902 + parser.getProteinsIgnoredDueToFilter() );
1903 log( "Proteins ignored due to positive filter : " + parser.getProteinsIgnoredDueToFilter(),
1906 if ( DA_ANALYSIS ) {
1907 System.out.println( "Distinct domain architectures stored : " + distinct_das );
1908 log( "Distinct domain architectures stored : " + distinct_das, log_writer );
1910 System.out.println( "Time for processing : " + parser.getTime() + "ms" );
1911 log( "", log_writer );
1912 html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
1913 + input_file_properties[ i ][ 1 ] + "]" + ":</td><td>domains analyzed: "
1914 + parser.getDomainsStored() + "; domains ignored: [ind score cutoffs: "
1915 + parser.getDomainsIgnoredDueToIndividualScoreCutoff() + "] [E-value cutoff: "
1916 + parser.getDomainsIgnoredDueToEval() + "] [DUF: " + parser.getDomainsIgnoredDueToDuf()
1917 + "] [virus like ids: " + parser.getDomainsIgnoredDueToVirusLikeIds()
1918 + "] [negative domain filter: " + parser.getDomainsIgnoredDueToNegativeDomainFilter()
1919 + "] [overlap: " + parser.getDomainsIgnoredDueToOverlap() + "]" );
1920 if ( negative_filter_file != null ) {
1921 html_desc.append( "; proteins ignored due to negative filter: "
1922 + parser.getProteinsIgnoredDueToFilter() );
1924 if ( positive_filter_file != null ) {
1925 html_desc.append( "; proteins ignored due to positive filter: "
1926 + parser.getProteinsIgnoredDueToFilter() );
1928 html_desc.append( "</td></tr>" + nl );
1929 // domain_partner_counts_array[ i ] =
1930 // Methods.getDomainPartnerCounts( protein_domain_collections_array[
1932 // false, input_file_properties[ i ][ 1 ] );
1935 for( final Protein protein : protein_list ) {
1936 dc_data_writer.write( SurfacingUtil.proteinToDomainCombinations( protein, count + "", "\t" )
1939 for( final Domain d : protein.getProteinDomains() ) {
1940 final String d_str = d.getDomainId().toString();
1941 if ( !domain_length_stats_by_domain.containsKey( d_str ) ) {
1942 domain_length_stats_by_domain.put( d_str, new BasicDescriptiveStatistics() );
1944 domain_length_stats_by_domain.get( d_str ).addValue( d.getLength() );
1948 catch ( final IOException e ) {
1949 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
1951 SurfacingUtil.domainsPerProteinsStatistics( input_file_properties[ i ][ 1 ],
1953 all_genomes_domains_per_potein_stats,
1954 all_genomes_domains_per_potein_histo,
1955 domains_which_are_always_single,
1956 domains_which_are_sometimes_single_sometimes_not,
1957 domains_which_never_single,
1958 domains_per_potein_stats_writer );
1959 domain_lengths_table.addLengths( protein_list );
1960 if ( !DA_ANALYSIS ) {
1961 gwcd_list.add( BasicGenomeWideCombinableDomains
1962 .createInstance( protein_list,
1963 ignore_combination_with_same,
1964 new BasicSpecies( input_file_properties[ i ][ 1 ] ),
1965 domain_id_to_go_ids_map,
1967 protein_length_stats_by_dc,
1968 domain_number_stats_by_dc ) );
1969 if ( gwcd_list.get( i ).getSize() > 0 ) {
1970 SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
1972 per_genome_domain_promiscuity_statistics_writer,
1976 if ( output_binary_domain_combinationsfor_graph_analysis ) {
1977 SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
1983 SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
1984 SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
1985 all_bin_domain_combinations_encountered );
1988 if ( query_domains_writer_ary != null ) {
1989 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
1991 SurfacingUtil.extractProteinNames( protein_list,
1992 query_domain_ids_array[ j ],
1993 query_domains_writer_ary[ j ],
1995 LIMIT_SPEC_FOR_PROT_EX );
1996 query_domains_writer_ary[ j ].flush();
1998 catch ( final IOException e ) {
1999 e.printStackTrace();
2003 if ( need_protein_lists_per_species ) {
2004 protein_lists_per_species.put( new BasicSpecies( input_file_properties[ i ][ 1 ] ), protein_list );
2009 catch ( final IOException e2 ) {
2010 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2013 } // for( int i = 0; i < number_of_genomes; ++i ) {
2014 ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
2015 + per_genome_domain_promiscuity_statistics_file );
2017 if ( DA_ANALYSIS ) {
2018 performDomainArchitectureAnalysis( distinct_domain_architecutures_per_genome,
2019 distinct_domain_architecuture_counts,
2021 distinct_domain_architecutures_per_genome.clear();
2022 distinct_domain_architecuture_counts.clear();
2026 domains_per_potein_stats_writer.write( "ALL" );
2027 domains_per_potein_stats_writer.write( "\t" );
2028 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.arithmeticMean() + "" );
2029 domains_per_potein_stats_writer.write( "\t" );
2030 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.sampleStandardDeviation() + "" );
2031 domains_per_potein_stats_writer.write( "\t" );
2032 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.median() + "" );
2033 domains_per_potein_stats_writer.write( "\t" );
2034 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getN() + "" );
2035 domains_per_potein_stats_writer.write( "\t" );
2036 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMin() + "" );
2037 domains_per_potein_stats_writer.write( "\t" );
2038 domains_per_potein_stats_writer.write( all_genomes_domains_per_potein_stats.getMax() + "" );
2039 domains_per_potein_stats_writer.write( "\n" );
2040 domains_per_potein_stats_writer.close();
2041 printOutPercentageOfMultidomainProteins( all_genomes_domains_per_potein_histo, log_writer );
2042 ForesterUtil.map2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
2043 + "__all_genomes_domains_per_potein_histo.txt" ), all_genomes_domains_per_potein_histo, "\t", "\n" );
2044 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
2045 + "__domains_always_single_.txt" ), domains_which_are_always_single, "\n" );
2046 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
2047 + "__domains_single_or_combined.txt" ), domains_which_are_sometimes_single_sometimes_not, "\n" );
2048 ForesterUtil.collection2file( new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file
2049 + "__domains_always_combined.txt" ), domains_which_never_single, "\n" );
2050 ForesterUtil.programMessage( PRG_NAME,
2051 "Average of proteins with a least one domain assigned: "
2052 + ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
2053 + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
2054 ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: "
2055 + ( 100 * protein_coverage_stats.getMin() ) + "%-" + ( 100 * protein_coverage_stats.getMax() )
2057 log( "Average of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.arithmeticMean() )
2058 + "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
2059 log( "Range of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.getMin() ) + "%-"
2060 + ( 100 * protein_coverage_stats.getMax() ) + "%", log_writer );
2062 catch ( final IOException e2 ) {
2063 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2065 if ( query_domains_writer_ary != null ) {
2066 for( int j = 0; j < query_domain_ids_array.length; j++ ) {
2068 query_domains_writer_ary[ j ].close();
2070 catch ( final IOException e ) {
2071 ForesterUtil.fatalError( surfacing.PRG_NAME, e.toString() );
2076 per_genome_domain_promiscuity_statistics_writer.close();
2077 dc_data_writer.close();
2080 catch ( final IOException e2 ) {
2081 ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
2083 if ( PERFORM_DOMAIN_LENGTH_ANALYSIS ) {
2085 SurfacingUtil.executeDomainLengthAnalysis( input_file_properties,
2087 domain_lengths_table,
2088 domain_lengths_analysis_outfile );
2090 catch ( final IOException e1 ) {
2091 ForesterUtil.fatalError( surfacing.PRG_NAME, e1.toString() );
2093 System.out.println();
2094 ForesterUtil.programMessage( PRG_NAME, "Wrote domain length data to: " + domain_lengths_analysis_outfile );
2095 System.out.println();
2097 final long analysis_start_time = new Date().getTime();
2098 PairwiseDomainSimilarityCalculator pw_calc = null;
2099 // double[] values_for_all_scores_histogram = null;
2100 final DomainSimilarityCalculator calc = new BasicDomainSimilarityCalculator( domain_similarity_sort_field,
2101 sort_by_species_count_first,
2102 number_of_genomes == 2 );
2103 switch ( scoring ) {
2105 pw_calc = new CombinationsBasedPairwiseDomainSimilarityCalculator();
2108 pw_calc = new DomainCountsBasedPairwiseSimilarityCalculator();
2111 pw_calc = new ProteinCountsBasedPairwiseDomainSimilarityCalculator();
2114 ForesterUtil.unexpectedFatalError( surfacing.PRG_NAME, "unknown value for sorting for scoring" );
2116 DomainSimilarityCalculator.GoAnnotationOutput go_annotation_output = DomainSimilarityCalculator.GoAnnotationOutput.NONE;
2117 if ( domain_id_to_go_ids_map != null ) {
2118 go_annotation_output = DomainSimilarityCalculator.GoAnnotationOutput.ALL;
2120 final SortedSet<DomainSimilarity> similarities = calc
2121 .calculateSimilarities( pw_calc,
2123 ignore_domains_without_combs_in_all_spec,
2124 ignore_species_specific_domains );
2125 SurfacingUtil.decoratePrintableDomainSimilarities( similarities,
2127 go_annotation_output,
2129 go_namespace_limit );
2130 DescriptiveStatistics pw_stats = null;
2132 String my_outfile = output_file.toString();
2133 Map<Character, Writer> split_writers = null;
2134 Writer writer = null;
2135 if ( similarities.size() > MINIMAL_NUMBER_OF_SIMILARITIES_FOR_SPLITTING ) {
2136 if ( my_outfile.endsWith( ".html" ) ) {
2137 my_outfile = my_outfile.substring( 0, my_outfile.length() - 5 );
2139 split_writers = new HashMap<Character, Writer>();
2140 createSplitWriters( out_dir, my_outfile, split_writers );
2142 else if ( !my_outfile.endsWith( ".html" ) ) {
2143 my_outfile += ".html";
2144 writer = new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) );
2146 List<Species> species_order = null;
2147 if ( species_matrix ) {
2148 species_order = new ArrayList<Species>();
2149 for( int i = 0; i < number_of_genomes; i++ ) {
2150 species_order.add( new BasicSpecies( input_file_properties[ i ][ 1 ] ) );
2153 html_desc.append( "<tr><td>Sum of all distinct binary combinations:</td><td>"
2154 + all_bin_domain_combinations_encountered.size() + "</td></tr>" + nl );
2155 html_desc.append( "<tr><td>Sum of all distinct domains:</td><td>" + all_domains_encountered.size()
2156 + "</td></tr>" + nl );
2157 html_desc.append( "<tr><td>Analysis date/time:</td><td>"
2158 + new java.text.SimpleDateFormat( "yyyy.MM.dd HH:mm:ss" ).format( new java.util.Date() )
2159 + "</td></tr>" + nl );
2160 html_desc.append( "</table>" + nl );
2161 pw_stats = SurfacingUtil
2162 .writeDomainSimilaritiesToFile( html_desc,
2163 new StringBuilder( number_of_genomes + " genomes" ),
2167 number_of_genomes == 2,
2169 domain_similarity_print_option,
2170 domain_similarity_sort_field,
2173 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote main output (includes domain similarities) to: \""
2174 + ( out_dir == null ? my_outfile : out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile ) + "\"" );
2176 catch ( final IOException e ) {
2177 ForesterUtil.fatalError( surfacing.PRG_NAME, "Failed to write similarites to: \"" + output_file + "\" ["
2178 + e.getMessage() + "]" );
2180 System.out.println();
2181 // values_for_all_scores_histogram = pw_stats.getDataAsDoubleArray();
2182 final Species[] species = new Species[ number_of_genomes ];
2183 for( int i = 0; i < number_of_genomes; ++i ) {
2184 species[ i ] = new BasicSpecies( input_file_properties[ i ][ 1 ] );
2186 List<Phylogeny> inferred_trees = null;
2187 if ( ( number_of_genomes > 2 ) && perform_pwc ) {
2188 final PairwiseGenomeComparator pwgc = new PairwiseGenomeComparator();
2189 pwgc.performPairwiseComparisons( html_desc,
2190 sort_by_species_count_first,
2192 ignore_domains_without_combs_in_all_spec,
2193 ignore_species_specific_domains,
2194 domain_similarity_sort_field_for_automated_pwc,
2195 domain_similarity_print_option,
2197 domain_id_to_go_ids_map,
2204 automated_pairwise_comparison_suffix,
2206 surfacing.PAIRWISE_DOMAIN_COMPARISONS_PREFIX,
2210 String matrix_output_file = new String( output_file.toString() );
2211 if ( matrix_output_file.indexOf( '.' ) > 1 ) {
2212 matrix_output_file = matrix_output_file.substring( 0, matrix_output_file.indexOf( '.' ) );
2214 if ( out_dir != null ) {
2215 matrix_output_file = out_dir + ForesterUtil.FILE_SEPARATOR + matrix_output_file;
2216 output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
2218 SurfacingUtil.writeMatrixToFile( new File( matrix_output_file
2219 + surfacing.MATRIX_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX ), pwgc.getDomainDistanceScoresMeans() );
2221 .writeMatrixToFile( new File( matrix_output_file
2222 + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
2223 pwgc.getSharedBinaryCombinationsBasedDistances() );
2224 SurfacingUtil.writeMatrixToFile( new File( matrix_output_file
2225 + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
2226 pwgc.getSharedDomainsBasedDistances() );
2227 final Phylogeny nj_gd = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2228 + surfacing.NJ_TREE_MEAN_SCORE_BASED_GENOME_DISTANCE_SUFFIX ), pwgc.getDomainDistanceScoresMeans()
2230 final Phylogeny nj_bc = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2231 + surfacing.NJ_TREE_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ), pwgc
2232 .getSharedBinaryCombinationsBasedDistances().get( 0 ) );
2233 final Phylogeny nj_d = SurfacingUtil.createNjTreeBasedOnMatrixToFile( new File( matrix_output_file
2234 + surfacing.NJ_TREE_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ), pwgc
2235 .getSharedDomainsBasedDistances().get( 0 ) );
2236 inferred_trees = new ArrayList<Phylogeny>();
2237 inferred_trees.add( nj_gd );
2238 inferred_trees.add( nj_bc );
2239 inferred_trees.add( nj_d );
2240 if ( jacknifed_distances ) {
2241 pwgc.performPairwiseComparisonsJacknifed( species,
2245 jacknife_resamplings,
2249 .writeMatrixToFile( new File( matrix_output_file
2251 + ForesterUtil.round( jacknife_ratio, 2 )
2253 + jacknife_resamplings
2254 + surfacing.MATRIX_SHARED_BIN_COMBINATIONS_BASED_GENOME_DISTANCE_SUFFIX ),
2255 pwgc.getSharedBinaryCombinationsBasedDistances() );
2257 .writeMatrixToFile( new File( matrix_output_file + "_" + ForesterUtil.round( jacknife_ratio, 2 )
2258 + "_" + jacknife_resamplings
2259 + surfacing.MATRIX_SHARED_DOMAINS_BASED_GENOME_DISTANCE_SUFFIX ),
2260 pwgc.getSharedDomainsBasedDistances() );
2261 // if ( infer_species_trees ) {
2262 // inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
2263 // + INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc
2264 // .getSharedBinaryCombinationsBasedDistances() );
2265 // inferSpeciesTrees( new File( output_file + "_" + jacknife_resamplings
2266 // + INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX ), pwgc.getSharedDomainsBasedDistances() );
2269 } // if ( ( output_file != null ) && ( number_of_genomes > 2 ) && !isEmpty( automated_pairwise_comparison_suffix ) )
2270 if ( ( out_dir != null ) && ( !perform_pwc ) ) {
2271 output_file = new File( out_dir + ForesterUtil.FILE_SEPARATOR + output_file );
2273 writePresentToNexus( output_file, positive_filter_file, filter, gwcd_list );
2274 if ( ( ( intrees != null ) && ( intrees.length > 0 ) ) && ( number_of_genomes > 2 ) ) {
2275 final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
2277 max_allowed_overlap,
2278 no_engulfing_overlaps,
2282 if ( radomize_fitch_parsimony ) {
2283 s += random_number_seed_for_fitch_parsimony + "_";
2286 for( final Phylogeny intree : intrees ) {
2287 final String outfile_name = ForesterUtil.removeSuffix( output_file.toString() ) + s
2288 + ForesterUtil.removeSuffix( intree_files[ i ].toString() );
2289 final DomainParsimonyCalculator domain_parsimony = DomainParsimonyCalculator.createInstance( intree,
2291 SurfacingUtil.executeParsimonyAnalysis( random_number_seed_for_fitch_parsimony,
2292 radomize_fitch_parsimony,
2296 domain_id_to_go_ids_map,
2299 parameters_sb.toString(),
2300 domain_id_to_secondary_features_maps,
2301 positive_filter_file == null ? null : filter,
2302 output_binary_domain_combinationsfor_graph_analysis,
2303 all_bin_domain_combinations_gained_fitch,
2304 all_bin_domain_combinations_lost_fitch,
2306 protein_length_stats_by_dc,
2307 domain_number_stats_by_dc,
2308 domain_length_stats_by_domain );
2309 // Listing of all domain combinations gained is only done if only one input tree is used.
2310 if ( ( domain_id_to_secondary_features_maps != null )
2311 && ( domain_id_to_secondary_features_maps.length > 0 ) ) {
2313 for( final Map<DomainId, Set<String>> domain_id_to_secondary_features_map : domain_id_to_secondary_features_maps ) {
2314 final Map<Species, MappingResults> mapping_results_map = new TreeMap<Species, MappingResults>();
2315 final DomainParsimonyCalculator secondary_features_parsimony = DomainParsimonyCalculator
2316 .createInstance( intree, gwcd_list, domain_id_to_secondary_features_map );
2318 .executeParsimonyAnalysisForSecondaryFeatures( outfile_name
2320 + secondary_features_map_files[ j++ ],
2321 secondary_features_parsimony,
2323 parameters_sb.toString(),
2324 mapping_results_map );
2326 System.out.println();
2327 System.out.println( "Mapping to secondary features:" );
2328 for( final Species spec : mapping_results_map.keySet() ) {
2329 final MappingResults mapping_results = mapping_results_map.get( spec );
2330 final int total_domains = mapping_results.getSumOfFailures()
2331 + mapping_results.getSumOfSuccesses();
2332 System.out.print( spec + ":" );
2333 System.out.print( " mapped domains = " + mapping_results.getSumOfSuccesses() );
2334 System.out.print( ", not mapped domains = " + mapping_results.getSumOfFailures() );
2335 if ( total_domains > 0 ) {
2336 System.out.println( ", mapped ratio = "
2337 + ( ( 100 * mapping_results.getSumOfSuccesses() ) / total_domains ) + "%" );
2340 System.out.println( ", mapped ratio = n/a (total domains = 0 )" );
2347 } // for( final Phylogeny intree : intrees ) {
2349 if ( plus_minus_analysis_high_copy_base_species.size() > 0 ) {
2350 executePlusMinusAnalysis( output_file,
2351 plus_minus_analysis_high_copy_base_species,
2352 plus_minus_analysis_high_copy_target_species,
2353 plus_minus_analysis_high_low_copy_species,
2355 protein_lists_per_species,
2356 domain_id_to_go_ids_map,
2358 plus_minus_analysis_numbers );
2360 if ( output_protein_lists_for_all_domains ) {
2361 writeProteinListsForAllSpecies( out_dir,
2362 protein_lists_per_species,
2364 output_list_of_all_proteins_per_domain_e_value_max );
2366 if ( all_bin_domain_combinations_gained_fitch != null ) {
2368 executeFitchGainsAnalysis( new File( output_file
2369 + surfacing.OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
2370 all_bin_domain_combinations_gained_fitch,
2371 all_domains_encountered.size(),
2372 all_bin_domain_combinations_encountered,
2375 catch ( final IOException e ) {
2376 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
2379 if ( all_bin_domain_combinations_lost_fitch != null ) {
2381 executeFitchGainsAnalysis( new File( output_file
2382 + surfacing.OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX ),
2383 all_bin_domain_combinations_lost_fitch,
2384 all_domains_encountered.size(),
2385 all_bin_domain_combinations_encountered,
2388 catch ( final IOException e ) {
2389 ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
2392 final Runtime rt = java.lang.Runtime.getRuntime();
2393 final long free_memory = rt.freeMemory() / 1000000;
2394 final long total_memory = rt.totalMemory() / 1000000;
2395 ForesterUtil.programMessage( PRG_NAME, "Time for analysis : " + ( new Date().getTime() - analysis_start_time )
2397 ForesterUtil.programMessage( PRG_NAME, "Total running time: " + ( new Date().getTime() - start_time ) + "ms " );
2398 ForesterUtil.programMessage( PRG_NAME, "Free memory : " + free_memory + "MB, total memory: "
2399 + total_memory + "MB" );
2400 ForesterUtil.programMessage( PRG_NAME, "If this application is useful to you, please cite:" );
2401 ForesterUtil.programMessage( PRG_NAME, surfacing.WWW );
2402 ForesterUtil.programMessage( PRG_NAME, "OK" );
2403 System.out.println();
// Prints domain-architecture (DA) statistics gathered across all genomes:
// every DA whose global count reaches min_count is echoed to stdout, and a
// per-genome listing of DAs is accumulated and printed under the heading
// "Unique Domain Architectures".
// NOTE(review): several physical lines of this method (closing braces and at
// least one condition between lines 2417 and 2420) are not visible in this
// view — hedged comments below; verify against the full source file.
//
// domain_architecutures      : genome name -> set of DA strings seen in it
// domain_architecuture_counts: DA string -> number of genomes/proteins it was
//                              counted in (exact semantics set by the caller)
// min_count                  : threshold for echoing a DA count to stdout
2406 private static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
2407 final SortedMap<String, Integer> domain_architecuture_counts,
2408 final int min_count ) {
// Accumulates "genome<TAB>da" lines for the final printout.
2409 final StringBuilder unique_das = new StringBuilder();
2410 final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
2411 System.out.println( "Domain Architecture Counts (min count: " + min_count + " ):" );
2412 while ( it.hasNext() ) {
2413 final Map.Entry<String, Integer> e = it.next();
2414 final String da = e.getKey();
2415 final int count = e.getValue();
// Only DAs reaching the caller-supplied threshold are echoed.
2416 if ( count >= min_count ) {
2417 System.out.println( da + "\t" + count );
// NOTE(review): lines 2418-2419 are missing here; the per-genome scan below
// is presumably guarded (e.g. run only when count == 1, matching the
// "Unique Domain Architectures" heading) — confirm against the full file.
2420 final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
2421 while ( it2.hasNext() ) {
2422 final Map.Entry<String, Set<String>> e2 = it2.next();
2423 final String genome = e2.getKey();
2424 final Set<String> das = e2.getValue();
// Record the genome(s) whose DA set contains this architecture.
2425 if ( das.contains( da ) ) {
2426 unique_das.append( genome + "\t" + da + ForesterUtil.LINE_SEPARATOR );
2431 System.out.println();
2432 System.out.println();
2433 System.out.println( "Unique Domain Architectures:" );
2434 System.out.println( unique_das );
2435 System.out.println();
2436 System.out.println();
// Records, for one genome, the distinct domain-architecture strings found in
// its protein list (one string per protein, domain ids joined by '~'), and
// bumps the global per-DA occurrence counts for DAs newly seen in this genome.
// Declared to return int — presumably the number of distinct DAs stored for
// this genome (e.g. da.size()); the return statement and the tail of the
// loop (including the add to 'da' and the else-branch close) fall outside
// this view. NOTE(review): confirm against the full source file.
2439 private static int storeDomainArchitectures( final String genome,
2440 final SortedMap<String, Set<String>> domain_architecutures,
2441 final List<Protein> protein_list,
2442 final Map<String, Integer> distinct_domain_architecuture_counts ) {
// Distinct DA strings of this genome; also published into the per-genome map
// so later analysis (performDomainArchitectureAnalysis) can read it.
2443 final Set<String> da = new HashSet<String>();
2444 domain_architecutures.put( genome, da );
2445 for( final Protein protein : protein_list ) {
// DA string for this protein: its domain ids joined by '~'.
2446 final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~" );
// Count each DA at most once per genome.
2447 if ( !da.contains( da_str ) ) {
// First global sighting initializes the count to 1 ...
2448 if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
2449 distinct_domain_architecuture_counts.put( da_str, 1 );
// ... otherwise (else-branch opener not visible here) increment it.
2452 distinct_domain_architecuture_counts.put( da_str,
2453 distinct_domain_architecuture_counts.get( da_str ) + 1 );
2461 private static void createSplitWriters( final File out_dir,
2462 final String my_outfile,
2463 final Map<Character, Writer> split_writers ) throws IOException {
2464 split_writers.put( 'a', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2465 + "_domains_A.html" ) ) );
2466 split_writers.put( 'b', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2467 + "_domains_B.html" ) ) );
2468 split_writers.put( 'c', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2469 + "_domains_C.html" ) ) );
2470 split_writers.put( 'd', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2471 + "_domains_D.html" ) ) );
2472 split_writers.put( 'e', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2473 + "_domains_E.html" ) ) );
2474 split_writers.put( 'f', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2475 + "_domains_F.html" ) ) );
2476 split_writers.put( 'g', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2477 + "_domains_G.html" ) ) );
2478 split_writers.put( 'h', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2479 + "_domains_H.html" ) ) );
2480 split_writers.put( 'i', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2481 + "_domains_I.html" ) ) );
2482 split_writers.put( 'j', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2483 + "_domains_J.html" ) ) );
2484 split_writers.put( 'k', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2485 + "_domains_K.html" ) ) );
2486 split_writers.put( 'l', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2487 + "_domains_L.html" ) ) );
2488 split_writers.put( 'm', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2489 + "_domains_M.html" ) ) );
2490 split_writers.put( 'n', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2491 + "_domains_N.html" ) ) );
2492 split_writers.put( 'o', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2493 + "_domains_O.html" ) ) );
2494 split_writers.put( 'p', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2495 + "_domains_P.html" ) ) );
2496 split_writers.put( 'q', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2497 + "_domains_Q.html" ) ) );
2498 split_writers.put( 'r', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2499 + "_domains_R.html" ) ) );
2500 split_writers.put( 's', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2501 + "_domains_S.html" ) ) );
2502 split_writers.put( 't', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2503 + "_domains_T.html" ) ) );
2504 split_writers.put( 'u', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2505 + "_domains_U.html" ) ) );
2506 split_writers.put( 'v', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2507 + "_domains_V.html" ) ) );
2508 split_writers.put( 'w', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2509 + "_domains_W.html" ) ) );
2510 split_writers.put( 'x', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2511 + "_domains_X.html" ) ) );
2512 split_writers.put( 'y', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2513 + "_domains_Y.html" ) ) );
2514 split_writers.put( 'z', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2515 + "_domains_Z.html" ) ) );
2516 split_writers.put( '0', new BufferedWriter( new FileWriter( out_dir + ForesterUtil.FILE_SEPARATOR + my_outfile
2517 + "_domains_0.html" ) ) );
2520 private static void printOutPercentageOfMultidomainProteins( final SortedMap<Integer, Integer> all_genomes_domains_per_potein_histo,
2521 final Writer log_writer ) {
2523 for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
2524 sum += entry.getValue();
2526 final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
2527 ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
2528 log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
// Prepares an input species tree for parsimony analyses: verifies each genome
// name from input_file_properties labels exactly one tree node, fills in
// missing node names from taxonomy data (code, then scientific name, then
// common name), prunes external nodes not among the analyzed genomes, and
// finally re-checks that every genome is still resolvable in the pruned tree.
// Aborts via ForesterUtil.fatalError on any violation.
// NOTE(review): several physical lines (else/try openers and closing braces)
// are not visible in this view — verify hedged points against the full file.
2531 private static void preparePhylogenyForParsimonyAnalyses( final Phylogeny intree,
2532 final String[][] input_file_properties ) {
// Genome names (column 1 of input_file_properties) expected as tree labels.
2533 final String[] genomes = new String[ input_file_properties.length ];
2534 for( int i = 0; i < input_file_properties.length; ++i ) {
// A genome name matching more than one node would make mapping ambiguous.
2535 if ( intree.getNodes( input_file_properties[ i ][ 1 ] ).size() > 1 ) {
2536 ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
2537 + "] is not unique in input tree " + intree.getName() );
2539 genomes[ i ] = input_file_properties[ i ][ 1 ];
// Give every unnamed node a name derived from its taxonomy data, preferring
// taxonomy code > scientific name > common name.
2542 final PhylogenyNodeIterator it = intree.iteratorPostorder();
2543 while ( it.hasNext() ) {
2544 final PhylogenyNode n = it.next();
2545 if ( ForesterUtil.isEmpty( n.getName() ) ) {
2546 if ( n.getNodeData().isHasTaxonomy()
2547 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getTaxonomyCode() ) ) {
2548 n.setName( n.getNodeData().getTaxonomy().getTaxonomyCode() );
2550 else if ( n.getNodeData().isHasTaxonomy()
2551 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getScientificName() ) ) {
2552 n.setName( n.getNodeData().getTaxonomy().getScientificName() );
2554 else if ( n.getNodeData().isHasTaxonomy()
2555 && !ForesterUtil.isEmpty( n.getNodeData().getTaxonomy().getCommonName() ) ) {
2556 n.setName( n.getNodeData().getTaxonomy().getCommonName() );
// Final fallback (receiver on the missing preceding line is presumably
// ForesterUtil): no usable name at all is fatal.
2560 .fatalError( surfacing.PRG_NAME,
2561 "node with no name, scientific name, common name, or taxonomy code present" );
// Prune external nodes that are not among the analyzed genomes; report them.
2566 final List<String> igns = PhylogenyMethods.deleteExternalNodesPositiveSelection( genomes, intree );
2567 if ( igns.size() > 0 ) {
2568 System.out.println( "Not using the following " + igns.size() + " nodes:" );
2569 for( int i = 0; i < igns.size(); ++i ) {
2570 System.out.println( " " + i + ": " + igns.get( i ) );
2572 System.out.println( "--" );
// Sanity re-check after pruning: every genome must still map to exactly one
// node (getNode throws IllegalArgumentException otherwise; the try opener
// for the catch below is on a line not visible here).
2574 for( final String[] input_file_propertie : input_file_properties ) {
2576 intree.getNode( input_file_propertie[ 1 ] );
2578 catch ( final IllegalArgumentException e ) {
2579 ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
2580 + "] not present/not unique in input tree" );
/**
 * Prints usage information, the full list of command line options, and two
 * worked example invocations to standard out. Called when the user asks for
 * help or supplies unusable arguments.
 * <p>
 * NOTE(review): several runtime strings below contain likely typos
 * ("parismony", "perfom", "speparator"); they are user-visible text, so fixing
 * them is a behavior change — confirm before correcting.
 */
private static void printHelp() {
    System.out.println();
    System.out.println( "Usage:" );
    System.out.println();
    System.out.println( "% java -Xms256m -Xmx512m -cp forester.jar org.forester.applications." + surfacing.PRG_NAME
            + " [options] <phylogen(y|ies) infile> [external node name 1] [name 2] ... [name n]" );
    System.out.println();
    System.out.println( " Note: This software might need a significant amount of memory (heap space);" );
    System.out
            .println( " hence use \"-Xms128m -Xmx512m\" (or more) to prevent a \"java.lang.OutOfMemoryError\"." );
    System.out.println();
    System.out.println( " Options: " );
    // One line per option: option constant + short human-readable description.
    System.out.println( surfacing.DETAILEDNESS_OPTION + ": level of detail for similarities output file (default:"
            + DETAILEDNESS_DEFAULT + ")" );
    System.out.println( surfacing.IGNORE_COMBINATION_WITH_SAME_OPTION
            + ": to ignore combinations with self (default: not to ignore)" );
    System.out
            .println( surfacing.IGNORE_DOMAINS_WITHOUT_COMBINATIONS_IN_ALL_SPECIES_OPTION
                    + ": to ignore domains without combinations in any species (for similarity calc purposes, not for parsimony analyses) (default: not to ignore)" );
    System.out
            .println( surfacing.IGNORE_DOMAINS_SPECIFIC_TO_ONE_SPECIES_OPTION
                    + ": to ignore domains specific to one species (for similarity calc purposes, not for parsimony analyses) (default: not to ignore)" );
    System.out.println( surfacing.NOT_IGNORE_DUFS_OPTION
            + ": to _not_ ignore DUFs (domains with unknown function) (default: ignore DUFs)" );
    System.out
            .println( surfacing.IGNORE_VIRAL_IDS
                    + ": to ignore domains with ids containing 'vir', 'retro', 'transpos', 'phage', or starting with 'rv' or 'gag_'" );
    System.out.println( surfacing.DOMAIN_SIMILARITY_SORT_OPTION + ": sorting for similarities (default: "
            + DOMAIN_SORT_FILD_DEFAULT + ")" );
    System.out.println( surfacing.OUTPUT_FILE_OPTION + ": name for (main) output file (mandatory)" );
    System.out.println( surfacing.MAX_E_VALUE_OPTION + ": max (inclusive) E-value" );
    System.out.println( surfacing.MAX_ALLOWED_OVERLAP_OPTION + ": maximal allowed domain overlap" );
    System.out.println( surfacing.NO_ENGULFING_OVERLAP_OPTION + ": to ignore engulfed lower confidence domains" );
    System.out.println( surfacing.SPECIES_MATRIX_OPTION + ": species matrix" );
    System.out.println( surfacing.SCORING_OPTION + ": scoring (default:" + SCORING_DEFAULT + ")" );
    System.out.println( surfacing.DOMAIN_COUNT_SORT_OPTION + ": sorting for domain counts (default:"
            + DOMAINS_SORT_ORDER_DEFAULT + ")" );
    System.out.println( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION + ": domain similarity print option (default:"
            + DOMAIN_SIMILARITY_PRINT_OPTION_DEFAULT + ")" );
    System.out.println( surfacing.CUTOFF_SCORE_FILE_OPTION + ": cutoff score file" );
    System.out.println( surfacing.DOMAIN_SIMILARITY_SORT_BY_SPECIES_COUNT_FIRST_OPTION
            + ": sort by species count first" );
    System.out.println( surfacing.OUTPUT_DIR_OPTION + ": output directory" );
    System.out.println( surfacing.PFAM_TO_GO_FILE_USE_OPTION + ": Pfam to GO mapping file" );
    System.out.println( surfacing.GO_OBO_FILE_USE_OPTION + ": GO terms file (OBO format)" );
    System.out.println( surfacing.GO_NAMESPACE_LIMIT_OPTION + ": limit GO term to one GO namespace" );
    System.out.println( surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
            + "[=<suffix for pairwise comparison output files>]: to perform pairwise comparison based analyses" );
    System.out.println( surfacing.INPUT_SPECIES_TREE_OPTION
            + ": species tree, to perform (Dollo, Fitch) parismony analyses" );
    System.out
            .println( JACKNIFE_OPTION
                    + ": perform jacknife resampling for domain and binary domain combination based distance matrices [default resamplings: "
                    + JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT + "]" );
    System.out.println( JACKNIFE_RATIO_OPTION + ": ratio for jacknife resampling [default: "
            + JACKNIFE_RATIO_DEFAULT + "]" );
    System.out.println( JACKNIFE_RANDOM_SEED_OPTION
            + ": seed for random number generator for jacknife resampling [default: "
            + JACKNIFE_RANDOM_SEED_DEFAULT + "]" );
    // System.out.println( surfacing.INFER_SPECIES_TREES_OPTION
    // + ": to infer NJ species trees based on shared domains/binary domain combinations" );
    System.out
            .println( surfacing.INPUT_SPECIES_TREE_OPTION
                    + "=<treefiles in phyloXML format, separated by #>: to infer domain/binary domain combination gains/losses on given species trees" );
    System.out.println( surfacing.FILTER_POSITIVE_OPTION
            + "=<file>: to filter out proteins not containing at least one domain listed in <file>" );
    System.out.println( surfacing.FILTER_NEGATIVE_OPTION
            + "=<file>: to filter out proteins containing at least one domain listed in <file>" );
    System.out.println( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION
            + "=<file>: to filter out (ignore) domains listed in <file>" );
    System.out.println( surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>: to read input files from <file>" );
    System.out
            .println( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION
                    + "=<seed>: seed for random number generator for Fitch Parsimony analysis (type: long, default: no randomization - given a choice, prefer absence" );
    System.out.println( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS
            + ": to consider directedness in binary combinations: e.g. A-B != B-A" );
    System.out.println( surfacing.CONSIDER_DOMAIN_COMBINATION_DIRECTEDNESS_AND_ADJACENCY
            + ": to consider directedness and adjacency in binary combinations" );
    System.out
            .println( surfacing.SEQ_EXTRACT_OPTION
                    + "=<domain ids (Pfam names)>: to extract sequence names of sequences containing matching domains and/or domain-sequences (order N to C) (domain separator: '~', domain sequences speparator: '#', e.g. 'NACHT#BIR~CARD')" );
    System.out.println( surfacing.SECONDARY_FEATURES_PARSIMONY_MAP_FILE
            + "=<file>: to perfom parsimony analysis on secondary features" );
    System.out.println( surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>: to presence/absence genome analysis" );
    System.out.println( surfacing.DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS
            + ": to output binary domain combinations for (downstream) graph analysis" );
    System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS + ": to output all proteins per domain" );
    System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION
            + ": e value max per domain for output of all proteins per domain" );
    System.out.println();
    // Two complete, copy-pasteable example command lines.
    System.out.println( "Example 1: java -Xms128m -Xmx512m -cp path/to/forester.jar"
            + " org.forester.application.surfacing p2g=pfam2go_2012_02_07.txt -dufs -cos=Pfam_260_NC1"
            + " -no_eo -mo=0 -input=genomes_limited.txt -out_dir=out -o=o "
            + " -species_tree=tol.xml -obo=gene_ontology_2012_02_07.obo -pos_filter=f.txt -all_prot" );
    System.out.println();
    System.out.println( "Example 2: java -Xms128m -Xmx512m -cp path/to/forester.jar"
            + " org.forester.application.surfacing -detail=punctilious -o=TEST.html -pwc=TEST"
            + " -cos=Pfam_ls_22_TC2 -p2g=pfam2go -obo=gene_ontology_edit.obo "
            + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo "
            + "-ds_output=detailed_html -scoring=domains -sort=alpha human mouse brafl strpu" );
    System.out.println();
}
2688 private static void processFilter( final File filter_file, final SortedSet<DomainId> filter ) {
2689 SortedSet<String> filter_str = null;
2691 filter_str = ForesterUtil.file2set( filter_file );
2693 catch ( final IOException e ) {
2694 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2696 if ( filter_str != null ) {
2697 for( final String string : filter_str ) {
2698 filter.add( new DomainId( string ) );
2702 System.out.println( "Filter:" );
2703 for( final DomainId domainId : filter ) {
2704 System.out.println( domainId.getId() );
2709 private static String[][] processInputFileNames( final String[] names ) {
2710 final String[][] input_file_properties = new String[ names.length ][];
2711 for( int i = 0; i < names.length; ++i ) {
2712 if ( names[ i ].indexOf( SEPARATOR_FOR_INPUT_VALUES ) < 0 ) {
2713 input_file_properties[ i ] = new String[ 2 ];
2714 input_file_properties[ i ][ 0 ] = names[ i ];
2715 input_file_properties[ i ][ 1 ] = names[ i ];
2718 input_file_properties[ i ] = names[ i ].split( surfacing.SEPARATOR_FOR_INPUT_VALUES + "" );
2719 if ( input_file_properties[ i ].length != 3 ) {
2721 .fatalError( surfacing.PRG_NAME,
2722 "properties for the input files (hmmpfam output) are expected "
2723 + "to be in the following format \"<hmmpfam output file>#<species>\" (or just one word, which is both the filename and the species id), instead received \""
2724 + names[ i ] + "\"" );
2727 final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
2728 if ( !ForesterUtil.isEmpty( error ) ) {
2729 ForesterUtil.fatalError( surfacing.PRG_NAME, error );
2732 return input_file_properties;
2735 private static void processPlusMinusAnalysisOption( final CommandLineArguments cla,
2736 final List<String> high_copy_base,
2737 final List<String> high_copy_target,
2738 final List<String> low_copy,
2739 final List<Object> numbers ) {
2740 if ( cla.isOptionSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
2741 if ( !cla.isOptionValueSet( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) ) {
2742 ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for 'plus-minus' file: -"
2743 + surfacing.PLUS_MINUS_ANALYSIS_OPTION + "=<file>" );
2745 final File plus_minus_file = new File( cla.getOptionValue( surfacing.PLUS_MINUS_ANALYSIS_OPTION ) );
2746 final String msg = ForesterUtil.isReadableFile( plus_minus_file );
2747 if ( !ForesterUtil.isEmpty( msg ) ) {
2748 ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + plus_minus_file + "\": " + msg );
2750 processPlusMinusFile( plus_minus_file, high_copy_base, high_copy_target, low_copy, numbers );
2754 // First numbers is minimal difference, second is factor.
2755 private static void processPlusMinusFile( final File plus_minus_file,
2756 final List<String> high_copy_base,
2757 final List<String> high_copy_target,
2758 final List<String> low_copy,
2759 final List<Object> numbers ) {
2760 Set<String> species_set = null;
2761 int min_diff = PLUS_MINUS_ANALYSIS_MIN_DIFF_DEFAULT;
2762 double factor = PLUS_MINUS_ANALYSIS_FACTOR_DEFAULT;
2764 species_set = ForesterUtil.file2set( plus_minus_file );
2766 catch ( final IOException e ) {
2767 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getMessage() );
2769 if ( species_set != null ) {
2770 for( final String species : species_set ) {
2771 final String species_trimmed = species.substring( 1 );
2772 if ( species.startsWith( "+" ) ) {
2773 if ( low_copy.contains( species_trimmed ) ) {
2774 ForesterUtil.fatalError( surfacing.PRG_NAME,
2775 "species/genome names can not appear with both '+' and '-' suffix, as appears the case for: \""
2776 + species_trimmed + "\"" );
2778 high_copy_base.add( species_trimmed );
2780 else if ( species.startsWith( "*" ) ) {
2781 if ( low_copy.contains( species_trimmed ) ) {
2782 ForesterUtil.fatalError( surfacing.PRG_NAME,
2783 "species/genome names can not appear with both '*' and '-' suffix, as appears the case for: \""
2784 + species_trimmed + "\"" );
2786 high_copy_target.add( species_trimmed );
2788 else if ( species.startsWith( "-" ) ) {
2789 if ( high_copy_base.contains( species_trimmed ) || high_copy_target.contains( species_trimmed ) ) {
2790 ForesterUtil.fatalError( surfacing.PRG_NAME,
2791 "species/genome names can not appear with both '+' or '*' and '-' suffix, as appears the case for: \""
2792 + species_trimmed + "\"" );
2794 low_copy.add( species_trimmed );
2796 else if ( species.startsWith( "$D" ) ) {
2798 min_diff = Integer.parseInt( species.substring( 3 ) );
2800 catch ( final NumberFormatException e ) {
2801 ForesterUtil.fatalError( surfacing.PRG_NAME,
2802 "could not parse integer value for minimal difference from: \""
2803 + species.substring( 3 ) + "\"" );
2806 else if ( species.startsWith( "$F" ) ) {
2808 factor = Double.parseDouble( species.substring( 3 ) );
2810 catch ( final NumberFormatException e ) {
2811 ForesterUtil.fatalError( surfacing.PRG_NAME, "could not parse double value for factor from: \""
2812 + species.substring( 3 ) + "\"" );
2815 else if ( species.startsWith( "#" ) ) {
2820 .fatalError( surfacing.PRG_NAME,
2821 "species/genome names in 'plus minus' file must begin with '*' (high copy target genome), '+' (high copy base genomes), '-' (low copy genomes), '$D=<integer>' minimal Difference (default is 1), '$F=<double>' factor (default is 1.0), double), or '#' (ignore) suffix, encountered: \""
2824 numbers.add( new Integer( min_diff + "" ) );
2825 numbers.add( new Double( factor + "" ) );
2829 ForesterUtil.fatalError( surfacing.PRG_NAME, "'plus minus' file [" + plus_minus_file + "] appears empty" );
2833 private static void writePresentToNexus( final File output_file,
2834 final File positive_filter_file,
2835 final SortedSet<DomainId> filter,
2836 final List<GenomeWideCombinableDomains> gwcd_list ) {
2839 .writeMatrixToFile( DomainParsimonyCalculator
2840 .createMatrixOfDomainPresenceOrAbsence( gwcd_list, positive_filter_file == null ? null
2841 : filter ), output_file + DOMAINS_PRESENT_NEXUS, Format.NEXUS_BINARY );
2842 SurfacingUtil.writeMatrixToFile( DomainParsimonyCalculator
2843 .createMatrixOfBinaryDomainCombinationPresenceOrAbsence( gwcd_list ), output_file
2844 + BDC_PRESENT_NEXUS, Format.NEXUS_BINARY );
2846 catch ( final Exception e ) {
2847 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2851 private static void writeProteinListsForAllSpecies( final File output_dir,
2852 final SortedMap<Species, List<Protein>> protein_lists_per_species,
2853 final List<GenomeWideCombinableDomains> gwcd_list,
2854 final double domain_e_cutoff ) {
2855 final SortedSet<DomainId> all_domains = new TreeSet<DomainId>();
2856 for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
2857 all_domains.addAll( gwcd.getAllDomainIds() );
2859 for( final DomainId domain : all_domains ) {
2860 final File out = new File( output_dir + ForesterUtil.FILE_SEPARATOR + domain + SEQ_EXTRACT_SUFFIX );
2861 SurfacingUtil.checkForOutputFileWriteability( out );
2863 final Writer proteins_file_writer = new BufferedWriter( new FileWriter( out ) );
2864 SurfacingUtil.extractProteinNames( protein_lists_per_species,
2866 proteins_file_writer,
2868 LIMIT_SPEC_FOR_PROT_EX,
2870 proteins_file_writer.close();
2872 catch ( final IOException e ) {
2873 ForesterUtil.fatalError( surfacing.PRG_NAME, e.getLocalizedMessage() );
2875 ForesterUtil.programMessage( surfacing.PRG_NAME, "Wrote proteins list to \"" + out + "\"" );