// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
//
// Contact: phylosoft @ gmail . com
-// WWW: www.phylosoft.org/forester
+// WWW: https://sites.google.com/site/cmzmasek/home/software/forester
package org.forester.application;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.forester.evoinference.distance.NeighborJoining;
import org.forester.evoinference.matrix.character.CharacterStateMatrix.Format;
-import org.forester.evoinference.matrix.distance.DistanceMatrix;
import org.forester.go.GoId;
import org.forester.go.GoNameSpace;
import org.forester.go.GoTerm;
import org.forester.io.parsers.HmmscanPerDomainTableParser;
import org.forester.io.parsers.HmmscanPerDomainTableParser.INDIVIDUAL_SCORE_CUTOFF;
import org.forester.io.parsers.util.ParserUtils;
-import org.forester.io.writers.PhylogenyWriter;
import org.forester.phylogeny.Phylogeny;
import org.forester.phylogeny.PhylogenyMethods;
import org.forester.phylogeny.PhylogenyNode;
private static final int JACKNIFE_NUMBER_OF_RESAMPLINGS_DEFAULT = 100;
final static private long JACKNIFE_RANDOM_SEED_DEFAULT = 19;
final static private double JACKNIFE_RATIO_DEFAULT = 0.5;
- //final static private String INFER_SPECIES_TREES_OPTION = "species_tree_inference";
- final static private String INFERRED_SD_BASED_NJ_SPECIES_TREE_SUFFIX = "_sd_nj.nh";
- final static private String INFERRED_SBC_BASED_NJ_SPECIES_TREE_SUFFIX = "_sbc_nj.nh";
final static private String FILTER_POSITIVE_OPTION = "pos_filter";
final static private String FILTER_NEGATIVE_OPTION = "neg_filter";
final static private String FILTER_NEGATIVE_DOMAINS_OPTION = "neg_dom_filter";
- final static private String INPUT_FILES_FROM_FILE_OPTION = "input";
+ final static private String INPUT_GENOMES_FILE_OPTION = "genomes";
final static private String INPUT_SPECIES_TREE_OPTION = "species_tree";
final static private String SEQ_EXTRACT_OPTION = "prot_extract";
- final static private char SEPARATOR_FOR_INPUT_VALUES = '#';
- final static private String PRG_VERSION = "2.250";
- final static private String PRG_DATE = "2012.05.07";
+ final static private String PRG_VERSION = "2.260";
+ final static private String PRG_DATE = "130721";
final static private String E_MAIL = "czmasek@burnham.org";
final static private String WWW = "www.phylosoft.org/forester/applications/surfacing";
final static private boolean IGNORE_DUFS_DEFAULT = true;
private static final String PLUS_MINUS_ALL_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_all.txt";
private static final String PLUS_MINUS_PASSING_GO_IDS_DOM_SUFFIX = "_plus_minus_go_ids_passing.txt";
private static final String OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS = "all_prot";
+ final static private String OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION = "all_prot_e";
private static final boolean VERBOSE = false;
private static final String OUTPUT_DOMAIN_COMBINATIONS_GAINED_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_gains_counts";
private static final String OUTPUT_DOMAIN_COMBINATIONS_LOST_MORE_THAN_ONCE_ANALYSIS_SUFFIX = "_fitch_dc_losses_counts";
private static final String LOG_FILE_SUFFIX = "_log.txt";
private static final String DATA_FILE_SUFFIX = "_domain_combination_data.txt";
private static final String DATA_FILE_DESC = "#SPECIES\tPRTEIN_ID\tN_TERM_DOMAIN\tC_TERM_DOMAIN\tN_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tC_TERM_DOMAIN_PER_DOMAIN_E_VALUE\tN_TERM_DOMAIN_COUNTS_PER_PROTEIN\tC_TERM_DOMAIN_COUNTS_PER_PROTEIN";
- private static final INDIVIDUAL_SCORE_CUTOFF INDIVIDUAL_SCORE_CUTOFF_DEFAULT = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE;
+ private static final INDIVIDUAL_SCORE_CUTOFF INDIVIDUAL_SCORE_CUTOFF_DEFAULT = INDIVIDUAL_SCORE_CUTOFF.FULL_SEQUENCE; //TODO look at me! change?
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_COUNTS_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_counts.txt";
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists.txt";
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping.txt";
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_MAPPED.txt";
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_MAPPED.txt";
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique_MAPPED.txt";
+ private static final boolean PERFORM_DC_REGAIN_PROTEINS_STATS = true;
+ private static final boolean DA_ANALYSIS = false;
private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
final String[][] input_file_properties,
return intrees;
}
- private static List<Phylogeny> inferSpeciesTrees( final File outfile, final List<DistanceMatrix> distances_list ) {
- final NeighborJoining nj = NeighborJoining.createInstance();
- final List<Phylogeny> phylogenies = nj.execute( distances_list );
- final PhylogenyWriter w = new PhylogenyWriter();
- try {
- w.toNewHampshire( phylogenies, true, true, outfile, ";" );
- }
- catch ( final IOException e ) {
- ForesterUtil.fatalError( PRG_NAME, "failed to write to outfile [" + outfile + "]: " + e.getMessage() );
- }
- return phylogenies;
- }
-
private static void log( final String msg, final Writer w ) {
try {
w.write( msg );
allowed_options.add( JACKNIFE_RANDOM_SEED_OPTION );
allowed_options.add( JACKNIFE_RATIO_OPTION );
allowed_options.add( INPUT_SPECIES_TREE_OPTION );
- //allowed_options.add( INFER_SPECIES_TREES_OPTION );
allowed_options.add( FILTER_POSITIVE_OPTION );
allowed_options.add( FILTER_NEGATIVE_OPTION );
- allowed_options.add( INPUT_FILES_FROM_FILE_OPTION );
+ allowed_options.add( INPUT_GENOMES_FILE_OPTION );
allowed_options.add( RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION );
allowed_options.add( FILTER_NEGATIVE_DOMAINS_OPTION );
allowed_options.add( IGNORE_VIRAL_IDS );
allowed_options.add( SEQ_EXTRACT_OPTION );
+ allowed_options.add( OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
allowed_options.add( SECONDARY_FEATURES_PARSIMONY_MAP_FILE );
allowed_options.add( PLUS_MINUS_ANALYSIS_OPTION );
allowed_options.add( DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS );
+ error );
}
try {
- final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, " " );
+ final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, ' ' );
individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
}
catch ( final IOException e ) {
plus_minus_analysis_high_copy_target_species,
plus_minus_analysis_high_low_copy_species,
plus_minus_analysis_numbers );
- File input_files_file = null;
- String[] input_file_names_from_file = null;
- if ( cla.isOptionSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
- if ( !cla.isOptionValueSet( surfacing.INPUT_FILES_FROM_FILE_OPTION ) ) {
- ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input files file: -"
- + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
- }
- input_files_file = new File( cla.getOptionValue( surfacing.INPUT_FILES_FROM_FILE_OPTION ) );
- final String msg = ForesterUtil.isReadableFile( input_files_file );
+ File input_genomes_file = null;
+ if ( cla.isOptionSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
+ if ( !cla.isOptionValueSet( surfacing.INPUT_GENOMES_FILE_OPTION ) ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, "no value for input genomes file: -"
+ + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
+ }
+ input_genomes_file = new File( cla.getOptionValue( surfacing.INPUT_GENOMES_FILE_OPTION ) );
+ final String msg = ForesterUtil.isReadableFile( input_genomes_file );
if ( !ForesterUtil.isEmpty( msg ) ) {
- ForesterUtil.fatalError( surfacing.PRG_NAME, "can not read from \"" + input_files_file + "\": " + msg );
- }
- try {
- input_file_names_from_file = ForesterUtil.file2array( input_files_file );
- }
- catch ( final IOException e ) {
- ForesterUtil.fatalError( surfacing.PRG_NAME, "failed to read from \"" + input_files_file + "\": " + e );
+ ForesterUtil
+ .fatalError( surfacing.PRG_NAME, "can not read from \"" + input_genomes_file + "\": " + msg );
}
}
- if ( ( cla.getNumberOfNames() < 1 )
- && ( ( input_file_names_from_file == null ) || ( input_file_names_from_file.length < 1 ) ) ) {
- ForesterUtil.fatalError( surfacing.PRG_NAME,
- "No hmmpfam output file indicated is input: use comand line directly or "
- + surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>" );
+ else {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, "no input genomes file given: "
+ + surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>" );
}
DomainSimilarity.DomainSimilarityScoring scoring = SCORING_DEFAULT;
if ( cla.isOptionSet( surfacing.SCORING_OPTION ) ) {
species_matrix = true;
}
boolean output_protein_lists_for_all_domains = false;
+ double output_list_of_all_proteins_per_domain_e_value_max = -1;
if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS ) ) {
output_protein_lists_for_all_domains = true;
+ if ( cla.isOptionSet( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION ) ) {
+ try {
+ output_list_of_all_proteins_per_domain_e_value_max = cla
+ .getOptionValueAsDouble( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION );
+ }
+ catch ( final Exception e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME, "no acceptable value for per domain E-value maximum" );
+ }
+ }
}
Detailedness detailedness = DETAILEDNESS_DEFAULT;
if ( cla.isOptionSet( surfacing.DETAILEDNESS_OPTION ) ) {
domain_similarity_print_option = PrintableDomainSimilarity.PRINT_OPTION.HTML;
}
else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_HTML ) ) {
- // domain_similarity_print_option =
- // DomainSimilarity.PRINT_OPTION.SIMPLE_HTML;
ForesterUtil.fatalError( surfacing.PRG_NAME, "simple HTML output not implemented yet :(" );
}
else if ( sort.equals( surfacing.DOMAIN_SIMILARITY_PRINT_OPTION_SIMPLE_TAB_DELIMITED ) ) {
+ surfacing.DOMAIN_COUNT_SORT_COMBINATIONS_COUNT + ">\"" );
}
}
- String[][] input_file_properties = null;
- if ( input_file_names_from_file != null ) {
- input_file_properties = surfacing.processInputFileNames( input_file_names_from_file );
- }
- else {
- input_file_properties = surfacing.processInputFileNames( cla.getNames() );
+ final String[][] input_file_properties = processInputGenomesFile( input_genomes_file );
+ for( final String[] input_file_propertie : input_file_properties ) {
+ for( int j = 0; j < input_file_propertie.length; j++ ) {
+ System.out.print( input_file_propertie[ j ] + " " );
+ }
+ System.out.println();
}
final int number_of_genomes = input_file_properties.length;
if ( number_of_genomes < 2 ) {
automated_pairwise_comparison_suffix,
out_dir );
for( int i = 0; i < number_of_genomes; i++ ) {
- File dcc_outfile = new File( input_file_properties[ i ][ 0 ]
+ File dcc_outfile = new File( input_file_properties[ i ][ 1 ]
+ surfacing.DOMAIN_COMBINITON_COUNTS_OUTPUTFILE_SUFFIX );
if ( out_dir != null ) {
dcc_outfile = new File( out_dir + ForesterUtil.FILE_SEPARATOR + dcc_outfile );
}
}
}
- // boolean infer_species_trees = false;
- // if ( cla.isOptionSet( surfacing.INFER_SPECIES_TREES_OPTION ) ) {
- // if ( ( output_file == null ) || ( number_of_genomes < 3 )
- // || ForesterUtil.isEmpty( automated_pairwise_comparison_suffix ) ) {
- // ForesterUtil.fatalError( surfacing.PRG_NAME, "cannot infer species trees (-"
- // + surfacing.INFER_SPECIES_TREES_OPTION + " without pairwise analyses ("
- // + surfacing.PAIRWISE_DOMAIN_COMPARISONS_OPTION
- // + "=<suffix for pairwise comparison output files>)" );
- // }
- // infer_species_trees = true;
- // }
File[] intree_files = null;
Phylogeny[] intrees = null;
if ( cla.isOptionSet( surfacing.INPUT_SPECIES_TREE_OPTION ) ) {
+ surfacing.GO_OBO_FILE_USE_OPTION + "=<file>)" );
}
System.out.println( "Output directory : " + out_dir );
- if ( input_file_names_from_file != null ) {
- System.out.println( "Input files names from : " + input_files_file + " ["
- + input_file_names_from_file.length + " input files]" );
- html_desc.append( "<tr><td>Input files names from:</td><td>" + input_files_file + " ["
- + input_file_names_from_file.length + " input files]</td></tr>" + nl );
- }
+ System.out.println( "Input genomes from : " + input_genomes_file );
+ html_desc.append( "<tr><td>Input genomes from:</td><td>" + input_genomes_file + "</td></tr>" + nl );
if ( positive_filter_file != null ) {
final int filter_size = filter.size();
System.out.println( "Positive protein filter : " + positive_filter_file + " [" + filter_size
System.out.println( "E-value maximum (inclusive) : " + e_value_max );
html_desc.append( "<tr><td>E-value maximum (inclusive):</td><td>" + e_value_max + "</td></tr>" + nl );
}
+ if ( output_protein_lists_for_all_domains ) {
+ System.out.println( "Domain E-value max : " + output_list_of_all_proteins_per_domain_e_value_max );
+ html_desc.append( "<tr><td>Protein lists: E-value maximum per domain (inclusive):</td><td>"
+ + output_list_of_all_proteins_per_domain_e_value_max + "</td></tr>" + nl );
+ }
System.out.println( "Ignore DUFs : " + ignore_dufs );
if ( ignore_virus_like_ids ) {
System.out.println( "Ignore virus like ids : " + ignore_virus_like_ids );
System.out.println( " Ratio : " + ForesterUtil.round( jacknife_ratio, 2 ) );
System.out.println( " Random number seed : " + random_seed );
}
- // if ( infer_species_trees ) {
- // html_desc.append( "<tr><td>Infer species trees:</td><td>true</td></tr>" + nl );
- // System.out.println( " Infer species trees : true" );
- // }
if ( ( intrees != null ) && ( intrees.length > 0 ) ) {
for( final File intree_file : intree_files ) {
html_desc.append( "<tr><td>Intree for gain/loss parsimony analysis:</td><td>" + intree_file
catch ( final IOException e3 ) {
e3.printStackTrace();
}
- final Map<String, DescriptiveStatistics> protein_length_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
- final Map<String, DescriptiveStatistics> domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
+ Map<String, DescriptiveStatistics> protein_length_stats_by_dc = null;
+ Map<String, DescriptiveStatistics> domain_number_stats_by_dc = null;
final Map<String, DescriptiveStatistics> domain_length_stats_by_domain = new HashMap<String, DescriptiveStatistics>();
+ if ( PERFORM_DC_REGAIN_PROTEINS_STATS ) {
+ protein_length_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
+ domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
+ }
// Main loop:
+ final SortedMap<String, Set<String>> distinct_domain_architecutures_per_genome = new TreeMap<String, Set<String>>();
+ final SortedMap<String, Integer> distinct_domain_architecuture_counts = new TreeMap<String, Integer>();
for( int i = 0; i < number_of_genomes; ++i ) {
System.out.println();
System.out.println( ( i + 1 ) + "/" + number_of_genomes );
log( ( i + 1 ) + "/" + number_of_genomes, log_writer );
- System.out.println( "Processing : " + input_file_properties[ i ][ 0 ] );
- log( "Genome : " + input_file_properties[ i ][ 0 ], log_writer );
+ System.out.println( "Processing : " + input_file_properties[ i ][ 1 ]
+ + " [" + input_file_properties[ i ][ 0 ] + "]" );
+ log( "Genome : " + input_file_properties[ i ][ 1 ] + " ["
+ + input_file_properties[ i ][ 0 ] + "]", log_writer );
HmmscanPerDomainTableParser parser = null;
INDIVIDUAL_SCORE_CUTOFF ind_score_cutoff = INDIVIDUAL_SCORE_CUTOFF.NONE;
if ( individual_score_cutoffs != null ) {
}
final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
protein_coverage_stats.addValue( coverage );
+ int distinct_das = -1;
+ if ( DA_ANALYSIS ) {
+ final String genome = input_file_properties[ i ][ 0 ];
+ distinct_das = SurfacingUtil.storeDomainArchitectures( genome,
+ distinct_domain_architecutures_per_genome,
+ protein_list,
+ distinct_domain_architecuture_counts );
+ }
System.out.println( "Number of proteins encountered : " + parser.getProteinsEncountered() );
log( "Number of proteins encountered : " + parser.getProteinsEncountered(), log_writer );
System.out.println( "Number of proteins stored : " + protein_list.size() );
log( "Proteins ignored due to positive filter : " + parser.getProteinsIgnoredDueToFilter(),
log_writer );
}
+ if ( DA_ANALYSIS ) {
+ System.out.println( "Distinct domain architectures stored : " + distinct_das );
+ log( "Distinct domain architectures stored : " + distinct_das, log_writer );
+ }
System.out.println( "Time for processing : " + parser.getTime() + "ms" );
log( "", log_writer );
html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
+ parser.getProteinsIgnoredDueToFilter() );
}
html_desc.append( "</td></tr>" + nl );
- // domain_partner_counts_array[ i ] =
- // Methods.getDomainPartnerCounts( protein_domain_collections_array[
- // i ],
- // false, input_file_properties[ i ][ 1 ] );
try {
int count = 0;
for( final Protein protein : protein_list ) {
domains_which_are_sometimes_single_sometimes_not,
domains_which_never_single,
domains_per_potein_stats_writer );
- gwcd_list.add( BasicGenomeWideCombinableDomains
- .createInstance( protein_list,
- ignore_combination_with_same,
- new BasicSpecies( input_file_properties[ i ][ 1 ] ),
- domain_id_to_go_ids_map,
- dc_type,
- protein_length_stats_by_dc,
- domain_number_stats_by_dc ) );
domain_lengths_table.addLengths( protein_list );
- if ( gwcd_list.get( i ).getSize() > 0 ) {
- SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
- out_dir,
- per_genome_domain_promiscuity_statistics_writer,
- gwcd_list.get( i ),
- i,
- dc_sort_order );
- if ( output_binary_domain_combinationsfor_graph_analysis ) {
- SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
- out_dir,
- gwcd_list.get( i ),
- i,
- dc_sort_order );
+ if ( !DA_ANALYSIS ) {
+ gwcd_list.add( BasicGenomeWideCombinableDomains
+ .createInstance( protein_list,
+ ignore_combination_with_same,
+ new BasicSpecies( input_file_properties[ i ][ 1 ] ),
+ domain_id_to_go_ids_map,
+ dc_type,
+ protein_length_stats_by_dc,
+ domain_number_stats_by_dc ) );
+ if ( gwcd_list.get( i ).getSize() > 0 ) {
+ SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
+ out_dir,
+ per_genome_domain_promiscuity_statistics_writer,
+ gwcd_list.get( i ),
+ i,
+ dc_sort_order );
+ if ( output_binary_domain_combinationsfor_graph_analysis ) {
+ SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
+ out_dir,
+ gwcd_list.get( i ),
+ i,
+ dc_sort_order );
+ }
+ SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
+ SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
+ all_bin_domain_combinations_encountered );
}
- SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
- SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
- all_bin_domain_combinations_encountered );
}
if ( query_domains_writer_ary != null ) {
for( int j = 0; j < query_domain_ids_array.length; j++ ) {
ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
+ per_genome_domain_promiscuity_statistics_file );
//
+ if ( DA_ANALYSIS ) {
+ SurfacingUtil.performDomainArchitectureAnalysis( distinct_domain_architecutures_per_genome,
+ distinct_domain_architecuture_counts,
+ 10,
+ new File( out_dir.toString() + "/" + output_file
+ + "_DA_counts.txt" ),
+ new File( out_dir.toString() + "/" + output_file
+ + "_unique_DAs.txt" ) );
+ distinct_domain_architecutures_per_genome.clear();
+ distinct_domain_architecuture_counts.clear();
+ System.gc();
+ }
try {
domains_per_potein_stats_writer.write( "ALL" );
domains_per_potein_stats_writer.write( "\t" );
"Average of proteins with a least one domain assigned: "
+ ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
+ ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
- ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: " + 100
- * protein_coverage_stats.getMin() + "%-" + 100 * protein_coverage_stats.getMax() + "%" );
+ ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: "
+ + ( 100 * protein_coverage_stats.getMin() ) + "%-" + ( 100 * protein_coverage_stats.getMax() )
+ + "%" );
log( "Average of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.arithmeticMean() )
+ "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
- log( "Range of prot with a least one dom assigned : " + 100 * protein_coverage_stats.getMin() + "%-"
- + 100 * protein_coverage_stats.getMax() + "%", log_writer );
+ log( "Range of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.getMin() ) + "%-"
+ + ( 100 * protein_coverage_stats.getMax() ) + "%", log_writer );
}
catch ( final IOException e2 ) {
ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
System.out.print( ", not mapped domains = " + mapping_results.getSumOfFailures() );
if ( total_domains > 0 ) {
System.out.println( ", mapped ratio = "
- + ( 100 * mapping_results.getSumOfSuccesses() / total_domains ) + "%" );
+ + ( ( 100 * mapping_results.getSumOfSuccesses() ) / total_domains ) + "%" );
}
else {
System.out.println( ", mapped ratio = n/a (total domains = 0 )" );
plus_minus_analysis_numbers );
}
if ( output_protein_lists_for_all_domains ) {
- writeProteinListsForAllSpecies( out_dir, protein_lists_per_species, gwcd_list );
- }
- // if ( ( intrees != null ) && ( intrees.length > 0 ) && ( inferred_trees != null ) && ( inferred_trees.size() > 0 ) ) {
- // final StringBuilder parameters_sb = createParametersAsString( ignore_dufs,
- // e_value_max,
- // max_allowed_overlap,
- // no_engulfing_overlaps,
- // cutoff_scores_file );
- // String s = "_";
- // if ( radomize_fitch_parsimony ) {
- // s += random_number_seed_for_fitch_parsimony + "_";
- // }
- // int i = 0;
- // for( final Phylogeny inferred_tree : inferred_trees ) {
- // if ( !inferred_tree.isRooted() ) {
- // intrees[ 0 ].getRoot().getName();
- // inferred_tree.r
- // }
- // final String outfile_name = ForesterUtil.removeSuffix( inferred_tree.getName() ) + s;
- // final DomainParsimonyCalculator domain_parsimony = DomainParsimonyCalculator
- // .createInstance( inferred_tree, gwcd_list );
- // SurfacingUtil.executeParsimonyAnalysis( random_number_seed_for_fitch_parsimony,
- // radomize_fitch_parsimony,
- // outfile_name,
- // domain_parsimony,
- // inferred_tree,
- // domain_id_to_go_ids_map,
- // go_id_to_term_map,
- // go_namespace_limit,
- // parameters_sb.toString() );
- // i++;
- // }
- // }
+ writeProteinListsForAllSpecies( out_dir,
+ protein_lists_per_species,
+ gwcd_list,
+ output_list_of_all_proteins_per_domain_e_value_max );
+ }
if ( all_bin_domain_combinations_gained_fitch != null ) {
try {
executeFitchGainsAnalysis( new File( output_file
for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
sum += entry.getValue();
}
- final double percentage = 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) / sum;
+ final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
}
}
System.out.println( "--" );
}
- for( int i = 0; i < input_file_properties.length; ++i ) {
+ for( final String[] input_file_propertie : input_file_properties ) {
try {
- intree.getNode( input_file_properties[ i ][ 1 ] );
+ intree.getNode( input_file_propertie[ 1 ] );
}
catch ( final IllegalArgumentException e ) {
- ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
+ ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
+ "] not present/not unique in input tree" );
}
}
}
- // public static StringBuffer stringCombinableDomainsMapToStringBuffer(
- // final SortedMap<String, CombinableDomains> map ) {
- // final StringBuffer sb = new StringBuffer();
- // for( final Iterator<String> iter = map.keySet().iterator();
- // iter.hasNext(); ) {
- // final Object key = iter.next();
- // sb.append( ForesterUtil.pad( new StringBuffer( key.toString() ), 18, ' ',
- // false ) );
- // final CombinableDomains domain_combination = map.get( key );
- // sb.append( ForesterUtil.pad( new StringBuffer( "" +
- // domain_combination.getNumberOfCombiningDomains() ), 8,
- // ' ', false ) );
- // sb.append( domain_combination.toStringBuffer() );
- // sb.append( ForesterUtil.getLineSeparator() );
- // }
- // return sb;
- // }
private static void printHelp() {
System.out.println();
System.out.println( "Usage:" );
+ "=<file>: to filter out proteins containing at least one domain listed in <file>" );
System.out.println( surfacing.FILTER_NEGATIVE_DOMAINS_OPTION
+ "=<file>: to filter out (ignore) domains listed in <file>" );
- System.out.println( surfacing.INPUT_FILES_FROM_FILE_OPTION + "=<file>: to read input files from <file>" );
+        System.out.println( surfacing.INPUT_GENOMES_FILE_OPTION + "=<file>: to read input genomes from <file>" );
System.out
.println( surfacing.RANDOM_SEED_FOR_FITCH_PARSIMONY_OPTION
+ "=<seed>: seed for random number generator for Fitch Parsimony analysis (type: long, default: no randomization - given a choice, prefer absence" );
System.out.println( surfacing.DOMAIN_COMBINITONS_OUTPUT_OPTION_FOR_GRAPH_ANALYSIS
+ ": to output binary domain combinations for (downstream) graph analysis" );
System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_OPTIONS + ": to output all proteins per domain" );
+ System.out.println( surfacing.OUTPUT_LIST_OF_ALL_PROTEINS_PER_DOMAIN_E_VALUE_OPTION
+ + ": e value max per domain for output of all proteins per domain" );
System.out.println();
+ System.out.println( "Example 1: java -Xms128m -Xmx512m -cp path/to/forester.jar"
+ + " org.forester.application.surfacing p2g=pfam2go_2012_02_07.txt -dufs -cos=Pfam_260_NC1"
+ + " -no_eo -mo=0 -genomes=eukaryotes.txt -out_dir=out -o=o "
+ + " -species_tree=tol.xml -obo=gene_ontology_2012_02_07.obo -pos_filter=f.txt -all_prot" );
System.out.println();
- System.out.println( "Example: java -Xms128m -Xmx512m -cp path/to/forester.jar"
+ System.out.println( "Example 2: java -Xms128m -Xmx512m -cp path/to/forester.jar"
+ " org.forester.application.surfacing -detail=punctilious -o=TEST.html -pwc=TEST"
+ " -cos=Pfam_ls_22_TC2 -p2g=pfam2go -obo=gene_ontology_edit.obo "
- + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo "
- + "-ds_output=detailed_html -scoring=domains -sort=alpha -" + JACKNIFE_OPTION
- + "=50 human mouse brafl strpu" );
+ + "-dc_sort=dom -ignore_with_self -no_singles -e=0.001 -mo=1 -no_eo -genomes=eukaryotes.txt "
+ + "-ds_output=detailed_html -scoring=domains -sort=alpha " );
System.out.println();
}
}
}
- private static String[][] processInputFileNames( final String[] names ) {
- final String[][] input_file_properties = new String[ names.length ][];
- for( int i = 0; i < names.length; ++i ) {
- if ( names[ i ].indexOf( SEPARATOR_FOR_INPUT_VALUES ) < 0 ) {
- input_file_properties[ i ] = new String[ 2 ];
- input_file_properties[ i ][ 0 ] = names[ i ];
- input_file_properties[ i ][ 1 ] = names[ i ];
- }
- else {
- input_file_properties[ i ] = names[ i ].split( surfacing.SEPARATOR_FOR_INPUT_VALUES + "" );
- if ( input_file_properties[ i ].length != 3 ) {
- ForesterUtil
- .fatalError( surfacing.PRG_NAME,
- "properties for the input files (hmmpfam output) are expected "
- + "to be in the following format \"<hmmpfam output file>#<species>\" (or just one word, which is both the filename and the species id), instead received \""
- + names[ i ] + "\"" );
- }
- }
+ private static String[][] processInputGenomesFile( final File input_genomes ) {
+ String[][] input_file_properties = null;
+ try {
+ input_file_properties = ForesterUtil.file22dArray( input_genomes );
+ }
+ catch ( final IOException e ) {
+ ForesterUtil.fatalError( surfacing.PRG_NAME,
+                                     "genomes file is expected to be in the following format \"<hmmpfam output file> <species>\": "
+ + e.getLocalizedMessage() );
+ }
+ for( int i = 0; i < input_file_properties.length; ++i ) {
final String error = ForesterUtil.isReadableFile( new File( input_file_properties[ i ][ 0 ] ) );
if ( !ForesterUtil.isEmpty( error ) ) {
ForesterUtil.fatalError( surfacing.PRG_NAME, error );
private static void writeProteinListsForAllSpecies( final File output_dir,
final SortedMap<Species, List<Protein>> protein_lists_per_species,
- final List<GenomeWideCombinableDomains> gwcd_list ) {
+ final List<GenomeWideCombinableDomains> gwcd_list,
+ final double domain_e_cutoff ) {
final SortedSet<DomainId> all_domains = new TreeSet<DomainId>();
for( final GenomeWideCombinableDomains gwcd : gwcd_list ) {
all_domains.addAll( gwcd.getAllDomainIds() );
domain,
proteins_file_writer,
"\t",
- LIMIT_SPEC_FOR_PROT_EX );
+ LIMIT_SPEC_FOR_PROT_EX,
+ domain_e_cutoff );
proteins_file_writer.close();
}
catch ( final IOException e ) {