// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
//
// Contact: phylosoft @ gmail . com
-// WWW: www.phylosoft.org/forester
+// WWW: https://sites.google.com/site/cmzmasek/home/software/forester
package org.forester.application;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.forester.phylogeny.PhylogenyNode;
import org.forester.phylogeny.factories.ParserBasedPhylogenyFactory;
import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
+import org.forester.protein.BasicProtein;
import org.forester.protein.BinaryDomainCombination;
import org.forester.protein.Domain;
import org.forester.protein.DomainId;
// Suffix for the output file listing independent domain-combination (DC) gains
// (Fitch parsimony) mapped for GO-term analysis.
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_MAPPED.txt";
// Same as above, but each mapped DC appears only once (unique entries).
public static final String INDEPENDENT_DC_GAINS_FITCH_PARS_DC_FOR_GO_MAPPING_MAPPED_OUTPUT_UNIQUE_SUFFIX = "_indep_dc_gains_fitch_lists_for_go_mapping_unique_MAPPED.txt";
// When true, collect descriptive statistics for proteins in re-gained DCs.
private static final boolean PERFORM_DC_REGAIN_PROTEINS_STATS = true;
// When true, run the domain-architecture (DA) analysis instead of the
// genome-wide combinable-domains analysis (see main loop).
+ private static final boolean DA_ANALYSIS = true;
private static void checkWriteabilityForPairwiseComparisons( final PrintableDomainSimilarity.PRINT_OPTION domain_similarity_print_option,
final String[][] input_file_properties,
+ error );
}
try {
- final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, " " );
+ final BasicTable<String> scores_table = BasicTableParser.parse( cutoff_scores_file, ' ' );
individual_score_cutoffs = scores_table.getColumnsAsMapDouble( 0, 1 );
}
catch ( final IOException e ) {
domain_number_stats_by_dc = new HashMap<String, DescriptiveStatistics>();
}
// Main loop:
+ final SortedMap<String, Set<String>> distinct_domain_architecutures_per_genome = new TreeMap<String, Set<String>>();
+ final SortedMap<String, Integer> distinct_domain_architecuture_counts = new TreeMap<String, Integer>();
for( int i = 0; i < number_of_genomes; ++i ) {
System.out.println();
System.out.println( ( i + 1 ) + "/" + number_of_genomes );
}
final double coverage = ( double ) protein_list.size() / parser.getProteinsEncountered();
protein_coverage_stats.addValue( coverage );
+ int distinct_das = -1;
+ if ( DA_ANALYSIS ) {
+ final String genome = input_file_properties[ i ][ 0 ];
+ distinct_das = storeDomainArchitectures( genome,
+ distinct_domain_architecutures_per_genome,
+ protein_list,
+ distinct_domain_architecuture_counts );
+ }
System.out.println( "Number of proteins encountered : " + parser.getProteinsEncountered() );
log( "Number of proteins encountered : " + parser.getProteinsEncountered(), log_writer );
System.out.println( "Number of proteins stored : " + protein_list.size() );
log( "Proteins ignored due to positive filter : " + parser.getProteinsIgnoredDueToFilter(),
log_writer );
}
+ if ( DA_ANALYSIS ) {
+ System.out.println( "Distinct domain architectures stored : " + distinct_das );
+ log( "Distinct domain architectures stored : " + distinct_das, log_writer );
+ }
System.out.println( "Time for processing : " + parser.getTime() + "ms" );
log( "", log_writer );
html_desc.append( "<tr><td>" + input_file_properties[ i ][ 0 ] + " [species: "
domains_which_are_sometimes_single_sometimes_not,
domains_which_never_single,
domains_per_potein_stats_writer );
- gwcd_list.add( BasicGenomeWideCombinableDomains
- .createInstance( protein_list,
- ignore_combination_with_same,
- new BasicSpecies( input_file_properties[ i ][ 1 ] ),
- domain_id_to_go_ids_map,
- dc_type,
- protein_length_stats_by_dc,
- domain_number_stats_by_dc ) );
domain_lengths_table.addLengths( protein_list );
- if ( gwcd_list.get( i ).getSize() > 0 ) {
- SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
- out_dir,
- per_genome_domain_promiscuity_statistics_writer,
- gwcd_list.get( i ),
- i,
- dc_sort_order );
- if ( output_binary_domain_combinationsfor_graph_analysis ) {
- SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
- out_dir,
- gwcd_list.get( i ),
- i,
- dc_sort_order );
+ if ( !DA_ANALYSIS ) {
+ gwcd_list.add( BasicGenomeWideCombinableDomains
+ .createInstance( protein_list,
+ ignore_combination_with_same,
+ new BasicSpecies( input_file_properties[ i ][ 1 ] ),
+ domain_id_to_go_ids_map,
+ dc_type,
+ protein_length_stats_by_dc,
+ domain_number_stats_by_dc ) );
+ if ( gwcd_list.get( i ).getSize() > 0 ) {
+ SurfacingUtil.writeDomainCombinationsCountsFile( input_file_properties,
+ out_dir,
+ per_genome_domain_promiscuity_statistics_writer,
+ gwcd_list.get( i ),
+ i,
+ dc_sort_order );
+ if ( output_binary_domain_combinationsfor_graph_analysis ) {
+ SurfacingUtil.writeBinaryDomainCombinationsFileForGraphAnalysis( input_file_properties,
+ out_dir,
+ gwcd_list.get( i ),
+ i,
+ dc_sort_order );
+ }
+ SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
+ SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
+ all_bin_domain_combinations_encountered );
}
- SurfacingUtil.addAllDomainIdsToSet( gwcd_list.get( i ), all_domains_encountered );
- SurfacingUtil.addAllBinaryDomainCombinationToSet( gwcd_list.get( i ),
- all_bin_domain_combinations_encountered );
}
if ( query_domains_writer_ary != null ) {
for( int j = 0; j < query_domain_ids_array.length; j++ ) {
ForesterUtil.programMessage( PRG_NAME, "Wrote domain promiscuities to: "
+ per_genome_domain_promiscuity_statistics_file );
//
+ if ( DA_ANALYSIS ) {
+ performDomainArchitectureAnalysis( distinct_domain_architecutures_per_genome,
+ distinct_domain_architecuture_counts,
+ 10 );
+ distinct_domain_architecutures_per_genome.clear();
+ distinct_domain_architecuture_counts.clear();
+ System.gc();
+ }
try {
domains_per_potein_stats_writer.write( "ALL" );
domains_per_potein_stats_writer.write( "\t" );
"Average of proteins with a least one domain assigned: "
+ ( 100 * protein_coverage_stats.arithmeticMean() ) + "% (+/-"
+ ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)" );
- ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: " + 100
- * protein_coverage_stats.getMin() + "%-" + 100 * protein_coverage_stats.getMax() + "%" );
+ ForesterUtil.programMessage( PRG_NAME, "Range of proteins with a least one domain assigned: "
+ + ( 100 * protein_coverage_stats.getMin() ) + "%-" + ( 100 * protein_coverage_stats.getMax() )
+ + "%" );
log( "Average of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.arithmeticMean() )
+ "% (+/-" + ( 100 * protein_coverage_stats.sampleStandardDeviation() ) + "%)", log_writer );
- log( "Range of prot with a least one dom assigned : " + 100 * protein_coverage_stats.getMin() + "%-"
- + 100 * protein_coverage_stats.getMax() + "%", log_writer );
+ log( "Range of prot with a least one dom assigned : " + ( 100 * protein_coverage_stats.getMin() ) + "%-"
+ + ( 100 * protein_coverage_stats.getMax() ) + "%", log_writer );
}
catch ( final IOException e2 ) {
ForesterUtil.fatalError( surfacing.PRG_NAME, e2.getLocalizedMessage() );
System.out.print( ", not mapped domains = " + mapping_results.getSumOfFailures() );
if ( total_domains > 0 ) {
System.out.println( ", mapped ratio = "
- + ( 100 * mapping_results.getSumOfSuccesses() / total_domains ) + "%" );
+ + ( ( 100 * mapping_results.getSumOfSuccesses() ) / total_domains ) + "%" );
}
else {
System.out.println( ", mapped ratio = n/a (total domains = 0 )" );
System.out.println();
}
+ private static void performDomainArchitectureAnalysis( final SortedMap<String, Set<String>> domain_architecutures,
+ final SortedMap<String, Integer> domain_architecuture_counts,
+ final int min_count ) {
+ final StringBuilder unique_das = new StringBuilder();
+ final Iterator<Entry<String, Integer>> it = domain_architecuture_counts.entrySet().iterator();
+ System.out.println( "Domain Architecture Counts (min count: " + min_count + " ):" );
+ while ( it.hasNext() ) {
+ final Map.Entry<String, Integer> e = it.next();
+ final String da = e.getKey();
+ final int count = e.getValue();
+ if ( count >= min_count ) {
+ System.out.println( da + "\t" + count );
+ }
+ if ( count == 1 ) {
+ final Iterator<Entry<String, Set<String>>> it2 = domain_architecutures.entrySet().iterator();
+ while ( it2.hasNext() ) {
+ final Map.Entry<String, Set<String>> e2 = it2.next();
+ final String genome = e2.getKey();
+ final Set<String> das = e2.getValue();
+ if ( das.contains( da ) ) {
+ unique_das.append( genome + "\t" + da + ForesterUtil.LINE_SEPARATOR );
+ }
+ }
+ }
+ }
+ System.out.println();
+ System.out.println();
+ System.out.println( "Unique Domain Architectures:" );
+ System.out.println( unique_das );
+ System.out.println();
+ System.out.println();
+ }
+
+ private static int storeDomainArchitectures( final String genome,
+ final SortedMap<String, Set<String>> domain_architecutures,
+ final List<Protein> protein_list,
+ final Map<String, Integer> distinct_domain_architecuture_counts ) {
+ final Set<String> da = new HashSet<String>();
+ domain_architecutures.put( genome, da );
+ for( final Protein protein : protein_list ) {
+ final String da_str = ( ( BasicProtein ) protein ).toDomainArchitectureString( "~" );
+ if ( !da.contains( da_str ) ) {
+ if ( !distinct_domain_architecuture_counts.containsKey( da_str ) ) {
+ distinct_domain_architecuture_counts.put( da_str, 1 );
+ }
+ else {
+ distinct_domain_architecuture_counts.put( da_str,
+ distinct_domain_architecuture_counts.get( da_str ) + 1 );
+ }
+ da.add( da_str );
+ }
+ }
+ return da.size();
+ }
+
private static void createSplitWriters( final File out_dir,
final String my_outfile,
final Map<Character, Writer> split_writers ) throws IOException {
for( final Entry<Integer, Integer> entry : all_genomes_domains_per_potein_histo.entrySet() ) {
sum += entry.getValue();
}
- final double percentage = 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) / sum;
+ final double percentage = ( 100.0 * ( sum - all_genomes_domains_per_potein_histo.get( 1 ) ) ) / sum;
ForesterUtil.programMessage( PRG_NAME, "Percentage of multidomain proteins: " + percentage + "%" );
log( "Percentage of multidomain proteins: : " + percentage + "%", log_writer );
}
}
System.out.println( "--" );
}
- for( int i = 0; i < input_file_properties.length; ++i ) {
+ for( final String[] input_file_propertie : input_file_properties ) {
try {
- intree.getNode( input_file_properties[ i ][ 1 ] );
+ intree.getNode( input_file_propertie[ 1 ] );
}
catch ( final IllegalArgumentException e ) {
- ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_properties[ i ][ 1 ]
+ ForesterUtil.fatalError( surfacing.PRG_NAME, "node named [" + input_file_propertie[ 1 ]
+ "] not present/not unique in input tree" );
}
}