2 // FORESTER -- software libraries and applications
3 // for evolutionary biology research and applications.
5 // Copyright (C) 2008-2009 Christian M. Zmasek
6 // Copyright (C) 2008-2009 Burnham Institute for Medical Research
9 // This library is free software; you can redistribute it and/or
10 // modify it under the terms of the GNU Lesser General Public
11 // License as published by the Free Software Foundation; either
12 // version 2.1 of the License, or (at your option) any later version.
14 // This library is distributed in the hope that it will be useful,
15 // but WITHOUT ANY WARRANTY; without even the implied warranty of
16 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 // Lesser General Public License for more details.
19 // You should have received a copy of the GNU Lesser General Public
20 // License along with this library; if not, write to the Free Software
21 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
23 // Contact: phylosoft @ gmail . com
24 // WWW: https://sites.google.com/site/cmzmasek/home/software/forester
26 package org.forester.go.etc;
28 import java.awt.Color;
29 import java.io.BufferedReader;
31 import java.io.FileReader;
32 import java.io.IOException;
33 import java.io.Writer;
34 import java.text.DecimalFormat;
35 import java.text.NumberFormat;
36 import java.util.ArrayList;
37 import java.util.HashSet;
38 import java.util.List;
41 import java.util.SortedMap;
42 import java.util.SortedSet;
43 import java.util.TreeMap;
44 import java.util.TreeSet;
45 import java.util.regex.Matcher;
46 import java.util.regex.Pattern;
48 import org.forester.go.GoId;
49 import org.forester.go.GoNameSpace;
50 import org.forester.go.GoTerm;
51 import org.forester.go.GoUtils;
52 import org.forester.go.OBOparser;
53 import org.forester.go.PfamToGoMapping;
54 import org.forester.species.BasicSpecies;
55 import org.forester.species.Species;
56 import org.forester.surfacing.SurfacingConstants;
57 import org.forester.surfacing.SurfacingUtil;
58 import org.forester.util.ForesterUtil;
// Post-processes output of the Ontologizer GO-term enrichment tool: gathers per-species
// result tables from a directory, filters them by adjusted P value, and writes combined
// HTML and tab-delimited summaries (one pair of files per GO namespace), optionally
// cross-referenced against Pfam domain gain/loss data.
60 public class MetaOntologizer {
// Scientific-notation format for P values shown in the HTML output.
// NOTE(review): "FORMATER" is a typo for "FORMATTER"; kept as-is since other
// methods in this class reference it by this name.
62 private final static NumberFormat FORMATER = new DecimalFormat( "0.00E0" );
// Endpoints of the color gradient used to shade P values (green for small, black for large).
63 private final static Color MIN_COLOR = new Color( 0, 200, 50 );
64 private final static Color MAX_COLOR = new Color( 0, 0, 0 );
// Program name used as a prefix in all console messages.
65 final static private String PRG_NAME = "meta_ontologizer";
66 private static final boolean VERBOSE = true;
// Example of an Ontologizer table file name this class must recognize:
67 //table-a_41_dollo_all_gains_d-Topology-Elim-Bonferroni.txt:
69 // private final static Pattern PATTERN_ONTOLOGIZER_TABLE_OUTPUT = Pattern.compile( ".*table-(.+)_dollo_.*",
70 // Pattern.CASE_INSENSITIVE ); //TODO this might need some work...
// Extracts the species token from an Ontologizer result-table file name
// ("...table-SPECIES.txt" -> capture group 1 = "SPECIES"). Compiled once, as a constant.
71 private final static Pattern PATTERN_ONTOLOGIZER_TABLE_OUTPUT = Pattern.compile( ".*table-(.+)\\.txt",
72 Pattern.CASE_INSENSITIVE ); //TODO this might need some work...
// Returns true if the given species has at least one Ontologizer result whose GO term
// belongs to the requested GO namespace (biological process, cellular component, or
// molecular function); used to decide whether a species section is emitted at all.
// NOTE(review): the 'species' parameter declaration and the return statements are not
// visible in this extract (lines elided); the loop below implies "return true on first
// match, false otherwise" — confirm against the full source.
74 private static boolean hasResultsForSpecies( final Map<GoId, GoTerm> go_id_to_terms,
75 final SortedMap<String, SortedSet<OntologizerResult>> species_to_results_map,
77 final GoNameSpace.GoNamespaceType namespace ) {
// Resolve each result's GO id to its term and compare the term's namespace.
78 for( final OntologizerResult ontologizer_result : species_to_results_map.get( species ) ) {
79 if ( go_id_to_terms.get( ontologizer_result.getGoId() ).getGoNameSpace().getType() == namespace ) {
// Builds an HTML fragment of Pfam-family links for every domain of a species whose
// mapped GO term — or any of that term's ancestors — equals the query GO id.
// Each matched domain id is also recorded into 'found_domain_ids' (out-parameter) so
// callers can later distinguish domains with and without a GO annotation.
// Returns the accumulated HTML as a StringBuilder.
// NOTE(review): the labeled loop "D:" suggests an elided "continue D" after a match,
// so each domain is emitted at most once — confirm against the full source.
86 private static StringBuilder obtainDomainsForGoId( final List<PfamToGoMapping> pfam_to_go,
87 final SortedSet<String> domains_per_species,
88 final Map<GoId, GoTerm> all_go_terms,
89 final GoId query_go_id,
90 final Set<String> found_domain_ids ) {
91 final StringBuilder sb = new StringBuilder();
92 D: for( final String domain_id : domains_per_species ) {
// Linear scan of the Pfam->GO mapping for entries keyed by this domain.
93 for( final PfamToGoMapping ptg : pfam_to_go ) {
94 if ( ptg.getKey().equals( domain_id ) ) {
95 final GoId go_id = ptg.getValue();
// Collect the mapped GO id plus all of its ancestor (super) GO ids,
// so a query for a broader term still matches this domain.
96 final Set<GoId> super_ids = new HashSet<GoId>();
97 for( final GoTerm term : GoUtils.getAllSuperGoTerms( go_id, all_go_terms ) ) {
98 super_ids.add( term.getGoId() );
100 super_ids.add( go_id );
101 if ( super_ids.contains( query_go_id ) ) {
// Emit a bracketed hyperlink to the Pfam family page.
102 sb.append( "[<a href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain_id + "\">"
103 + domain_id + "</a>] " );
104 found_domain_ids.add( domain_id );
// Derives the species name from an Ontologizer output file name by applying
// PATTERN_ONTOLOGIZER_TABLE_OUTPUT ("...table-SPECIES.txt").
// Returns the captured species string; throws RuntimeException if the file name
// does not match the expected pattern.
113 private static String obtainSpecies( final File ontologizer_outfile ) {
114 final Matcher matcher = PATTERN_ONTOLOGIZER_TABLE_OUTPUT.matcher( ontologizer_outfile.getName() );
115 String species = null;
116 if ( matcher.matches() ) {
117 species = matcher.group( 1 );
// Progress message (presumably guarded by VERBOSE on an elided line — confirm).
120 .programMessage( PRG_NAME, "species for [" + ontologizer_outfile + "] is [" + species + "]" );
// Non-matching file names are a hard error: the species key is required downstream.
124 throw new RuntimeException( "pattern [" + PATTERN_ONTOLOGIZER_TABLE_OUTPUT + "] did not match ["
125 + ontologizer_outfile.getName() + "]" );
// Parses a domain gain/loss file into a map of species -> sorted set of domain ids.
// Expected line format: "#SPECIES" starts a new species section; "##"-prefixed and
// blank lines are ignored; every other line is a domain id belonging to the current
// species. Throws IOException if the file is unreadable, if a domain line appears
// before any species header, or (wrapping the cause) on any other parse failure.
130 private static SortedMap<Species, SortedSet<String>> parseDomainGainLossFile( final File input ) throws IOException {
131 final String error = ForesterUtil.isReadableFile( input );
132 if ( !ForesterUtil.isEmpty( error ) ) {
133 throw new IOException( error );
135 final SortedMap<Species, SortedSet<String>> speciesto_to_domain_id = new TreeMap<Species, SortedSet<String>>();
// NOTE(review): 'br' is not visibly closed in this extract — verify the full source
// closes the reader (try/finally) to avoid a resource leak.
136 final BufferedReader br = new BufferedReader( new FileReader( input ) );
139 Species current_species = null;
141 while ( ( line = br.readLine() ) != null ) {
// Skip blank lines and "##" comment lines.
144 if ( ( ForesterUtil.isEmpty( line ) ) || ( line.startsWith( "##" ) ) ) {
// A single "#" introduces a new species; the rest of the line is its name.
147 else if ( line.startsWith( "#" ) ) {
148 current_species = new BasicSpecies( line.substring( 1 ) );
149 speciesto_to_domain_id.put( current_species, new TreeSet<String>() );
151 ForesterUtil.programMessage( PRG_NAME, "saw " + current_species );
// Any other line is a domain id; it must follow a species header.
155 if ( current_species == null ) {
156 throw new IOException( "parsing problem [at line " + line_number + "] in [" + input + "]" );
// NOTE(review): new String( line ) is a redundant copy; plain 'line' would suffice.
158 speciesto_to_domain_id.get( current_species ).add( new String( line ) );
// Wrap any unexpected failure with file/line context for the caller.
162 catch ( final Exception e ) {
163 throw new IOException( "parsing problem [at line " + line_number + "] in [" + input + "]: "
166 return speciesto_to_domain_id;
// Writes all (already filtered) Ontologizer results of one species to the HTML and
// tab-delimited writers of the matching GO namespace: b_* = biological process
// (the default), c_* = cellular component, m_* = molecular function.
// 'domain_ids_with_go_annot' is an out-parameter filled by the HTML writer with the
// domas that carry a GO annotation for the reported terms.
// NOTE(review): the switch cases show no 'break' in this extract (lines elided);
// without breaks, CELLULAR_COMPONENT would fall through to MOLECULAR_FUNCTION —
// confirm the full source breaks after each case.
169 private static void processOneSpecies( final Map<GoId, GoTerm> go_id_to_terms,
170 final Writer b_html_writer,
171 final Writer b_tab_writer,
172 final Writer c_html_writer,
173 final Writer c_tab_writer,
174 final Writer m_html_writer,
175 final Writer m_tab_writer,
176 final SortedMap<String, SortedSet<OntologizerResult>> species_to_results_map,
177 final String species,
178 final double p_adjusted_upper_limit,
179 final SortedSet<String> domains_per_species,
180 final List<PfamToGoMapping> pfam_to_go,
181 final Set<String> domain_ids_with_go_annot ) throws IOException {
182 final SortedSet<OntologizerResult> ontologizer_results = species_to_results_map.get( species );
183 for( final OntologizerResult ontologizer_result : ontologizer_results ) {
184 final GoTerm go_term = go_id_to_terms.get( ontologizer_result.getGoId() );
// Default to the biological-process writers; switch below redirects as needed.
185 Writer current_html_writer = b_html_writer;
186 Writer current_tab_writer = b_tab_writer;
187 switch ( go_term.getGoNameSpace().getType() ) {
188 case CELLULAR_COMPONENT:
189 current_html_writer = c_html_writer;
190 current_tab_writer = c_tab_writer;
192 case MOLECULAR_FUNCTION:
193 current_html_writer = m_html_writer;
194 current_tab_writer = m_tab_writer;
// One row per result in each output format.
197 writeValuesToTabWriter( species, ontologizer_result, go_term, current_tab_writer );
198 writeValuesToHtmlWriter( ontologizer_result,
201 p_adjusted_upper_limit,
206 domain_ids_with_go_annot );
// Main entry point: reads every Ontologizer result table in 'ontologizer_outdir' whose
// name starts with 'result_file_prefix', filters results by adjusted P value, and writes
// six summary files (<outfile_base>_B/_C/_M, each as .html and .txt) — one pair per GO
// namespace. If 'domain_gain_loss_file' is non-null, per-species domain gain/loss data
// is parsed and cross-referenced into the HTML output.
// Throws IllegalArgumentException on bad arguments/paths and IOException on I/O failure.
// NOTE(review): an 'obo_file' parameter (used below) is not visible in this extract —
// its declaration line appears to be elided.
210 public static void reformat( final File ontologizer_outdir,
211 final String result_file_prefix,
212 final File domain_gain_loss_file,
213 final String outfile_base,
215 final double p_adjusted_upper_limit,
216 final String comment,
217 final List<PfamToGoMapping> pfam_to_go ) throws IOException {
// --- argument validation -------------------------------------------------
218 if ( !ontologizer_outdir.exists() ) {
219 throw new IllegalArgumentException( "[" + ontologizer_outdir + "] does not exist" );
221 if ( !ontologizer_outdir.isDirectory() ) {
222 throw new IllegalArgumentException( "[" + ontologizer_outdir + "] is not a directory" );
224 if ( !obo_file.exists() ) {
225 throw new IllegalArgumentException( "[" + obo_file + "] does not exist" );
227 if ( ( p_adjusted_upper_limit < 0.0 ) || ( p_adjusted_upper_limit > 1.0 ) ) {
228 throw new IllegalArgumentException( "adjusted P values limit [" + p_adjusted_upper_limit
229 + "] is out of range" );
// --- optional domain gain/loss data --------------------------------------
231 SortedMap<Species, SortedSet<String>> speciesto_to_domain_id = null;
232 if ( domain_gain_loss_file != null ) {
233 if ( !domain_gain_loss_file.exists() ) {
234 throw new IllegalArgumentException( "[" + domain_gain_loss_file + "] does not exist" );
236 speciesto_to_domain_id = parseDomainGainLossFile( domain_gain_loss_file );
238 ForesterUtil.programMessage( PRG_NAME, "parsed gain/loss domains for " + speciesto_to_domain_id.size()
239 + " species from [" + domain_gain_loss_file + "]" );
// --- collect Ontologizer output files by prefix --------------------------
242 final String[] children = ontologizer_outdir.list();
243 final List<File> ontologizer_outfiles = new ArrayList<File>();
244 if ( children == null ) {
245 throw new IllegalArgumentException( "problem with [" + ontologizer_outdir + "]" );
248 for( final String filename : children ) {
249 if ( filename.startsWith( result_file_prefix ) ) {
// NOTE(review): stores the bare file name; the directory is re-prepended
// manually further below when the file is actually read.
250 ontologizer_outfiles.add( new File( filename ) );
255 ForesterUtil.programMessage( PRG_NAME, "need to analyze " + ontologizer_outfiles.size()
256 + " Ontologizer outfiles from [" + ontologizer_outdir + "]" );
// --- parse the GO ontology ------------------------------------------------
258 final OBOparser parser = new OBOparser( obo_file, OBOparser.ReturnType.BASIC_GO_TERM );
259 final List<GoTerm> go_terms = parser.parse();
261 ForesterUtil.programMessage( PRG_NAME, "parsed " + go_terms.size() + " GO terms from [" + obo_file + "]" );
263 final Map<GoId, GoTerm> go_id_to_terms = GoUtils.createGoIdToGoTermMap( go_terms );
264 //FIXME not needed? when does this error arise?
265 // if ( go_id_to_terms.size() != go_terms.size() ) {
266 // throw new IllegalArgumentException( "GO terms with non-unique ids found" );
// --- open output writers --------------------------------------------------
// NOTE(review): the naming here is deliberately (?) cross-wired: m_file_html holds
// the "_C" name and c_file_html the "_M" name, and the writer assignments below swap
// them back, so the c_* writers do end up on the _C files and m_* writers on the _M
// files. The net effect is correct but very confusing — worth untangling.
268 final String b_file_html = outfile_base + "_B.html";
269 final String b_file_txt = outfile_base + "_B.txt";
270 final String m_file_html = outfile_base + "_C.html";
271 final String m_file_txt = outfile_base + "_C.txt";
272 final String c_file_html = outfile_base + "_M.html";
273 final String c_file_txt = outfile_base + "_M.txt";
274 final Writer b_html_writer = ForesterUtil.createBufferedWriter( b_file_html );
275 final Writer b_tab_writer = ForesterUtil.createBufferedWriter( b_file_txt );
276 final Writer c_html_writer = ForesterUtil.createBufferedWriter( m_file_html );
277 final Writer c_tab_writer = ForesterUtil.createBufferedWriter( m_file_txt );
278 final Writer m_html_writer = ForesterUtil.createBufferedWriter( c_file_html );
279 final Writer m_tab_writer = ForesterUtil.createBufferedWriter( c_file_txt );
// --- parse each result table and keep only significant results ------------
280 final SortedMap<String, SortedSet<OntologizerResult>> species_to_results_map = new TreeMap<String, SortedSet<OntologizerResult>>();
281 for( final File ontologizer_outfile : ontologizer_outfiles ) {
282 final String species = obtainSpecies( ontologizer_outfile );
283 final List<OntologizerResult> ontologizer_results = OntologizerResult.parse( new File( ontologizer_outdir
284 + ForesterUtil.FILE_SEPARATOR + ontologizer_outfile ) );
285 final SortedSet<OntologizerResult> filtered_ontologizer_results = new TreeSet<OntologizerResult>();
286 for( final OntologizerResult ontologizer_result : ontologizer_results ) {
// Keep only results at or below the adjusted-P cutoff.
287 if ( ontologizer_result.getPAdjusted() <= p_adjusted_upper_limit ) {
288 filtered_ontologizer_results.add( ontologizer_result );
291 species_to_results_map.put( species, filtered_ontologizer_results );
// --- write headers for all six outputs ------------------------------------
293 writeLabelsToTabWriter( b_tab_writer );
294 writeLabelsToTabWriter( c_tab_writer );
295 writeLabelsToTabWriter( m_tab_writer );
296 String domain_gain_loss_file_full_path_str = null;
297 if ( domain_gain_loss_file != null ) {
298 domain_gain_loss_file_full_path_str = domain_gain_loss_file.getAbsolutePath();
300 writeHtmlHeader( b_html_writer,
301 GoNameSpace.GoNamespaceType.BIOLOGICAL_PROCESS.toString() + " | Pmax = "
302 + p_adjusted_upper_limit + " | " + comment,
303 ontologizer_outdir.getAbsolutePath(),
304 domain_gain_loss_file_full_path_str );
305 writeHtmlHeader( c_html_writer,
306 GoNameSpace.GoNamespaceType.CELLULAR_COMPONENT.toString() + " | Pmax = "
307 + p_adjusted_upper_limit + " | " + comment,
308 ontologizer_outdir.getAbsolutePath(),
309 domain_gain_loss_file_full_path_str );
310 writeHtmlHeader( m_html_writer,
311 GoNameSpace.GoNamespaceType.MOLECULAR_FUNCTION.toString() + " | Pmax = "
312 + p_adjusted_upper_limit + " | " + comment,
313 ontologizer_outdir.getAbsolutePath(),
314 domain_gain_loss_file_full_path_str );
// --- per-species body: species heading, result rows, unannotated domains --
315 for( final String species : species_to_results_map.keySet() ) {
316 if ( hasResultsForSpecies( go_id_to_terms,
317 species_to_results_map,
319 GoNameSpace.GoNamespaceType.BIOLOGICAL_PROCESS ) ) {
320 writeHtmlSpecies( b_html_writer, species );
322 if ( hasResultsForSpecies( go_id_to_terms,
323 species_to_results_map,
325 GoNameSpace.GoNamespaceType.CELLULAR_COMPONENT ) ) {
326 writeHtmlSpecies( c_html_writer, species );
328 if ( hasResultsForSpecies( go_id_to_terms,
329 species_to_results_map,
331 GoNameSpace.GoNamespaceType.MOLECULAR_FUNCTION ) ) {
332 writeHtmlSpecies( m_html_writer, species );
334 SortedSet<String> domains_per_species = null;
335 if ( ( speciesto_to_domain_id != null ) && ( speciesto_to_domain_id.size() > 0 ) ) {
// Species objects are map keys; a fresh BasicSpecies with the same name
// is used for lookup (relies on BasicSpecies equality by name).
336 domains_per_species = speciesto_to_domain_id.get( new BasicSpecies( species ) );
// Filled by processOneSpecies with domains that received a GO annotation.
338 final Set<String> domain_ids_with_go_annot = new HashSet<String>();
339 processOneSpecies( go_id_to_terms,
346 species_to_results_map,
348 p_adjusted_upper_limit,
351 domain_ids_with_go_annot );
// Append the remaining (un-annotated) domains under each species section.
352 if ( ( speciesto_to_domain_id != null ) && ( speciesto_to_domain_id.size() > 0 ) ) {
353 if ( hasResultsForSpecies( go_id_to_terms,
354 species_to_results_map,
356 GoNameSpace.GoNamespaceType.BIOLOGICAL_PROCESS ) ) {
357 writeHtmlDomains( b_html_writer, domains_per_species, domain_ids_with_go_annot );
359 if ( hasResultsForSpecies( go_id_to_terms,
360 species_to_results_map,
362 GoNameSpace.GoNamespaceType.CELLULAR_COMPONENT ) ) {
363 writeHtmlDomains( c_html_writer, domains_per_species, domain_ids_with_go_annot );
365 if ( hasResultsForSpecies( go_id_to_terms,
366 species_to_results_map,
368 GoNameSpace.GoNamespaceType.MOLECULAR_FUNCTION ) ) {
369 writeHtmlDomains( m_html_writer, domains_per_species, domain_ids_with_go_annot );
// --- finish and close all outputs -----------------------------------------
373 writeHtmlEnd( b_html_writer );
374 writeHtmlEnd( c_html_writer );
375 writeHtmlEnd( m_html_writer );
376 b_html_writer.close();
377 b_tab_writer.close();
378 c_html_writer.close();
379 c_tab_writer.close();
380 m_html_writer.close();
381 m_tab_writer.close();
383 ForesterUtil.programMessage( PRG_NAME, "successfully wrote biological process summary to [" + b_file_html
385 ForesterUtil.programMessage( PRG_NAME, "successfully wrote biological process summary to [" + b_file_txt
387 ForesterUtil.programMessage( PRG_NAME, "successfully wrote molecular function summary to [" + m_file_html
389 ForesterUtil.programMessage( PRG_NAME, "successfully wrote molecular function summary to [" + m_file_txt
391 ForesterUtil.programMessage( PRG_NAME, "successfully wrote cellular component summary to [" + c_file_html
393 ForesterUtil.programMessage( PRG_NAME, "successfully wrote cellular component summary to [" + c_file_txt
// Writes one HTML table row listing, as small grey Pfam links ("new_type" CSS class),
// every domain of the species that did NOT receive a GO annotation in the result rows
// above (i.e. every domain absent from 'domain_ids_with_go_annot').
// A null 'domains' set yields an empty row.
398 private static void writeHtmlDomains( final Writer writer,
399 final SortedSet<String> domains,
400 final Set<String> domain_ids_with_go_annot ) throws IOException {
401 writer.write( "<tr>" );
// Span all ten data columns of the results table.
402 writer.write( "<td colspan=\"10\">" );
403 if ( domains != null ) {
404 for( final String domain : domains ) {
405 if ( !domain_ids_with_go_annot.contains( domain ) ) {
406 writer.write( "[<a class=\"new_type\" href=\"" + SurfacingConstants.PFAM_FAMILY_ID_LINK + domain
407 + "\">" + domain + "</a>] " );
411 writer.write( "</td>" );
412 writer.write( "</tr>" );
413 writer.write( ForesterUtil.LINE_SEPARATOR );
// Closes the results table and the HTML document opened by writeHtmlHeader.
// Does not flush or close the writer; the caller owns its lifecycle.
416 private static void writeHtmlEnd( final Writer writer ) throws IOException {
417 writer.write( "</table>" );
418 writer.write( "</body>" );
419 writer.write( "</html>" );
// Writes the HTML document head (title, embedded CSS), a small metadata table
// (analyzed directory and, when present, the gain/loss file), and the opening of the
// results table including its column-header row.
// NOTE(review): a title/desc parameter and several write calls (html/head/body open
// tags, the title text, some header cells) are elided in this extract — confirm the
// full parameter list against the complete source.
422 private static void writeHtmlHeader( final Writer w,
424 final String ontologizer_outdir,
425 final String domain_gain_loss_file ) throws IOException {
427 w.write( "<title>" );
429 w.write( "</title>" );
430 w.write( ForesterUtil.LINE_SEPARATOR );
// Embedded stylesheet: orange links, highlight on hover; the small grey
// "new_type" link class marks domains without a GO annotation.
431 w.write( "<style>" );
432 w.write( ForesterUtil.LINE_SEPARATOR );
433 w.write( "a:visited { color : #F87217; text-decoration : none; }" );
434 w.write( ForesterUtil.LINE_SEPARATOR );
435 w.write( "a:link { color : #F87217; text-decoration : none; }" );
436 w.write( ForesterUtil.LINE_SEPARATOR );
437 w.write( "a:hover { color : #FFFFFF; background-color : #00FF00; text-decoration : none; }" );
438 w.write( ForesterUtil.LINE_SEPARATOR );
// NOTE(review): duplicate of the a:hover rule above (and of a.new_type:hover below) —
// harmless but redundant.
439 w.write( "a:hover { color : #FFFFFF; background-color : #00FF00; text-decoration : none; }" );
440 w.write( ForesterUtil.LINE_SEPARATOR );
441 w.write( "a.new_type:visited { font-size: 7pt; color : #808080; text-decoration : none; }" );
442 w.write( ForesterUtil.LINE_SEPARATOR );
443 w.write( "a.new_type:link { font-size: 7pt; color : #505050; text-decoration : none; }" );
444 w.write( ForesterUtil.LINE_SEPARATOR );
445 w.write( "a.new_type:hover { font-size: 7pt; color : #000000; background-color : #FFFF00; text-decoration : none; }" );
446 w.write( ForesterUtil.LINE_SEPARATOR );
447 w.write( "a.new_type:hover { font-size: 7pt; color : #000000; background-color : #FFFF00; text-decoration : none; }" );
448 w.write( ForesterUtil.LINE_SEPARATOR );
449 w.write( "td { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 8pt}" );
450 w.write( ForesterUtil.LINE_SEPARATOR );
451 w.write( "th { text-align: left; vertical-align: top; font-family: Verdana, Arial, Helvetica; font-size: 10pt; font-weight: bold }" );
452 w.write( ForesterUtil.LINE_SEPARATOR );
453 w.write( "h1 { color : #000000; font-family: Verdana, Arial, Helvetica; font-size: 18pt; font-weight: bold }" );
454 w.write( ForesterUtil.LINE_SEPARATOR );
455 w.write( "h2 { color : #000000; font-family: Verdana, Arial, Helvetica; font-size: 16pt; font-weight: bold }" );
456 w.write( "h3 { margin-top: 12px; margin-bottom: 0px; color : #000000; font-family: Verdana, Arial, Helvetica; font-size: 12pt; font-weight: bold }" );
457 w.write( ForesterUtil.LINE_SEPARATOR );
458 w.write( "</style>" );
459 w.write( ForesterUtil.LINE_SEPARATOR );
460 w.write( "</head>" );
461 w.write( ForesterUtil.LINE_SEPARATOR );
463 w.write( ForesterUtil.LINE_SEPARATOR );
465 w.write( "meta ontologizer" );
467 w.write( ForesterUtil.LINE_SEPARATOR );
471 w.write( ForesterUtil.LINE_SEPARATOR );
// Metadata table: which directory was analyzed, optionally the gain/loss file.
472 w.write( "<table>" );
473 w.write( ForesterUtil.LINE_SEPARATOR );
474 w.write( "<tr><th>" );
// NOTE(review): user-visible typos "ontolgizer"/"analysed" left as-is — runtime
// strings are not altered in a documentation-only pass.
475 w.write( "ontolgizer output directory analysed:" );
476 w.write( "</th><td>" );
477 w.write( ontologizer_outdir );
478 w.write( "</td></tr>" );
479 if ( !ForesterUtil.isEmpty( domain_gain_loss_file ) ) {
480 w.write( ForesterUtil.LINE_SEPARATOR );
481 w.write( "<tr><th>" );
482 w.write( "domain gain or loss file:" );
483 w.write( "</th><td>" );
484 w.write( domain_gain_loss_file );
485 w.write( "</td></tr>" );
487 w.write( "</table>" );
488 w.write( ForesterUtil.LINE_SEPARATOR );
// Open the results table and emit its column headers (must stay in sync with
// the row layout produced by writeValuesToHtmlWriter).
489 w.write( "<table>" );
490 w.write( ForesterUtil.LINE_SEPARATOR );
493 w.write( "GO term name" );
494 w.write( "</th><th>" );
496 w.write( "</th><th>" );
497 w.write( "P adjusted" );
498 w.write( "</th><th>" );
500 w.write( "</th><th>" );
501 w.write( "Pop total" );
502 w.write( "</th><th>" );
503 w.write( "Pop term" );
504 w.write( "</th><th>" );
505 w.write( "Study total" );
506 w.write( "</th><th>" );
507 w.write( "Study term" );
508 w.write( "</th><th>" );
509 w.write( "Domains" );
510 w.write( "</th><th>" );
511 w.write( "trivial?" );
514 w.write( ForesterUtil.LINE_SEPARATOR );
// Writes a species section heading as a table row: the species name as an <h3>
// followed by taxonomy hyperlinks generated by SurfacingUtil.
517 private static void writeHtmlSpecies( final Writer writer, final String species ) throws IOException {
518 writer.write( "<tr>" );
519 writer.write( "<td><h3>" );
520 writer.write( species );
// Appends external taxonomy-database links for this species directly after the name.
521 SurfacingUtil.writeTaxonomyLinks( writer, species, null );
522 writer.write( "</h3></td>" );
523 writer.write( "</tr>" );
524 writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes the tab-delimited header row ("#species\tGO name\tGO id\t...") for the .txt
// outputs; the columns must stay in sync with writeValuesToTabWriter.
// NOTE(review): one label between "P adjusted" and "Pop total" (presumably the raw
// "P" column, matching the data row) is elided in this extract.
527 private static void writeLabelsToTabWriter( final Writer writer ) throws IOException {
528 writer.write( "#species" );
529 writer.write( "\t" );
530 writer.write( "GO name" );
531 writer.write( "\t" );
532 writer.write( "GO id" );
533 writer.write( "\t" );
534 writer.write( "P adjusted" );
535 writer.write( "\t" );
537 writer.write( "\t" );
538 writer.write( "Pop total" );
539 writer.write( "\t" );
540 writer.write( "Pop term" );
541 writer.write( "\t" );
542 writer.write( "Study total" );
543 writer.write( "\t" );
544 writer.write( "Study term" );
545 writer.write( "\t" );
546 writer.write( "is trivial" );
547 writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes one HTML table row for a single Ontologizer result: GO name (color-coded by
// adjusted P), linked GO id, adjusted and raw P values (color-coded), population/study
// counts, matching Pfam-domain links (when gain/loss data is available), and a
// "trivial" marker. Matched domain ids are accumulated into 'domain_ids_with_go_annot'.
// NOTE(review): the Writer parameter and the MIN_COLOR/MAX_COLOR arguments to
// calcColor are elided in this extract — confirm against the full source.
550 private static void writeValuesToHtmlWriter( final OntologizerResult ontologizer_result,
551 final GoTerm go_term,
553 final double p_adjusted_upper_limit,
554 final String species,
555 final Map<GoId, GoTerm> go_id_to_terms,
556 final SortedSet<String> domains_per_species,
557 final List<PfamToGoMapping> pfam_to_go,
558 final Set<String> domain_ids_with_go_annot ) throws IOException {
// Map each P value onto the MIN_COLOR..MAX_COLOR gradient relative to the cutoff.
559 final Color p_adj_color = ForesterUtil.calcColor( ontologizer_result.getPAdjusted(),
561 p_adjusted_upper_limit,
564 final Color p_color = ForesterUtil.calcColor( ontologizer_result.getP(),
566 p_adjusted_upper_limit,
569 writer.write( "<tr>" );
570 writer.write( "<td>" );
571 writer.write( "<font color=\"#" + ForesterUtil.colorToHex( p_adj_color ) + "\">" );
572 writer.write( go_term.getName() );
573 writer.write( "</font>" );
574 writer.write( "</td><td>" );
// GO id linked to AmiGO, opened in a dedicated browser window.
575 writer.write( "<a href=\"" + SurfacingConstants.GO_LINK + ontologizer_result.getGoId().getId()
576 + "\" target=\"amigo_window\">" + ontologizer_result.getGoId().getId() + "</a>" );
577 writer.write( "</td><td>" );
578 writer.write( "<font color=\"#" + ForesterUtil.colorToHex( p_adj_color ) + "\">" );
579 writer.write( FORMATER.format( ontologizer_result.getPAdjusted() ) );
580 writer.write( "</font>" );
581 writer.write( "</td><td>" );
582 writer.write( "<font color=\"#" + ForesterUtil.colorToHex( p_color ) + "\">" );
583 writer.write( FORMATER.format( ontologizer_result.getP() ) );
584 writer.write( "</font>" );
585 writer.write( "</td><td>" );
586 writer.write( String.valueOf( ontologizer_result.getPopTotal() ) );
587 writer.write( "</td><td>" );
588 writer.write( String.valueOf( ontologizer_result.getPopTerm() ) );
589 writer.write( "</td><td>" );
590 writer.write( String.valueOf( ontologizer_result.getStudyTotal() ) );
591 writer.write( "</td><td>" );
592 writer.write( String.valueOf( ontologizer_result.getStudyTerm() ) );
593 writer.write( "</td><td>" );
// Domains column: only populated when per-species gain/loss data was supplied.
594 if ( domains_per_species != null ) {
595 final StringBuilder sb = obtainDomainsForGoId( pfam_to_go,
599 domain_ids_with_go_annot );
600 writer.write( sb.toString() );
605 writer.write( "</td><td>" );
606 if ( ontologizer_result.isTrivial() ) {
607 writer.write( "trivial" );
612 writer.write( "</td>" );
613 writer.write( "</tr>" );
614 writer.write( ForesterUtil.LINE_SEPARATOR );
// Writes one tab-delimited data row for a single Ontologizer result; the column order
// must stay in sync with the header written by writeLabelsToTabWriter.
// NOTE(review): parameter name "got_term" looks like a typo for "go_term" — local to
// this method, so harmless.
617 private static void writeValuesToTabWriter( final String species,
618 final OntologizerResult ontologizer_result,
619 final GoTerm got_term,
620 final Writer writer ) throws IOException {
621 writer.write( species );
622 writer.write( "\t" );
623 writer.write( got_term.getName() );
624 writer.write( "\t" );
625 writer.write( ontologizer_result.getGoId().getId() );
626 writer.write( "\t" );
// Raw double values here (not FORMATER) so the .txt output keeps full precision.
627 writer.write( String.valueOf( ontologizer_result.getPAdjusted() ) );
628 writer.write( "\t" );
629 writer.write( String.valueOf( ontologizer_result.getP() ) );
630 writer.write( "\t" );
631 writer.write( String.valueOf( ontologizer_result.getPopTotal() ) );
632 writer.write( "\t" );
633 writer.write( String.valueOf( ontologizer_result.getPopTerm() ) );
634 writer.write( "\t" );
635 writer.write( String.valueOf( ontologizer_result.getStudyTotal() ) );
636 writer.write( "\t" );
637 writer.write( String.valueOf( ontologizer_result.getStudyTerm() ) );
638 writer.write( "\t" );
639 writer.write( String.valueOf( ontologizer_result.isTrivial() ) );
640 writer.write( ForesterUtil.LINE_SEPARATOR );