package org.forester.io.parsers.nhx;
+import java.awt.Color;
import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
+import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.forester.io.parsers.IteratingPhylogenyParser;
import org.forester.io.parsers.PhylogenyParser;
import org.forester.io.parsers.phyloxml.PhyloXmlDataFormatException;
import org.forester.io.parsers.util.ParserUtils;
import org.forester.phylogeny.PhylogenyMethods;
import org.forester.phylogeny.PhylogenyNode;
import org.forester.phylogeny.data.Accession;
+import org.forester.phylogeny.data.BranchColor;
import org.forester.phylogeny.data.Confidence;
-import org.forester.phylogeny.data.DomainArchitecture;
import org.forester.phylogeny.data.Event;
import org.forester.phylogeny.data.Identifier;
import org.forester.phylogeny.data.PhylogenyDataUtil;
import org.forester.phylogeny.data.Sequence;
import org.forester.phylogeny.data.Taxonomy;
import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
+import org.forester.util.ForesterConstants;
import org.forester.util.ForesterUtil;
-public final class NHXParser implements PhylogenyParser {
-
- public static final TAXONOMY_EXTRACTION TAXONOMY_EXTRACTION_DEFAULT = TAXONOMY_EXTRACTION.NO;
- public final static Pattern UC_LETTERS_NUMBERS_PATTERN = Pattern.compile( "^[A-Z0-9]+$" );
- public final static Pattern NUMBERS_ONLY_PATTERN = Pattern.compile( "^[0-9\\.]+$" );
- public final static Pattern MB_PROB_PATTERN = Pattern.compile( "prob=([^,]+)" );
- public final static Pattern MB_PROB_SD_PATTERN = Pattern.compile( "prob_stddev=([^,]+)" );
- public final static Pattern MB_BL_PATTERN = Pattern.compile( "length_median=([^,]+)" );
- final static private boolean GUESS_ROOTEDNESS_DEFAULT = true;
- final static private boolean GUESS_IF_SUPPORT_VALUES = true;
- final static private boolean IGNORE_QUOTES_DEFAULT = false;
- final static public boolean REPLACE_UNDERSCORES_DEFAULT = false;
- final static private byte STRING = 0;
- final static private byte STRING_BUFFER = 1;
- final static private byte CHAR_ARRAY = 2;
- final static private byte BUFFERED_READER = 3;
- final static private byte STRING_BUILDER = 4;
- private boolean _guess_rootedness;
- private boolean _ignore_quotes;
- private byte _input_type;
- private int _source_length;
- private PhylogenyNode _current_node;
- private StringBuilder _current_anotation;
- private Object _nhx_source;
- private int _clade_level;
- private Phylogeny _current_phylogeny;
- private TAXONOMY_EXTRACTION _taxonomy_extraction;
- private boolean _replace_underscores;
- private boolean _in_comment = false;
- private boolean _saw_colon = false;
- private boolean _saw_closing_paren;
- private boolean _saw_open_bracket = false;
- private boolean _in_open_bracket = false;
- private boolean _in_double_quote = false;
- private boolean _in_single_quote = false;
- private String _my_source_str = null;
- private StringBuffer _my_source_sbuff = null;
- private StringBuilder _my_source_sbuil = null;
- private char[] _my_source_charary = null;
- private BufferedReader _my_source_br = null;
- private int _i;
- private Phylogeny _next;
- private Object _source;
+public final class NHXParser implements PhylogenyParser, IteratingPhylogenyParser {
+
+ private final static Pattern MB_BL_PATTERN = Pattern.compile( "length.median=([-+eE0-9\\.]+)" );
+ private final static Pattern MB_PROB_PATTERN = Pattern.compile( "prob=([-+eE0-9\\.]+)" );
+ private final static Pattern MB_PROB_SD_PATTERN = Pattern.compile( "prob.stddev=([-+eE0-9\\.]+)" );
+ private final static Pattern NUMBERS_ONLY_PATTERN = Pattern.compile( "^[0-9\\.]+$" );
+
+ private final static Pattern BEAST_STYLE_EXTENDED_BOOTSTRAP_PATTERN = Pattern.compile( "boot?strap=([\\d\\.]+)" );
+ private final static Pattern BEAST_STYLE_EXTENDED_COLOR_PATTERN = Pattern.compile( "colou?r=(#[\\da-fA-F]{6})" );
+ private final static Pattern ENDS_WITH_NUMBER_PATTERN = Pattern.compile( "(:[-+eE0-9\\.]+$)" );
+
+
+ public final static boolean REPLACE_UNDERSCORES_DEFAULT = false;
+ private final static boolean ALLOW_ERRORS_IN_DISTANCE_TO_PARENT_DEFAULT = false;
+ private final static byte BUFFERED_READER = 3;
+ private final static byte CHAR_ARRAY = 2;
+ private final static boolean GUESS_IF_SUPPORT_VALUES = true;
+ private final static boolean GUESS_ROOTEDNESS_DEFAULT = true;
+ private final static boolean IGNORE_QUOTES_DEFAULT = false;
+
+ private final static char BELL = 7;
+ private final static String ENCODING_DEFAULT = ForesterConstants.UTF_8;
+ private boolean _allow_errors_in_distance_to_parent;
+ private int _clade_level;
+ private StringBuilder _current_anotation;
+ private PhylogenyNode _current_node;
+ private Phylogeny _current_phylogeny;
+ private boolean _guess_rootedness;
+ private int _i;
+ private boolean _ignore_quotes;
+ private boolean _in_comment = false;
+ private boolean _in_double_quote = false;
+ private boolean _in_open_bracket = false;
+ private boolean _in_single_quote = false;
+ private byte _input_type;
+ private BufferedReader _my_source_br = null;
+ private char[] _my_source_charary = null;
+ private Phylogeny _next;
+ private Object _nhx_source;
+ private boolean _replace_underscores;
+ private boolean _saw_closing_paren;
+ private boolean _saw_colon = false;
+ private boolean _saw_open_bracket = false;
+ private Object _source;
+ private int _source_length;
+ private TAXONOMY_EXTRACTION _taxonomy_extraction;
+ private boolean _parse_beast_style_extended_tags = false;
+ private final String _encoding;
public NHXParser() {
+ _encoding = ENCODING_DEFAULT;
init();
}
+
+ public NHXParser( final String encoding ) {
+ _encoding = encoding;
+ init();
+ }
+
+ @Override
+ public String getName() {
+ return "NH/NHX Parser";
+ }
    /**
     * Returns the currently configured taxonomy extraction mode.
     */
    public final TAXONOMY_EXTRACTION getTaxonomyExtraction() {
        return _taxonomy_extraction;
    }
    /**
     * Returns true if another phylogeny has already been pre-parsed and is
     * waiting to be handed out by next().
     */
    @Override
    public final boolean hasNext() {
        return _next != null;
    }
+ @Override
public final Phylogeny next() throws NHXFormatException, IOException {
final Phylogeny phy = _next;
- getNext();
+ parseNext();
return phy;
}
@Override
public final Phylogeny[] parse() throws IOException {
- reset();
final List<Phylogeny> l = new ArrayList<Phylogeny>();
while ( hasNext() ) {
l.add( next() );
for( int i = 0; i < l.size(); ++i ) {
p[ i ] = l.get( i );
}
+ reset();
return p;
}
+ @Override
public final void reset() throws NHXFormatException, IOException {
_i = 0;
_next = null;
_current_anotation = new StringBuilder();
_current_phylogeny = null;
_current_node = null;
- _my_source_str = null;
- _my_source_sbuff = null;
- _my_source_sbuil = null;
_my_source_charary = null;
- _my_source_br = null;
- determineSourceType( _source );
+ determineAndProcessSourceType( _source );
switch ( _input_type ) {
- case STRING:
- _my_source_str = ( String ) _nhx_source;
- break;
- case STRING_BUFFER:
- _my_source_sbuff = ( StringBuffer ) _nhx_source;
- break;
- case STRING_BUILDER:
- _my_source_sbuil = ( StringBuilder ) _nhx_source;
- break;
case CHAR_ARRAY:
+ _my_source_br = null;
_my_source_charary = ( char[] ) _nhx_source;
break;
case BUFFERED_READER:
- if ( _my_source_br != null ) {
- try {
- _my_source_br.close();
- }
- catch ( final IOException e ) {
- //do nothing
- }
- }
_my_source_br = ( BufferedReader ) _nhx_source;
break;
default:
throw new RuntimeException( "unknown input type" );
}
- getNext();
+ parseNext();
}
public final void setGuessRootedness( final boolean guess_rootedness ) {
_taxonomy_extraction = taxonomy_extraction;
}
- private final void determineSourceType( final Object nhx_source ) throws PhylogenyParserException,
- FileNotFoundException {
    /**
     * If set to true, unparsable distance-to-parent values are tolerated
     * (doubleValue falls back to 0.0) instead of triggering an
     * NHXFormatException. Default is false.
     */
    public final void setAllowErrorsInDistanceToParent( final boolean allow_errors_in_distance_to_parent ) {
        _allow_errors_in_distance_to_parent = allow_errors_in_distance_to_parent;
    }
+
+ private final void determineAndProcessSourceType( final Object nhx_source ) throws IOException {
if ( nhx_source == null ) {
throw new PhylogenyParserException( getClass() + ": attempt to parse null object." );
}
else if ( nhx_source instanceof String ) {
- _input_type = NHXParser.STRING;
- _source_length = ( ( String ) nhx_source ).length();
- _nhx_source = nhx_source;
- }
- else if ( nhx_source instanceof StringBuilder ) {
- _input_type = NHXParser.STRING_BUILDER;
- _source_length = ( ( StringBuilder ) nhx_source ).length();
- _nhx_source = nhx_source;
- }
- else if ( nhx_source instanceof StringBuffer ) {
- _input_type = NHXParser.STRING_BUFFER;
- _source_length = ( ( StringBuffer ) nhx_source ).length();
- _nhx_source = nhx_source;
- }
- else if ( nhx_source instanceof StringBuilder ) {
- _input_type = NHXParser.STRING_BUILDER;
- _source_length = ( ( StringBuilder ) nhx_source ).length();
_nhx_source = nhx_source;
+ _input_type = NHXParser.BUFFERED_READER;
+ _source_length = 0;
+ InputStream is = new ByteArrayInputStream( (( String ) nhx_source ).getBytes(getEncoding()));
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
+ _nhx_source = new BufferedReader( isr );
}
else if ( nhx_source instanceof char[] ) {
_input_type = NHXParser.CHAR_ARRAY;
else if ( nhx_source instanceof File ) {
_input_type = NHXParser.BUFFERED_READER;
_source_length = 0;
+ if ( _my_source_br != null ) {
+ //I am REALLY not sure if it is a "good" idea NOT to close the stream...
+ // try {
+ // _my_source_br.close();
+ // }
+ // catch ( final IOException e ) {
+ // }
+ }
final File f = ( File ) nhx_source;
final String error = ForesterUtil.isReadableFile( f );
if ( !ForesterUtil.isEmpty( error ) ) {
throw new PhylogenyParserException( error );
}
- _nhx_source = new BufferedReader( new FileReader( f ) );
+ final InputStream is = new FileInputStream( f );
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
+ _nhx_source = new BufferedReader( isr );
}
- else if ( nhx_source instanceof InputStream ) {
+ else if ( nhx_source instanceof URL ) {
_input_type = NHXParser.BUFFERED_READER;
_source_length = 0;
- final InputStreamReader isr = new InputStreamReader( ( InputStream ) nhx_source );
+ if ( _my_source_br != null ) {
+ //I am REALLY not sure if it is a "good" idea NOT to close the stream...
+ // try {
+ // _my_source_br.close();
+ // }
+ // catch ( final IOException e ) {
+ // }
+ }
+ final InputStream is = ( ( URL ) nhx_source ).openStream();
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
_nhx_source = new BufferedReader( isr );
}
+ else if ( nhx_source instanceof InputStream ) {
+ _input_type = NHXParser.BUFFERED_READER;
+ _source_length = 0;
+ if ( _my_source_br != null ) {
+ //I am REALLY not sure if it is a "good" idea NOT to close the stream...
+ // try {
+ // _my_source_br.close();
+ // }
+ // catch ( final IOException e ) {
+ // }
+ }
+ final InputStream is = ( InputStream ) nhx_source;
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
+ _nhx_source = new BufferedReader( isr );
+ }
else {
throw new IllegalArgumentException( getClass() + " can only parse objects of type String,"
- + " StringBuffer, StringBuilder, char[], File," + " or InputStream "
+ + " char[], File, InputStream, or URL "
+ " [attempt to parse object of " + nhx_source.getClass() + "]." );
}
}
private final Phylogeny finishPhylogeny() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
if ( _current_phylogeny != null ) {
parseNHX( _current_anotation != null ? _current_anotation.toString() : "",
- _current_phylogeny.getRoot(),
- getTaxonomyExtraction(),
- isReplaceUnderscores() );
+ _current_phylogeny.getRoot(),
+ getTaxonomyExtraction(),
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
if ( GUESS_IF_SUPPORT_VALUES ) {
if ( isBranchLengthsLikeBootstrapValues( _current_phylogeny ) ) {
moveBranchLengthsToConfidenceValues( _current_phylogeny );
}
private final Phylogeny finishSingleNodePhylogeny() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
final PhylogenyNode new_node = new PhylogenyNode();
- parseNHX( _current_anotation.toString(), new_node, getTaxonomyExtraction(), isReplaceUnderscores() );
+ parseNHX( _current_anotation.toString(),
+ new_node,
+ getTaxonomyExtraction(),
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
_current_phylogeny = new Phylogeny();
_current_phylogeny.setRoot( new_node );
return _current_phylogeny;
}
- private final void getNext() throws IOException, NHXFormatException {
+ private final void init() {
+ setTaxonomyExtraction( TAXONOMY_EXTRACTION.NO );
+ setReplaceUnderscores( REPLACE_UNDERSCORES_DEFAULT );
+ setGuessRootedness( GUESS_ROOTEDNESS_DEFAULT );
+ setIgnoreQuotes( IGNORE_QUOTES_DEFAULT );
+ setAllowErrorsInDistanceToParent( ALLOW_ERRORS_IN_DISTANCE_TO_PARENT_DEFAULT );
+ setParseBeastStyleExtendedTags( false );
+ }
+
    /** Whether unparsable distance-to-parent values are tolerated (default: false). */
    private final boolean isAllowErrorsInDistanceToParent() {
        return _allow_errors_in_distance_to_parent;
    }

    /** Whether rootedness of the resulting phylogeny should be guessed. */
    private final boolean isGuessRootedness() {
        return _guess_rootedness;
    }

    /** Whether quote characters in the input are ignored rather than honored. */
    private final boolean isIgnoreQuotes() {
        return _ignore_quotes;
    }

    /** Whether underscores in node names are replaced by spaces. */
    private final boolean isReplaceUnderscores() {
        return _replace_underscores;
    }
+
+ private final void parseNext() throws IOException, NHXFormatException {
+ if ( _source == null ) {
+ throw new IOException( "source is not set" );
+ }
while ( true ) {
char c = '\b';
if ( _input_type == BUFFERED_READER ) {
if ( _i >= _source_length ) {
break;
}
- else {
- switch ( _input_type ) {
- case STRING:
- c = _my_source_str.charAt( _i );
- break;
- case STRING_BUFFER:
- c = _my_source_sbuff.charAt( _i );
- break;
- case STRING_BUILDER:
- c = _my_source_sbuil.charAt( _i );
- break;
- case CHAR_ARRAY:
- c = _my_source_charary[ _i ];
- break;
- }
- }
+ c = _my_source_charary[ _i ];
}
if ( !_in_single_quote && !_in_double_quote ) {
if ( c == ':' ) {
_saw_colon = true;
}
- else if ( !( ( c < 33 ) || ( c > 126 ) ) && _saw_colon
+ else if ( !( ( c < 33 ) || ( c == 127 ) ) && _saw_colon
&& ( ( c != '[' ) && ( c != '.' ) && ( ( c < 48 ) || ( c > 57 ) ) ) ) {
_saw_colon = false;
}
}
}
// \n\t is always ignored,
- // as is " (34) and ' (39) (space is 32):
- if ( ( isIgnoreQuotes() && ( ( c < 33 ) || ( c > 126 ) || ( c == 34 ) || ( c == 39 ) || ( ( _clade_level == 0 ) && ( c == ';' ) ) ) )
- || ( !isIgnoreQuotes() && ( ( c < 32 ) || ( c > 126 ) || ( ( _clade_level == 0 ) && ( c == ';' ) ) ) ) ) {
- //do nothing
- }
- else if ( ( c == 32 ) && ( !_in_single_quote && !_in_double_quote ) ) {
+ // "=34 '=39 space=32
+ if ( ( c < 32 ) || ( c == 127 ) || ( isIgnoreQuotes() && ( ( c == 32 ) || ( c == 34 ) || ( c == 39 ) ) )
+ || ( ( c == 32 ) && ( !_in_single_quote && !_in_double_quote ) )
+ || ( ( _clade_level == 0 ) && ( c == ';' ) && ( !_in_single_quote && !_in_double_quote ) ) ) {
//do nothing
}
else if ( _in_comment ) {
_in_double_quote = false;
}
else {
- _current_anotation.append( c );
+ _current_anotation.append( changeCharInParens( c ) );
}
}
- else if ( c == '"' ) {
+ else if ( ( c == '"' ) && !_in_single_quote ) {
_in_double_quote = true;
}
else if ( _in_single_quote ) {
_in_single_quote = false;
}
else {
- _current_anotation.append( c );
+ _current_anotation.append( changeCharInParens( c ) );
}
}
else if ( c == 39 ) {
final Phylogeny phy = processOpenParen();
if ( phy != null ) {
++_i;
- // return phy;
_next = phy;
return;
}
_current_anotation.append( c );
}
++_i;
- } // while ( true )
+ } // while ( true )
if ( _clade_level != 0 ) {
throw new PhylogenyParserException( "error in NH (Newick) formatted data: most likely cause: number of open parens does not equal number of close parens" );
}
}
}
- private final void init() {
- setTaxonomyExtraction( TAXONOMY_EXTRACTION_DEFAULT );
- setReplaceUnderscores( REPLACE_UNDERSCORES_DEFAULT );
- setGuessRootedness( GUESS_ROOTEDNESS_DEFAULT );
- setIgnoreQuotes( IGNORE_QUOTES_DEFAULT );
- }
-
- private final boolean isGuessRootedness() {
- return _guess_rootedness;
- }
-
- private final boolean isIgnoreQuotes() {
- return _ignore_quotes;
- }
-
- private final boolean isReplaceUnderscores() {
- return _replace_underscores;
+ private final static char changeCharInParens( char c ) {
+ if ( c == ':' ) {
+ c = BELL;
+ }
+ else if ( c == '[' ) {
+ c = '{';
+ }
+ else if ( c == ']' ) {
+ c = '}';
+ }
+ return c;
}
private final void processCloseParen() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
if ( _clade_level < 0 ) {
throw new PhylogenyParserException( "error in NH (Newick)/NHX formatted data: most likely cause: number of close parens is larger than number of open parens" );
}
--_clade_level;
if ( !_saw_closing_paren ) {
final PhylogenyNode new_node = new PhylogenyNode();
- parseNHX( _current_anotation.toString(), new_node, getTaxonomyExtraction(), isReplaceUnderscores() );
+ parseNHX( _current_anotation.toString(),
+ new_node,
+ getTaxonomyExtraction(),
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
_current_anotation = new StringBuilder();
_current_node.addAsChild( new_node );
}
parseNHX( _current_anotation.toString(),
_current_node.getLastChildNode(),
getTaxonomyExtraction(),
- isReplaceUnderscores() );
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
_current_anotation = new StringBuilder();
}
if ( !_current_node.isRoot() ) {
private final void processComma() throws PhylogenyParserException, NHXFormatException, PhyloXmlDataFormatException {
if ( !_saw_closing_paren ) {
final PhylogenyNode new_node = new PhylogenyNode();
- parseNHX( _current_anotation.toString(), new_node, getTaxonomyExtraction(), isReplaceUnderscores() );
+ parseNHX( _current_anotation.toString(),
+ new_node,
+ getTaxonomyExtraction(),
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
if ( _current_node == null ) {
throw new NHXFormatException( "format might not be NH or NHX" );
}
parseNHX( _current_anotation.toString(),
_current_node.getLastChildNode(),
getTaxonomyExtraction(),
- isReplaceUnderscores() );
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
}
_current_anotation = new StringBuilder();
_saw_closing_paren = false;
}
private final Phylogeny processOpenParen() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
Phylogeny phy = null;
final PhylogenyNode new_node = new PhylogenyNode();
if ( _clade_level == 0 ) {
return phy;
}
+ private final static NHXParser createInstance( final Object nhx_source ) throws NHXFormatException, IOException {
+ final NHXParser parser = new NHXParser();
+ parser.setSource( nhx_source );
+ return parser;
+ }
+
+ public final static Phylogeny[] parse( final Object nhx_source ) throws NHXFormatException, IOException {
+ return NHXParser.createInstance( nhx_source ).parse();
+ }
+
public final static void parseNHX( String s,
final PhylogenyNode node_to_annotate,
final TAXONOMY_EXTRACTION taxonomy_extraction,
- final boolean replace_underscores ) throws NHXFormatException,
- PhyloXmlDataFormatException {
+ final boolean replace_underscores,
+ final boolean allow_errors_in_distance_to_parent,
+ final boolean replace_bell,
+ final boolean parse_beast_style_extended_tags ) throws NHXFormatException,
+ PhyloXmlDataFormatException {
if ( ( taxonomy_extraction != TAXONOMY_EXTRACTION.NO ) && replace_underscores ) {
throw new IllegalArgumentException( "cannot extract taxonomies and replace under scores at the same time" );
}
if ( replace_underscores ) {
s = s.replaceAll( "_+", " " );
}
+ s = s.replaceAll( "\\s+", " " ).trim();
boolean is_nhx = false;
final int ob = s.indexOf( "[" );
if ( ob > -1 ) {
else if ( s.indexOf( "prob=" ) > -1 ) {
processMrBayes3Data( s, node_to_annotate );
}
+ if ( parse_beast_style_extended_tags ) {
+ processBeastStyleExtendedData( s, node_to_annotate );
+ }
+ final Matcher ewn_matcher = ENDS_WITH_NUMBER_PATTERN.matcher( s );
+ if ( ewn_matcher.find() ) {
+ b = ewn_matcher.group(1);
+ }
}
s = s.substring( 0, ob ) + b;
if ( ( s.indexOf( "[" ) > -1 ) || ( s.indexOf( "]" ) > -1 ) ) {
}
}
final StringTokenizer t = new StringTokenizer( s, ":" );
+
if ( t.countTokens() > 0 ) {
if ( !s.startsWith( ":" ) ) {
- node_to_annotate.setName( t.nextToken() );
+ if ( ( s.indexOf( BELL ) <= -1 ) || !replace_bell ) {
+ node_to_annotate.setName( t.nextToken() );
+ }
+ else {
+ node_to_annotate.setName( t.nextToken().replace( BELL, ':' ) );
+ }
if ( !replace_underscores && ( !is_nhx && ( taxonomy_extraction != TAXONOMY_EXTRACTION.NO ) ) ) {
ParserUtils.extractTaxonomyDataFromNodeName( node_to_annotate, taxonomy_extraction );
}
}
while ( t.hasMoreTokens() ) {
s = t.nextToken();
- if ( s.startsWith( NHXtags.SPECIES_NAME ) ) {
+ if ( ( s.indexOf( BELL ) > -1 ) && replace_bell ) {
+ s = s.replace( BELL, ':' );
+ }
+ if ( s.indexOf( '=' ) < 0 ) {
+ if ( ( node_to_annotate.getDistanceToParent() != PhylogenyDataUtil.BRANCH_LENGTH_DEFAULT )
+ && !allow_errors_in_distance_to_parent ) {
+ throw new NHXFormatException( "error in NHX formatted data: more than one distance to parent:"
+ + "\"" + s + "\"" );
+ }
+ node_to_annotate.setDistanceToParent( doubleValue( s, allow_errors_in_distance_to_parent ) );
+ }
+ else if ( s.startsWith( NHXtags.SPECIES_NAME ) ) {
if ( !node_to_annotate.getNodeData().isHasTaxonomy() ) {
node_to_annotate.getNodeData().setTaxonomy( new Taxonomy() );
}
}
}
else if ( s.startsWith( NHXtags.SUPPORT ) ) {
- PhylogenyMethods.setConfidence( node_to_annotate, doubleValue( s.substring( 2 ) ) );
+ PhylogenyMethods.setConfidence( node_to_annotate, doubleValue( s.substring( 2 ), false ) );
}
else if ( s.startsWith( NHXtags.TAXONOMY_ID ) ) {
if ( !node_to_annotate.getNodeData().isHasTaxonomy() ) {
}
node_to_annotate.getNodeData().getTaxonomy().setIdentifier( new Identifier( s.substring( 2 ) ) );
}
- else if ( s.startsWith( NHXtags.DOMAIN_STRUCTURE ) ) {
- if ( !node_to_annotate.getNodeData().isHasSequence() ) {
- node_to_annotate.getNodeData().setSequence( new Sequence() );
- }
- node_to_annotate.getNodeData().getSequence()
- .setDomainArchitecture( new DomainArchitecture( s.substring( 3 ) ) );
- }
else if ( s.startsWith( NHXtags.SEQUENCE_ACCESSION ) ) {
if ( !node_to_annotate.getNodeData().isHasSequence() ) {
node_to_annotate.getNodeData().setSequence( new Sequence() );
}
node_to_annotate.getNodeData().getSequence()
- .setAccession( new Accession( s.substring( 3 ), "?" ) );
+ .setAccession( new Accession( s.substring( 3 ), "?" ) );
}
else if ( s.startsWith( NHXtags.GENE_NAME ) ) {
if ( !node_to_annotate.getNodeData().isHasSequence() ) {
}
node_to_annotate.getNodeData().getSequence().setName( s.substring( 3 ) );
}
- else if ( s.indexOf( '=' ) < 0 ) {
- if ( node_to_annotate.getDistanceToParent() != PhylogenyDataUtil.BRANCH_LENGTH_DEFAULT ) {
- throw new NHXFormatException( "error in NHX formatted data: more than one distance to parent:"
- + "\"" + s + "\"" );
- }
- node_to_annotate.setDistanceToParent( doubleValue( s ) );
- }
- } // while ( t.hasMoreTokens() )
+ } // while ( t.hasMoreTokens() )
}
}
}
- private final static double doubleValue( final String str ) throws NHXFormatException {
+ private final static double doubleValue( final String str, final boolean allow_errors ) throws NHXFormatException {
try {
return Double.valueOf( str ).doubleValue();
}
catch ( final NumberFormatException ex ) {
- throw new NHXFormatException( "error in NH/NHX formatted data: failed to parse number from " + "\"" + str
- + "\"" );
+ if ( !allow_errors ) {
+ throw new NHXFormatException( "error in NH/NHX formatted data: failed to parse number from " + "\""
+ + str + "\"" );
+ }
}
+ return 0.0;
}
private final static boolean isBranchLengthsLikeBootstrapValues( final Phylogeny p ) {
}
}
+ private final static void processBeastStyleExtendedData( final String s,
+ final PhylogenyNode node_to_annotate )
+ throws NHXFormatException {
+ final Matcher ft_bs_matcher = BEAST_STYLE_EXTENDED_BOOTSTRAP_PATTERN.matcher( s );
+
+ double bs = -1;
+ if ( ft_bs_matcher.find() ) {
+ try {
+ bs = Double.parseDouble( ft_bs_matcher.group( 1 ) );
+ }
+ catch ( final NumberFormatException e ) {
+ throw new NHXFormatException( "failed to parse bootstrap support from \""
+ + s + "\"" );
+ }
+ if ( bs >= 0.0 ) {
+ node_to_annotate.getBranchData()
+ .addConfidence( new Confidence( bs, "bootstrap" ) );
+ }
+ }
+ final Matcher ft_color_matcher = BEAST_STYLE_EXTENDED_COLOR_PATTERN.matcher( s );
+ Color c = null;
+ if ( ft_color_matcher.find() ) {
+ try {
+ c = Color.decode(ft_color_matcher.group( 1 ) );
+ }
+ catch ( final NumberFormatException e ) {
+ throw new NHXFormatException( "failed to parse color from \""
+ + s + "\"" );
+ }
+ }
+ if ( c != null ) {
+ node_to_annotate.getBranchData().setBranchColor( new BranchColor( c ) );
+ }
+ }
+
private final static void processMrBayes3Data( final String s, final PhylogenyNode node_to_annotate )
throws NHXFormatException {
double sd = -1;
if ( prob >= 0.0 ) {
if ( sd >= 0.0 ) {
node_to_annotate.getBranchData()
- .addConfidence( new Confidence( prob, "posterior probability", sd ) );
+ .addConfidence( new Confidence( prob, "posterior probability", sd ) );
}
else {
node_to_annotate.getBranchData().addConfidence( new Confidence( prob, "posterior probability" ) );
}
catch ( final NumberFormatException e ) {
throw new NHXFormatException( "failed to parse median branch length (Mr Bayes output) from \"" + s
- + "\"" );
+ + "\"" );
}
if ( bl >= 0.0 ) {
node_to_annotate.setDistanceToParent( bl );
}
}
    /** Returns the charset name used when decoding the input source. */
    public String getEncoding() {
        return _encoding;
    }

    /** Whether BEAST/FigTree-style extended tags (bootstrap=, colour=) are parsed. */
    private final boolean isParseBeastStyleExtendedTags() {
        return _parse_beast_style_extended_tags;
    }

    /** Enables/disables parsing of BEAST/FigTree-style extended tags (default: off). */
    public final void setParseBeastStyleExtendedTags( final boolean parse_beast_style_extended_tags ) {
        _parse_beast_style_extended_tags = parse_beast_style_extended_tags;
    }
+
    /**
     * Modes for extracting taxonomy information from node names. NO disables
     * extraction entirely; the PFAM_STYLE_* and AGGRESSIVE modes presumably
     * differ in how permissively names are matched — the concrete matching is
     * delegated to ParserUtils.extractTaxonomyDataFromNodeName (TODO confirm).
     */
    public static enum TAXONOMY_EXTRACTION {
        AGGRESSIVE, NO, PFAM_STYLE_RELAXED, PFAM_STYLE_STRICT;
    }
}