package org.forester.io.parsers.nhx;
+import java.awt.Color;
import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.forester.phylogeny.PhylogenyMethods;
import org.forester.phylogeny.PhylogenyNode;
import org.forester.phylogeny.data.Accession;
+import org.forester.phylogeny.data.BranchColor;
import org.forester.phylogeny.data.Confidence;
import org.forester.phylogeny.data.Event;
import org.forester.phylogeny.data.Identifier;
import org.forester.phylogeny.data.Sequence;
import org.forester.phylogeny.data.Taxonomy;
import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
+import org.forester.util.ForesterConstants;
import org.forester.util.ForesterUtil;
public final class NHXParser implements PhylogenyParser, IteratingPhylogenyParser {
- public final static Pattern MB_BL_PATTERN = Pattern.compile( "length_median=([^,]+)" );
- public final static Pattern MB_PROB_PATTERN = Pattern.compile( "prob=([^,]+)" );
- public final static Pattern MB_PROB_SD_PATTERN = Pattern.compile( "prob_stddev=([^,]+)" );
- public final static Pattern NUMBERS_ONLY_PATTERN = Pattern.compile( "^[0-9\\.]+$" );
- final static public boolean REPLACE_UNDERSCORES_DEFAULT = false;
- private static final boolean ALLOW_ERRORS_IN_DISTANCE_TO_PARENT_DEFAULT = false;
- final static private byte BUFFERED_READER = 3;
- final static private byte CHAR_ARRAY = 2;
- final static private boolean GUESS_IF_SUPPORT_VALUES = true;
- final static private boolean GUESS_ROOTEDNESS_DEFAULT = true;
- final static private boolean IGNORE_QUOTES_DEFAULT = false;
- final static private byte STRING = 0;
- final static private byte STRING_BUFFER = 1;
- final static private byte STRING_BUILDER = 4;
- final static private char BELL = 7;
+ private final static Pattern MB_BL_PATTERN = Pattern.compile( "length.median=([-+eE0-9\\.]+)" );
+ private final static Pattern MB_PROB_PATTERN = Pattern.compile( "prob=([-+eE0-9\\.]+)" );
+ private final static Pattern MB_PROB_SD_PATTERN = Pattern.compile( "prob.stddev=([-+eE0-9\\.]+)" );
+ private final static Pattern NUMBERS_ONLY_PATTERN = Pattern.compile( "^[0-9\\.]+$" );
+
+ private final static Pattern BEAST_STYLE_EXTENDED_BOOTSTRAP_PATTERN = Pattern.compile( "boot?strap=([\\d\\.]+)" );
+ private final static Pattern BEAST_STYLE_EXTENDED_COLOR_PATTERN = Pattern.compile( "colou?r=(#[\\da-fA-F]{6})" );
+ private final static Pattern ENDS_WITH_NUMBER_PATTERN = Pattern.compile( "(:[-+eE0-9\\.]+$)" );
+
+
+ public final static boolean REPLACE_UNDERSCORES_DEFAULT = false;
+ private final static boolean ALLOW_ERRORS_IN_DISTANCE_TO_PARENT_DEFAULT = false;
+ private final static byte BUFFERED_READER = 3;
+ private final static byte CHAR_ARRAY = 2;
+ private final static boolean GUESS_IF_SUPPORT_VALUES = true;
+ private final static boolean GUESS_ROOTEDNESS_DEFAULT = true;
+ private final static boolean IGNORE_QUOTES_DEFAULT = false;
+
+ private final static char BELL = 7;
+ private final static String ENCODING_DEFAULT = ForesterConstants.UTF_8;
private boolean _allow_errors_in_distance_to_parent;
private int _clade_level;
private StringBuilder _current_anotation;
private byte _input_type;
private BufferedReader _my_source_br = null;
private char[] _my_source_charary = null;
- private StringBuffer _my_source_sbuff = null;
- private StringBuilder _my_source_sbuil = null;
- private String _my_source_str = null;
private Phylogeny _next;
private Object _nhx_source;
private boolean _replace_underscores;
private Object _source;
private int _source_length;
private TAXONOMY_EXTRACTION _taxonomy_extraction;
+ private boolean _parse_beast_style_extended_tags = false;
+ private final String _encoding;
// Creates a parser using the default character encoding (UTF-8, per ENCODING_DEFAULT).
public NHXParser() {
    _encoding = ENCODING_DEFAULT;
    init();
}
+
// Creates a parser that reads byte-oriented sources (File, URL, InputStream)
// using the given character encoding name, e.g. "UTF-8"; the encoding is
// applied when the source is wrapped in an InputStreamReader.
public NHXParser( final String encoding ) {
    _encoding = encoding;
    init();
}
@Override
public String getName() {
- return "NN/NHX Parser";
+ return "NH/NHX Parser";
}
public final TAXONOMY_EXTRACTION getTaxonomyExtraction() {
@Override
public final Phylogeny[] parse() throws IOException {
final List<Phylogeny> l = new ArrayList<Phylogeny>();
- int c = 0;
while ( hasNext() ) {
l.add( next() );
- c++;
}
final Phylogeny[] p = new Phylogeny[ l.size() ];
for( int i = 0; i < l.size(); ++i ) {
_current_anotation = new StringBuilder();
_current_phylogeny = null;
_current_node = null;
- _my_source_str = null;
- _my_source_sbuff = null;
- _my_source_sbuil = null;
_my_source_charary = null;
- determineSourceType( _source );
+ determineAndProcessSourceType( _source );
switch ( _input_type ) {
- case STRING:
- _my_source_br = null;
- _my_source_str = ( String ) _nhx_source;
- break;
- case STRING_BUFFER:
- _my_source_br = null;
- _my_source_sbuff = ( StringBuffer ) _nhx_source;
- break;
- case STRING_BUILDER:
- _my_source_br = null;
- _my_source_sbuil = ( StringBuilder ) _nhx_source;
- break;
case CHAR_ARRAY:
_my_source_br = null;
_my_source_charary = ( char[] ) _nhx_source;
_allow_errors_in_distance_to_parent = allow_errors_in_distance_to_parent;
}
- private final void determineSourceType( final Object nhx_source ) throws IOException {
+ private final void determineAndProcessSourceType( final Object nhx_source ) throws IOException {
if ( nhx_source == null ) {
throw new PhylogenyParserException( getClass() + ": attempt to parse null object." );
}
else if ( nhx_source instanceof String ) {
- _input_type = NHXParser.STRING;
- _source_length = ( ( String ) nhx_source ).length();
- _nhx_source = nhx_source;
- }
- else if ( nhx_source instanceof StringBuilder ) {
- _input_type = NHXParser.STRING_BUILDER;
- _source_length = ( ( StringBuilder ) nhx_source ).length();
- _nhx_source = nhx_source;
- }
- else if ( nhx_source instanceof StringBuffer ) {
- _input_type = NHXParser.STRING_BUFFER;
- _source_length = ( ( StringBuffer ) nhx_source ).length();
- _nhx_source = nhx_source;
- }
- else if ( nhx_source instanceof StringBuilder ) {
- _input_type = NHXParser.STRING_BUILDER;
- _source_length = ( ( StringBuilder ) nhx_source ).length();
_nhx_source = nhx_source;
+ _input_type = NHXParser.BUFFERED_READER;
+ _source_length = 0;
+ InputStream is = new ByteArrayInputStream( (( String ) nhx_source ).getBytes(getEncoding()));
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
+ _nhx_source = new BufferedReader( isr );
}
else if ( nhx_source instanceof char[] ) {
_input_type = NHXParser.CHAR_ARRAY;
_input_type = NHXParser.BUFFERED_READER;
_source_length = 0;
if ( _my_source_br != null ) {
- try {
- _my_source_br.close();
- }
- catch ( final IOException e ) {
- }
+ //I am REALLY not sure if it is a "good" idea NOT to close the stream...
+ // try {
+ // _my_source_br.close();
+ // }
+ // catch ( final IOException e ) {
+ // }
}
final File f = ( File ) nhx_source;
final String error = ForesterUtil.isReadableFile( f );
if ( !ForesterUtil.isEmpty( error ) ) {
throw new PhylogenyParserException( error );
}
- _nhx_source = new BufferedReader( new FileReader( f ) );
+ final InputStream is = new FileInputStream( f );
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
+ _nhx_source = new BufferedReader( isr );
}
else if ( nhx_source instanceof URL ) {
_input_type = NHXParser.BUFFERED_READER;
_source_length = 0;
if ( _my_source_br != null ) {
- try {
- _my_source_br.close();
- }
- catch ( final IOException e ) {
- }
- }
- final InputStreamReader isr = new InputStreamReader( ( ( URL ) nhx_source ).openStream() );
+ //I am REALLY not sure if it is a "good" idea NOT to close the stream...
+ // try {
+ // _my_source_br.close();
+ // }
+ // catch ( final IOException e ) {
+ // }
+ }
+ final InputStream is = ( ( URL ) nhx_source ).openStream();
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
_nhx_source = new BufferedReader( isr );
}
else if ( nhx_source instanceof InputStream ) {
_input_type = NHXParser.BUFFERED_READER;
_source_length = 0;
if ( _my_source_br != null ) {
- try {
- _my_source_br.close();
- }
- catch ( final IOException e ) {
- }
+ //I am REALLY not sure if it is a "good" idea NOT to close the stream...
+ // try {
+ // _my_source_br.close();
+ // }
+ // catch ( final IOException e ) {
+ // }
}
- final InputStreamReader isr = new InputStreamReader( ( InputStream ) nhx_source );
- _nhx_source = new BufferedReader( isr );
+ final InputStream is = ( InputStream ) nhx_source;
+ final InputStreamReader isr = new InputStreamReader( is, getEncoding() );
+ _nhx_source = new BufferedReader( isr );
}
else {
throw new IllegalArgumentException( getClass() + " can only parse objects of type String,"
- + " StringBuffer, StringBuilder, char[], File, InputStream, or URL "
+ + " char[], File, InputStream, or URL "
+ " [attempt to parse object of " + nhx_source.getClass() + "]." );
}
}
private final Phylogeny finishPhylogeny() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
if ( _current_phylogeny != null ) {
parseNHX( _current_anotation != null ? _current_anotation.toString() : "",
- _current_phylogeny.getRoot(),
- getTaxonomyExtraction(),
- isReplaceUnderscores(),
- isAllowErrorsInDistanceToParent(),
- true );
+ _current_phylogeny.getRoot(),
+ getTaxonomyExtraction(),
+ isReplaceUnderscores(),
+ isAllowErrorsInDistanceToParent(),
+ true,
+ isParseBeastStyleExtendedTags());
if ( GUESS_IF_SUPPORT_VALUES ) {
if ( isBranchLengthsLikeBootstrapValues( _current_phylogeny ) ) {
moveBranchLengthsToConfidenceValues( _current_phylogeny );
}
private final Phylogeny finishSingleNodePhylogeny() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
final PhylogenyNode new_node = new PhylogenyNode();
parseNHX( _current_anotation.toString(),
new_node,
getTaxonomyExtraction(),
isReplaceUnderscores(),
isAllowErrorsInDistanceToParent(),
- true );
+ true,
+ isParseBeastStyleExtendedTags());
_current_phylogeny = new Phylogeny();
_current_phylogeny.setRoot( new_node );
return _current_phylogeny;
setGuessRootedness( GUESS_ROOTEDNESS_DEFAULT );
setIgnoreQuotes( IGNORE_QUOTES_DEFAULT );
setAllowErrorsInDistanceToParent( ALLOW_ERRORS_IN_DISTANCE_TO_PARENT_DEFAULT );
+ setParseBeastStyleExtendedTags( false );
}
private final boolean isAllowErrorsInDistanceToParent() {
if ( _i >= _source_length ) {
break;
}
- else {
- switch ( _input_type ) {
- case STRING:
- c = _my_source_str.charAt( _i );
- break;
- case STRING_BUFFER:
- c = _my_source_sbuff.charAt( _i );
- break;
- case STRING_BUILDER:
- c = _my_source_sbuil.charAt( _i );
- break;
- case CHAR_ARRAY:
- c = _my_source_charary[ _i ];
- break;
- }
- }
+ c = _my_source_charary[ _i ];
}
if ( !_in_single_quote && !_in_double_quote ) {
if ( c == ':' ) {
_saw_colon = true;
}
- else if ( !( ( c < 33 ) || ( c > 126 ) ) && _saw_colon
+ else if ( !( ( c < 33 ) || ( c == 127 ) ) && _saw_colon
&& ( ( c != '[' ) && ( c != '.' ) && ( ( c < 48 ) || ( c > 57 ) ) ) ) {
_saw_colon = false;
}
}
}
// \n\t is always ignored,
- // as is " (34) and ' (39) (space is 32):
- if ( ( isIgnoreQuotes() && ( ( c < 33 ) || ( c > 126 ) || ( c == 34 ) || ( c == 39 ) || ( ( _clade_level == 0 ) && ( c == ';' ) ) ) )
- || ( !isIgnoreQuotes() && ( ( c < 32 ) || ( c > 126 ) || ( ( _clade_level == 0 ) && ( c == ';' ) ) ) ) ) {
- //do nothing
- }
- else if ( ( c == 32 ) && ( !_in_single_quote && !_in_double_quote ) ) {
+ // "=34 '=39 space=32
+ if ( ( c < 32 ) || ( c == 127 ) || ( isIgnoreQuotes() && ( ( c == 32 ) || ( c == 34 ) || ( c == 39 ) ) )
+ || ( ( c == 32 ) && ( !_in_single_quote && !_in_double_quote ) )
+ || ( ( _clade_level == 0 ) && ( c == ';' ) && ( !_in_single_quote && !_in_double_quote ) ) ) {
//do nothing
}
else if ( _in_comment ) {
_in_double_quote = false;
}
else {
- _current_anotation.append( c != ':' ? c : BELL );
+ _current_anotation.append( changeCharInParens( c ) );
}
}
- else if ( c == '"' ) {
+ else if ( ( c == '"' ) && !_in_single_quote ) {
_in_double_quote = true;
}
else if ( _in_single_quote ) {
_in_single_quote = false;
}
else {
- _current_anotation.append( c != ':' ? c : BELL );
+ _current_anotation.append( changeCharInParens( c ) );
}
}
else if ( c == 39 ) {
final Phylogeny phy = processOpenParen();
if ( phy != null ) {
++_i;
- // return phy;
_next = phy;
return;
}
_current_anotation.append( c );
}
++_i;
- } // while ( true )
+ } // while ( true )
if ( _clade_level != 0 ) {
throw new PhylogenyParserException( "error in NH (Newick) formatted data: most likely cause: number of open parens does not equal number of close parens" );
}
}
}
+ private final static char changeCharInParens( char c ) {
+ if ( c == ':' ) {
+ c = BELL;
+ }
+ else if ( c == '[' ) {
+ c = '{';
+ }
+ else if ( c == ']' ) {
+ c = '}';
+ }
+ return c;
+ }
+
private final void processCloseParen() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
if ( _clade_level < 0 ) {
throw new PhylogenyParserException( "error in NH (Newick)/NHX formatted data: most likely cause: number of close parens is larger than number of open parens" );
}
getTaxonomyExtraction(),
isReplaceUnderscores(),
isAllowErrorsInDistanceToParent(),
- true );
+ true,
+ isParseBeastStyleExtendedTags());
_current_anotation = new StringBuilder();
_current_node.addAsChild( new_node );
}
getTaxonomyExtraction(),
isReplaceUnderscores(),
isAllowErrorsInDistanceToParent(),
- true );
+ true,
+ isParseBeastStyleExtendedTags());
_current_anotation = new StringBuilder();
}
if ( !_current_node.isRoot() ) {
getTaxonomyExtraction(),
isReplaceUnderscores(),
isAllowErrorsInDistanceToParent(),
- true );
+ true,
+ isParseBeastStyleExtendedTags());
if ( _current_node == null ) {
throw new NHXFormatException( "format might not be NH or NHX" );
}
getTaxonomyExtraction(),
isReplaceUnderscores(),
isAllowErrorsInDistanceToParent(),
- true );
+ true,
+ isParseBeastStyleExtendedTags());
}
_current_anotation = new StringBuilder();
_saw_closing_paren = false;
}
private final Phylogeny processOpenParen() throws PhylogenyParserException, NHXFormatException,
- PhyloXmlDataFormatException {
+ PhyloXmlDataFormatException {
Phylogeny phy = null;
final PhylogenyNode new_node = new PhylogenyNode();
if ( _clade_level == 0 ) {
return phy;
}
- public final static NHXParser createInstance( final Object nhx_source ) throws NHXFormatException, IOException {
+ private final static NHXParser createInstance( final Object nhx_source ) throws NHXFormatException, IOException {
final NHXParser parser = new NHXParser();
parser.setSource( nhx_source );
return parser;
final TAXONOMY_EXTRACTION taxonomy_extraction,
final boolean replace_underscores,
final boolean allow_errors_in_distance_to_parent,
- final boolean replace_bell ) throws NHXFormatException,
- PhyloXmlDataFormatException {
+ final boolean replace_bell,
+ final boolean parse_beast_style_extended_tags ) throws NHXFormatException,
+ PhyloXmlDataFormatException {
if ( ( taxonomy_extraction != TAXONOMY_EXTRACTION.NO ) && replace_underscores ) {
throw new IllegalArgumentException( "cannot extract taxonomies and replace under scores at the same time" );
}
else if ( s.indexOf( "prob=" ) > -1 ) {
processMrBayes3Data( s, node_to_annotate );
}
+ if ( parse_beast_style_extended_tags ) {
+ processBeastStyleExtendedData( s, node_to_annotate );
+ }
+ final Matcher ewn_matcher = ENDS_WITH_NUMBER_PATTERN.matcher( s );
+ if ( ewn_matcher.find() ) {
+ b = ewn_matcher.group(1);
+ }
}
s = s.substring( 0, ob ) + b;
if ( ( s.indexOf( "[" ) > -1 ) || ( s.indexOf( "]" ) > -1 ) ) {
}
}
final StringTokenizer t = new StringTokenizer( s, ":" );
+
if ( t.countTokens() > 0 ) {
if ( !s.startsWith( ":" ) ) {
if ( ( s.indexOf( BELL ) <= -1 ) || !replace_bell ) {
node_to_annotate.getNodeData().setSequence( new Sequence() );
}
node_to_annotate.getNodeData().getSequence()
- .setAccession( new Accession( s.substring( 3 ), "?" ) );
+ .setAccession( new Accession( s.substring( 3 ), "?" ) );
}
else if ( s.startsWith( NHXtags.GENE_NAME ) ) {
if ( !node_to_annotate.getNodeData().isHasSequence() ) {
}
node_to_annotate.getNodeData().getSequence().setName( s.substring( 3 ) );
}
- } // while ( t.hasMoreTokens() )
+ } // while ( t.hasMoreTokens() )
}
}
}
}
}
+ private final static void processBeastStyleExtendedData( final String s,
+ final PhylogenyNode node_to_annotate )
+ throws NHXFormatException {
+ final Matcher ft_bs_matcher = BEAST_STYLE_EXTENDED_BOOTSTRAP_PATTERN.matcher( s );
+
+ double bs = -1;
+ if ( ft_bs_matcher.find() ) {
+ try {
+ bs = Double.parseDouble( ft_bs_matcher.group( 1 ) );
+ }
+ catch ( final NumberFormatException e ) {
+ throw new NHXFormatException( "failed to parse bootstrap support from \""
+ + s + "\"" );
+ }
+ if ( bs >= 0.0 ) {
+ node_to_annotate.getBranchData()
+ .addConfidence( new Confidence( bs, "bootstrap" ) );
+ }
+ }
+ final Matcher ft_color_matcher = BEAST_STYLE_EXTENDED_COLOR_PATTERN.matcher( s );
+ Color c = null;
+ if ( ft_color_matcher.find() ) {
+ try {
+ c = Color.decode(ft_color_matcher.group( 1 ) );
+ }
+ catch ( final NumberFormatException e ) {
+ throw new NHXFormatException( "failed to parse color from \""
+ + s + "\"" );
+ }
+ }
+ if ( c != null ) {
+ node_to_annotate.getBranchData().setBranchColor( new BranchColor( c ) );
+ }
+ }
+
private final static void processMrBayes3Data( final String s, final PhylogenyNode node_to_annotate )
throws NHXFormatException {
double sd = -1;
if ( prob >= 0.0 ) {
if ( sd >= 0.0 ) {
node_to_annotate.getBranchData()
- .addConfidence( new Confidence( prob, "posterior probability", sd ) );
+ .addConfidence( new Confidence( prob, "posterior probability", sd ) );
}
else {
node_to_annotate.getBranchData().addConfidence( new Confidence( prob, "posterior probability" ) );
}
catch ( final NumberFormatException e ) {
throw new NHXFormatException( "failed to parse median branch length (Mr Bayes output) from \"" + s
- + "\"" );
+ + "\"" );
}
if ( bl >= 0.0 ) {
node_to_annotate.setDistanceToParent( bl );
}
}
// Returns the character encoding name used when reading byte-oriented
// sources; set once at construction (defaults to UTF-8).
public String getEncoding() {
    return _encoding;
}
+
// Whether BEAST-style extended annotations ("bootstrap=", "color=")
// should be parsed into branch data; off by default.
private final boolean isParseBeastStyleExtendedTags() {
    return _parse_beast_style_extended_tags;
}
+
// Enables or disables parsing of BEAST-style extended annotations
// ("bootstrap=<number>", "color=#RRGGBB") into confidence / branch-color data.
public final void setParseBeastStyleExtendedTags( final boolean parse_beast_style_extended_tags ) {
    _parse_beast_style_extended_tags = parse_beast_style_extended_tags;
}
+
// Strategies for extracting taxonomy information from node names.
// NO disables extraction; AGGRESSIVE is the most permissive mode; the two
// PFAM_STYLE_* modes presumably expect Pfam-style "NAME/start-end" labels
// with relaxed vs. strict matching -- TODO confirm exact semantics against
// the extraction code (not visible in this chunk).
public static enum TAXONOMY_EXTRACTION {
    AGGRESSIVE, NO, PFAM_STYLE_RELAXED, PFAM_STYLE_STRICT;
}