public class rio {
- public final static String PRG_NAME = "rio";
- public final static String PRG_VERSION = "5.000";
- public final static String PRG_DATE = "170411";
+ public final static String PRG_NAME = "rio";
+ public final static String PRG_VERSION = "5.000";
+ public final static String PRG_DATE = "170411";
final static private String E_MAIL = "phyloxml@gmail.com";
final static private String WWW = "https://sites.google.com/site/cmzmasek/home/software/forester";
final static private String HELP_OPTION_1 = "help";
final static private String LOGFILE_SUFFIX = "_RIO_log.tsv";
final static private String STRIPPED_SPECIES_TREE_SUFFIX = "_RIO_sst.xml";
final static private String ORTHO_OUTTABLE_SUFFIX = "_RIO_orthologies.tsv";
+ final static private String ORTHO_OUTTABLE_WITH_MAP_SUFFIX = "_RIO_orthologies_ext_map.tsv";
final static private String OUT_MIN_DUP_GENE_TREE_SUFFIX = "_RIO_gene_tree_min_dup_";
final static private String OUT_MED_DUP_GENE_TREE_SUFFIX = "_RIO_gene_tree_med_dup_";
final static private String ORTHOLOG_GROUPS_SUFFIX = "_RIO_ortholog_groups.tsv";
log.print( "\t" );
log.print( "EXT NODES" );
log.print( "\t" );
- log.print( ortholog_group_cutoff + " O GROUPS" );
+ log.print( ortholog_group_cutoff + " O GROUPS" );
log.print( "\t" );
log.print( "0.05 O GROUPS" );
log.print( "\t" );
outname = outname.substring( 0, outname.lastIndexOf( "." ) );
}
try {
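+ // configuration for the optional ID-name mapping step; these appear to be hardcoded defaults
+ // (mapping files ending in ".nim" are read from a "mappings" directory)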
+ boolean perform_id_mapping = true;
+ File id_mapping_dir = new File( "mappings" );
+ String id_mapping_suffix = ".nim";
RIOUtil.executeAnalysis( gf,
- species_tree_file,
- new File( outdir.getCanonicalFile() + "/" + outname + ORTHO_OUTTABLE_SUFFIX ),
- new File( outdir.getCanonicalFile() + "/" + outname + ORTHOLOG_GROUPS_SUFFIX ),
- new File( outdir.getCanonicalFile() + "/" + outname + LOGFILE_SUFFIX ),
- outgroup,
- rerooting,
- gt_first,
- gt_last,
- new File( outdir.getCanonicalFile() + "/" + outname
- + STRIPPED_SPECIES_TREE_SUFFIX ),
- new File( outdir.getCanonicalFile() + "/" + outname
- + OUT_MIN_DUP_GENE_TREE_SUFFIX ),
- new File( outdir.getCanonicalFile() + "/" + outname
- + OUT_MED_DUP_GENE_TREE_SUFFIX ),
- true,
- algorithm,
- true,
- log,
- ortholog_group_cutoff );
+ species_tree_file,
+ new File( outdir.getCanonicalFile() + "/" + outname
+ + ORTHO_OUTTABLE_SUFFIX ),
+ new File( outdir.getCanonicalFile() + "/" + outname
+ + ORTHO_OUTTABLE_WITH_MAP_SUFFIX ),
+ new File( outdir.getCanonicalFile() + "/" + outname
+ + ORTHOLOG_GROUPS_SUFFIX ),
+ new File( outdir.getCanonicalFile() + "/" + outname + LOGFILE_SUFFIX ),
+ outgroup,
+ rerooting,
+ gt_first,
+ gt_last,
+ new File( outdir.getCanonicalFile() + "/" + outname
+ + STRIPPED_SPECIES_TREE_SUFFIX ),
+ new File( outdir.getCanonicalFile() + "/" + outname
+ + OUT_MIN_DUP_GENE_TREE_SUFFIX ),
+ new File( outdir.getCanonicalFile() + "/" + outname
+ + OUT_MED_DUP_GENE_TREE_SUFFIX ),
+ true,
+ algorithm,
+ true,
+ log,
+ ortholog_group_cutoff,
+ perform_id_mapping,
+ id_mapping_dir,
+ id_mapping_suffix );
}
catch ( IOException e ) {
ForesterUtil.fatalError( PRG_NAME, e.getLocalizedMessage() );
outname = outname.substring( 0, outname.lastIndexOf( "." ) );
}
RIOUtil.executeAnalysis( gene_trees_file,
- species_tree_file,
- orthology_outtable,
- new File( outname + ORTHOLOG_GROUPS_SUFFIX ),
- logfile,
- outgroup,
- rerooting,
- gt_first,
- gt_last,
- new File( outname + STRIPPED_SPECIES_TREE_SUFFIX ),
- new File( outname + OUT_MIN_DUP_GENE_TREE_SUFFIX ),
- new File( outname + OUT_MED_DUP_GENE_TREE_SUFFIX ),
- algorithm == ALGORITHM.GSDIR,
- algorithm,
- false,
- null,
- ortholog_group_cutoff );
+ species_tree_file,
+ orthology_outtable,
+ null,
+ new File( outname + ORTHOLOG_GROUPS_SUFFIX ),
+ logfile,
+ outgroup,
+ rerooting,
+ gt_first,
+ gt_last,
+ new File( outname + STRIPPED_SPECIES_TREE_SUFFIX ),
+ new File( outname + OUT_MIN_DUP_GENE_TREE_SUFFIX ),
+ new File( outname + OUT_MED_DUP_GENE_TREE_SUFFIX ),
+ algorithm == ALGORITHM.GSDIR,
+ algorithm,
+ false,
+ null,
+ ortholog_group_cutoff,
+ false,
+ null,
+ null );
}
if ( !use_dir ) {
time = System.currentTimeMillis() - time;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeSet;
import org.forester.io.parsers.util.ParserUtils;
import org.forester.io.writers.PhylogenyWriter;
import org.forester.phylogeny.Phylogeny;
+import org.forester.phylogeny.PhylogenyNode;
+import org.forester.phylogeny.data.Sequence;
+import org.forester.phylogeny.iterators.PhylogenyNodeIterator;
import org.forester.rio.RIO.REROOTING;
import org.forester.sdi.SDIException;
import org.forester.sdi.SDIutil.ALGORITHM;
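+// When perform_id_mapping is true, a tab-separated name mapping is read from id_mapping_dir
+// (the file whose name matches the gene-trees file name and ends with id_mapping_suffix) and
+// applied to the orthology tables, the ortholog groups, and the written min/median duplication gene trees.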
public static final void executeAnalysis( final File gene_trees_file,
final File species_tree_file,
final File orthology_outtable,
+ final File orthology_outtable_with_mappings,
final File orthology_groups_outfile,
final File logfile,
final String outgroup,
final ALGORITHM algorithm,
final boolean use_gene_trees_dir,
final EasyWriter log,
- final double ortholog_group_cutoff ) {
+ final double ortholog_group_cutoff,
+ final boolean perform_id_mapping,
+ final File id_mapping_dir,
+ final String id_mapping_suffix ) {
try {
+ final SortedMap<String, String> id_map;
+ if ( perform_id_mapping ) {
+ id_map = obtainMapping( id_mapping_dir, gene_trees_file.getName(), id_mapping_suffix );
+ }
+ else {
+ id_map = null;
+ }
final RIO rio;
boolean iterating = false;
final PhylogenyParser p = ParserUtils.createParserDependingOnFileType( gene_trees_file, true );
System.out.println( "Taxonomy linking based on :\t" + rio.getGSDIRtaxCompBase() );
}
}
- ///
- ////
final IntMatrix m;
if ( iterating ) {
m = rio.getOrthologTable();
m = RIO.calculateOrthologTable( rio.getAnalyzedGeneTrees(), true );
}
final BasicDescriptiveStatistics stats = rio.getDuplicationsStatistics();
- writeTable( orthology_outtable, stats.getN(), m, !use_gene_trees_dir );
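+ // with ID mapping enabled, write the orthology table with labels replaced by their mapped names,
+ // plus a second table that keeps the original labels and appends the label-to-name mapping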
+ if ( perform_id_mapping ) {
+ writeOrthologyTable( orthology_outtable, stats.getN(), m, !use_gene_trees_dir, id_map, true );
+ writeOrthologyTable( orthology_outtable_with_mappings,
+ stats.getN(),
+ m,
+ !use_gene_trees_dir,
+ id_map,
+ false );
+ }
+ else {
+ writeOrthologyTable( orthology_outtable, stats.getN(), m, !use_gene_trees_dir, null, false );
+ }
final int ortholog_groups = writeOrtologGroups( orthology_groups_outfile,
ortholog_group_cutoff,
stats.getN(),
m,
!use_gene_trees_dir,
- false );
- final int ortholog_groups_005 = writeOrtologGroups( null, 0.05, stats.getN(), m, false, true );
- final int ortholog_groups_025 = writeOrtologGroups( null, 0.25, stats.getN(), m, false, true );
- final int ortholog_groups_05 = writeOrtologGroups( null, 0.5, stats.getN(), m, false, true );
- final int ortholog_groups_075 = writeOrtologGroups( null, 0.75, stats.getN(), m, false, true );
- final int ortholog_groups_095 = writeOrtologGroups( null, 0.95, stats.getN(), m, false, true );
+ false,
+ id_map );
+ final int ortholog_groups_005 = writeOrtologGroups( null, 0.05, stats.getN(), m, false, true, null );
+ final int ortholog_groups_025 = writeOrtologGroups( null, 0.25, stats.getN(), m, false, true, null );
+ final int ortholog_groups_05 = writeOrtologGroups( null, 0.5, stats.getN(), m, false, true, null );
+ final int ortholog_groups_075 = writeOrtologGroups( null, 0.75, stats.getN(), m, false, true, null );
+ final int ortholog_groups_095 = writeOrtologGroups( null, 0.95, stats.getN(), m, false, true, null );
if ( ( algorithm != ALGORITHM.SDIR ) && ( logfile != null ) ) {
writeLogFile( logfile,
rio,
if ( return_species_tree != null ) {
writeTree( rio.getSpeciesTree(),
return_species_tree,
- use_gene_trees_dir ? null : "Wrote (stripped) species tree to :\t" );
+ use_gene_trees_dir ? null : "Wrote (stripped) species tree to :\t",
+ null );
}
if ( return_min_dup_gene_tree != null && rio.getMinDuplicationsGeneTree() != null ) {
final int min = ( int ) rio.getDuplicationsStatistics().getMin();
writeTree( rio.getMinDuplicationsGeneTree(),
new File( return_min_dup_gene_tree.toString() + min + ".xml" ),
- use_gene_trees_dir ? null : "Wrote one min duplication gene tree :\t" );
+ use_gene_trees_dir ? null : "Wrote one min duplication gene tree :\t",
+ id_map );
}
if ( return_median_dup_gene_tree != null && rio.getDuplicationsToTreeMap() != null ) {
final int med = ( int ) rio.getDuplicationsStatistics().median();
writeTree( rio.getDuplicationsToTreeMap().get( med ),
new File( return_median_dup_gene_tree.toString() + med + ".xml" ),
- use_gene_trees_dir ? null : "Wrote one med duplication gene tree :\t" );
+ use_gene_trees_dir ? null : "Wrote one med duplication gene tree :\t",
+ id_map );
}
final java.text.DecimalFormat df = new java.text.DecimalFormat( "0.##" );
final int min = ( int ) stats.getMin();
}
}
- private static final void writeTable( final File table_outfile,
- final int gene_trees_analyzed,
- final IntMatrix m,
- final boolean verbose )
+ private static final void writeOrthologyTable( final File table_outfile,
+ final int gene_trees_analyzed,
+ final IntMatrix m,
+ final boolean verbose,
+ final SortedMap<String, String> id_map,
+ final boolean replace_ids )
throws IOException {
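+ // replace_ids: write mapped names instead of the original matrix labels; every label must have a mapping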
final EasyWriter w = ForesterUtil.createEasyWriter( table_outfile );
final java.text.DecimalFormat df = new java.text.DecimalFormat( "0.####" );
df.setRoundingMode( RoundingMode.HALF_UP );
for( int i = 0; i < m.size(); ++i ) {
w.print( "\t" );
- w.print( m.getLabel( i ) );
+ if ( replace_ids ) {
+ if ( !id_map.containsKey( m.getLabel( i ) ) ) {
+ throw new IOException( "no id mapping for \"" + m.getLabel( i ) + "\" (attempting to write ["
+ + table_outfile + "])" );
+ }
+ w.print( id_map.get( m.getLabel( i ) ) );
+ }
+ else {
+ w.print( m.getLabel( i ) );
+ }
}
w.println();
for( int x = 0; x < m.size(); ++x ) {
- w.print( m.getLabel( x ) );
+ if ( replace_ids ) {
+ w.print( id_map.get( m.getLabel( x ) ) );
+ }
+ else {
+ w.print( m.getLabel( x ) );
+ }
for( int y = 0; y < m.size(); ++y ) {
w.print( "\t" );
if ( x == y ) {
}
w.println();
}
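+ // when labels are kept, append the full label-to-name mapping below the table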
+ if ( !replace_ids && id_map != null && id_map.size() > 0 ) {
+ w.println();
+ id_map.forEach( ( k, v ) -> {
+ try {
+ w.println( k + "\t" + v );
+ }
+ catch ( final IOException e ) {
+ //ignore
+ }
+ } );
+ }
w.close();
if ( verbose ) {
System.out.println( "Wrote table to :\t" + table_outfile.getCanonicalPath() );
final int gene_trees_analyzed,
final IntMatrix m,
final boolean verbose,
- final boolean calc_conly )
+ final boolean calc_conly,
+ final SortedMap<String, String> id_map )
throws IOException {
List<SortedSet<String>> groups = new ArrayList<SortedSet<String>>();
BasicDescriptiveStatistics stats = new BasicDescriptiveStatistics();
w.print( Integer.toString( counter++ ) );
for( final String s : group ) {
w.print( "\t" );
- w.print( s );
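+ // write the mapped name when an ID map is supplied; a missing mapping is an error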
+ if ( id_map != null && id_map.size() > 0 ) {
+ if ( !id_map.containsKey( s ) ) {
+ throw new IOException( "no id mapping for \"" + s + "\" (attempting to write [" + outfile
+ + "])" );
+ }
+ w.print( id_map.get( s ) );
+ }
+ else {
+ w.print( s );
+ }
}
w.println();
}
return groups.size();
}
- private static void writeTree( final Phylogeny p, final File f, final String comment ) throws IOException {
+ private static void writeTree( final Phylogeny p,
+ final File f,
+ final String comment,
+ final SortedMap<String, String> id_map )
+ throws IOException {
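+ // if an ID map is supplied, attach the mapped name to each external node as a sequence name
+ // before writing the tree in phyloXML format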
+ if ( id_map != null && id_map.size() > 0 ) {
+ final PhylogenyNodeIterator it = p.iteratorExternalForward();
+ while ( it.hasNext() ) {
+ final PhylogenyNode n = it.next();
+ if ( !id_map.containsKey( n.getName() ) ) {
+ throw new IOException( "no id mapping for \"" + n.getName() + "\" (attempting to write [" + f
+ + "])" );
+ }
+ final Sequence seq = new Sequence();
+ seq.setName( id_map.get( n.getName() ) );
+ n.getNodeData().addSequence( seq );
+ }
+ }
final PhylogenyWriter writer = new PhylogenyWriter();
writer.toPhyloXML( f, p, 0 );
if ( comment != null ) {
}
}
- private final static Map<String, String> obtainMapping( final File dir, final String prefix, final String suffix )
+ private final static SortedMap<String, String> obtainMapping( final File dir,
+ final String prefix,
+ final String suffix )
throws IOException {
if ( !dir.exists() ) {
throw new IOException( "[" + dir + "] does not exist" );
return ( name.endsWith( suffix ) );
}
} );
- String my_suffix = suffix;
+ if ( mapping_files.length == 0 ) {
+ throw new IOException( "no files ending with \"" + suffix + "\" found in [" + dir + "]" );
+ }
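+ // progressively shorten the gene-trees file name (extension removed) until it matches
+ // exactly one of the candidate mapping files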
+ String my_prefix = ForesterUtil.removeFileExtension( prefix );
boolean done = false;
+ boolean more_than_one = false;
+ File the_one = null;
do {
int matches = 0;
for( File file : mapping_files ) {
- if ( file.getName().equals( my_suffix ) ) {
+ if ( file.getName().startsWith( my_prefix ) ) {
matches++;
+ if ( matches > 1 ) {
+ the_one = null;
+ break;
+ }
+ the_one = file;
}
}
- if ( matches == 1) {
+ if ( matches > 1 ) {
+ more_than_one = true;
+ done = true;
+ }
+ if ( matches == 1 ) {
done = true;
}
else {
- my_suffix = my_suffix.substring( 0, my_suffix.length() - 1);
+ if ( my_prefix.length() <= 1 ) {
+ throw new IOException( "no file matching \"" + ForesterUtil.removeFileExtension( prefix )
+ + "\" and ending with \"" + suffix + "\" found in [" + dir + "]" );
+ }
+ my_prefix = my_prefix.substring( 0, my_prefix.length() - 1 );
}
- } while (!done );
-
-
- if ( mapping_files.length == 0 ) {
- throw new IOException( "file with prefix \"" + prefix + "\" and suffix \"" + suffix + "\" not found in ["
- + dir + "] " );
+ } while ( !done );
+ if ( more_than_one ) {
+ throw new IOException( "multiple files matching \"" + ForesterUtil.removeFileExtension( prefix )
+ + "\" and ending with \"" + suffix + "\" found in [" + dir + "]" );
+ }
+ else if ( the_one != null ) {
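+ // exactly one matching mapping file was found; parse it below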
}
- if ( mapping_files.length > 1 ) {
- throw new IOException( "file with prefix \"" + prefix + "\" and suffix \"" + suffix + "\" not unique in ["
- + dir + "] " );
+ else {
+ throw new IOException( "no file matching \"" + ForesterUtil.removeFileExtension( prefix )
+ + "\" and ending with \"" + suffix + "\" found in [" + dir + "]" );
}
- final BasicTable<String> t = BasicTableParser.parse( mapping_files[ 0 ], '\t' );
+ final BasicTable<String> t = BasicTableParser.parse( the_one, '\t' );
return t.getColumnsAsMap( 0, 1 );
}
}
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
public class BasicTable<E> {
return result;
}
- public Map<String, E> getColumnsAsMap( final int key_col, final int value_col ) throws IllegalArgumentException {
- final Map<String, E> map = new HashMap<String, E>();
+ public SortedMap<String, E> getColumnsAsMap( final int key_col, final int value_col ) throws IllegalArgumentException {
+ final SortedMap<String, E> map = new TreeMap<String, E>();
for( int row = 0; row < getNumberOfRows(); ++row ) {
final String key = ( String ) getValue( key_col, row );
final E value = getValue( value_col, row );
}
}
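+ // returns the file name without its last extension; names with no '.' (or a leading '.')
+ // are returned unchanged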
+ final public static String removeFileExtension( final String file_name ) {
+ if ( file_name.indexOf( "." ) > 0 ) {
+ return file_name.substring( 0, file_name.lastIndexOf( "." ) );
+ }
+ return file_name;
+ }
+
/**
* This calculates a color. If value is equal to min the returned color is
* minColor, if value is equal to max the returned color is maxColor,
final private static int calculateColorComponent( final double smallercolor_component_x,
final double largercolor_component_x,
final double x ) {
- return ( int ) ( smallercolor_component_x + ( ( x * ( largercolor_component_x - smallercolor_component_x ) ) / 255.0 ) );
+ return ( int ) ( smallercolor_component_x
+ + ( ( x * ( largercolor_component_x - smallercolor_component_x ) ) / 255.0 ) );
}
/**
final public static void map2file( final File file,
final Map<?, ?> data,
final String entry_separator,
- final String data_separator ) throws IOException {
+ final String data_separator )
+ throws IOException {
final Writer writer = new BufferedWriter( new FileWriter( file ) );
map2writer( writer, data, entry_separator, data_separator );
writer.close();
final public static void map2writer( final Writer writer,
final Map<?, ?> data,
final String entry_separator,
- final String data_separator ) throws IOException {
+ final String data_separator )
+ throws IOException {
boolean first = true;
for( final Entry<?, ?> entry : data.entrySet() ) {
if ( !first ) {
}
}
- final public static StringBuffer mapToStringBuffer( final Map<Object, Object> map, final String key_value_separator ) {
+ final public static StringBuffer mapToStringBuffer( final Map<Object, Object> map,
+ final String key_value_separator ) {
final StringBuffer sb = new StringBuffer();
for( final Object key : map.keySet() ) {
sb.append( key.toString() );
return TaxonomyGroups.ALPHAHERPESVIRINAE;
}
else if ( tax.equalsIgnoreCase( TaxonomyGroups.BETAHERPESVIRINAE ) ) {
- return TaxonomyGroups.BETAHERPESVIRINAE ;
+ return TaxonomyGroups.BETAHERPESVIRINAE;
}
else if ( tax.equalsIgnoreCase( TaxonomyGroups.GAMMAHERPESVIRINAE ) ) {
return TaxonomyGroups.GAMMAHERPESVIRINAE;
}
else {
throw new IllegalArgumentException( "attempt to parse object of type [" + source.getClass()
- + "] (can only parse objects of type File, InputStream, String, or StringBuffer)" );
+ + "] (can only parse objects of type File, InputStream, String, or StringBuffer)" );
}
return reader;
}
System.exit( -1 );
}
- final public static StringBuffer pad( final double number, final int size, final char pad, final boolean left_pad ) {
+ final public static StringBuffer pad( final double number,
+ final int size,
+ final char pad,
+ final boolean left_pad ) {
return pad( new StringBuffer( number + "" ), size, pad, left_pad );
}
- final public static StringBuffer pad( final String string, final int size, final char pad, final boolean left_pad ) {
+ final public static StringBuffer pad( final String string,
+ final int size,
+ final char pad,
+ final boolean left_pad ) {
return pad( new StringBuffer( string ), size, pad, left_pad );
}
System.err.println( "[" + prg_name + "] > error: " + message );
}
- final public static void printProgramInformation( final String prg_name, final String prg_version, final String date ) {
+ final public static void printProgramInformation( final String prg_name,
+ final String prg_version,
+ final String date ) {
final int l = prg_name.length() + prg_version.length() + date.length() + 4;
System.out.println();
System.out.println( prg_name + " " + prg_version + " (" + date + ")" );
}
if ( !ForesterUtil.isEmpty( ForesterUtil.JAVA_VERSION ) && !ForesterUtil.isEmpty( ForesterUtil.JAVA_VENDOR ) ) {
System.out.println();
- System.out.println( "[running on Java " + ForesterUtil.JAVA_VERSION + " " + ForesterUtil.JAVA_VENDOR + "]" );
+ System.out
+ .println( "[running on Java " + ForesterUtil.JAVA_VERSION + " " + ForesterUtil.JAVA_VENDOR + "]" );
}
System.out.println();
}
public static Protein removeOverlappingDomains( final int max_allowed_overlap,
final boolean remove_engulfed_domains,
final Protein protein ) {
- final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(), protein.getSpecies()
- .getSpeciesId(), protein.getLength() );
+ final Protein pruned_protein = new BasicProtein( protein.getProteinId().getId(),
+ protein.getSpecies().getSpeciesId(),
+ protein.getLength() );
final List<Domain> sorted = SurfacingUtil.sortDomainsWithAscendingConfidenceValues( protein );
final List<Boolean> covered_positions = new ArrayList<Boolean>();
for( final Domain domain : sorted ) {
- if ( ( ( max_allowed_overlap < 0 ) || ( ForesterUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
+ if ( ( ( max_allowed_overlap < 0 )
+ || ( ForesterUtil.calculateOverlap( domain, covered_positions ) <= max_allowed_overlap ) )
&& ( !remove_engulfed_domains || !isEngulfed( domain, covered_positions ) ) ) {
final int covered_positions_size = covered_positions.size();
for( int i = covered_positions_size; i < domain.getFrom(); ++i ) {
final public static void unexpectedFatalError( final String prg_name, final Exception e ) {
System.err.println();
System.err.println( "[" + prg_name
- + "] > unexpected error; should not have occured! Please contact program author(s)." );
+ + "] > unexpected error; should not have occured! Please contact program author(s)." );
e.printStackTrace( System.err );
System.err.println();
System.exit( -1 );
final public static void unexpectedFatalError( final String prg_name, final String message ) {
System.err.println();
System.err.println( "[" + prg_name
- + "] > unexpected error: should not have occured! Please contact program author(s)." );
+ + "] > unexpected error: should not have occured! Please contact program author(s)." );
System.err.println( message );
System.err.println();
System.exit( -1 );
final public static void unexpectedFatalError( final String prg_name, final String message, final Exception e ) {
System.err.println();
System.err.println( "[" + prg_name
- + "] > unexpected error: should not have occured! Please contact program author(s)." );
+ + "] > unexpected error: should not have occured! Please contact program author(s)." );
System.err.println( message );
e.printStackTrace( System.err );
System.err.println();
return sb.toString();
}
-
- public final static Phylogeny[] readPhylogeniesFromUrl( final URL url,
- final PhylogenyParser parser )
+ public final static Phylogeny[] readPhylogeniesFromUrl( final URL url, final PhylogenyParser parser )
throws NoSuchAlgorithmException, IOException, KeyManagementException {
if ( url == null ) {
throw new IllegalArgumentException( "URL to read from must not be null" );
throw new IllegalArgumentException( "parser to use to read from URL must not be null" );
}
final URLConnection con;
- if ( url.toString().startsWith( "https:" ) ) {
- con = TrustManager.makeHttpsURLConnection( url );
+ if ( url.toString().startsWith( "https:" ) ) {
+ con = TrustManager.makeHttpsURLConnection( url );
}
else if ( url.toString().startsWith( "http:" ) ) {
con = url.openConnection();
else {
throw new IllegalArgumentException( "Cannot deal with URL: " + url );
}
- if ( con == null ) {
+ if ( con == null ) {
throw new IOException( "could not create connection from " + url );
}
con.setDefaultUseCaches( false );
final InputStream is = con.getInputStream();
- if ( is == null ) {
+ if ( is == null ) {
throw new IOException( "could not create input stream from " + url );
}
final Phylogeny[] trees = ParserBasedPhylogenyFactory.getInstance().create( is, parser );
}
return trees;
}
-
+
private ForesterUtil() {
}
}