From: Fábio Madeira
Date: Tue, 11 Apr 2017 16:44:30 +0000 (+0100)
Subject: JWS-114 Bumping varied config files and jetbrains xmls. Also adding some java classes...
X-Git-Url: http://source.jalview.org/gitweb/?a=commitdiff_plain;h=cbb1f92f62667a9f2f85cfa9c95e2f00826d64ac;p=jabaws.git

JWS-114 Bumping varied config files and jetbrains xmls. Also adding some java classes that are generated during the compilation.
---
diff --git a/.classpath b/.classpath
index 46b788d..11405ea 100644
--- a/.classpath
+++ b/.classpath
@@ -16,6 +16,7 @@
[classpathentry elements not preserved in this extract]
diff --git a/.idea/kotlinc.xml b/.idea/kotlinc.xml
new file mode 100644
index 0000000..1c24f9a
--- /dev/null
+++ b/.idea/kotlinc.xml
@@ -0,0 +1,7 @@
[new Kotlin compiler settings file; XML elements not preserved in this extract]
\ No newline at end of file
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
index 863a1f6..3169d8a 100644
--- a/.idea/workspace.xml
+++ b/.idea/workspace.xml
[IDE workspace state (run configurations, editor history); XML elements not preserved in this extract]
diff --git a/ExecutionStatistic/log/log11.dat b/ExecutionStatistic/log/log11.dat
new file mode 100644
index 0000000..ede267a
Binary files /dev/null and b/ExecutionStatistic/log/log11.dat differ
diff --git a/WEB-INF/classes/lib/testng-5.10-jdk15.jar b/WEB-INF/classes/lib/testng-5.10-jdk15.jar
new file mode 100644
index 0000000..67463a7
Binary files /dev/null and b/WEB-INF/classes/lib/testng-5.10-jdk15.jar differ
diff --git a/WEB-INF/classes/log4j.properties.dundee b/WEB-INF/classes/log4j.properties.dundee
new file mode 100644
index 0000000..eee5a76
--- /dev/null
+++ b/WEB-INF/classes/log4j.properties.dundee
@@ -0,0 +1,51 @@
+
+# change this
+logDir =/homes/www-jws2/logs
+
+log4j.rootLogger=ERROR, stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%m%n
+
+log4j.logger.compbio=TRACE, ACTIVITY
+log4j.appender.ACTIVITY=org.apache.log4j.RollingFileAppender
+log4j.appender.ACTIVITY.File=${logDir}/activity.log
+log4j.appender.ACTIVITY.MaxFileSize=10MB
+log4j.appender.ACTIVITY.MaxBackupIndex=10000
+log4j.appender.ACTIVITY.layout=org.apache.log4j.PatternLayout
+log4j.appender.ACTIVITY.layout.ConversionPattern=%d{MM-dd@HH:mm:ss} %-5p %3x - %m%n
+
+log4j.logger.ClustalWS-stats=INFO, STAT
+log4j.logger.MuscleWS-stats=INFO, STAT
+log4j.logger.TcoffeeWS-stats=INFO, STAT
+log4j.logger.MafftWS-stats=INFO, STAT
+log4j.logger.ProbconsWS-stats=INFO, STAT
+log4j.appender.STAT=org.apache.log4j.RollingFileAppender
+log4j.appender.STAT.File=${logDir}/wsaccess.log
+log4j.appender.STAT.MaxFileSize=10MB
+log4j.appender.STAT.MaxBackupIndex=10000
+log4j.appender.STAT.layout=org.apache.log4j.PatternLayout
+log4j.appender.STAT.layout.ConversionPattern=%t %d %m%n
+
+
+# Uncomment for separate local engine execution log
+#log4j.logger.compbio.engine.local.LocalExecutorService=INFO, C
+#log4j.appender.C=org.apache.log4j.FileAppender
+#log4j.appender.C.File=LocalExecutorService.log
+#log4j.appender.C.layout=org.apache.log4j.PatternLayout
+#log4j.appender.C.layout.ConversionPattern=%m%n
+
+#Parameter combinator logger (testing only)
+#log4j.logger.RunnerLogger=ERROR, RUNNER
+#log4j.appender.RUNNER=org.apache.log4j.FileAppender
+#log4j.appender.RUNNER.File=RunnerParam.log
+#log4j.appender.RUNNER.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RUNNER.layout.ConversionPattern=%r [%t] %p %c %x - %m%n
+
+
+#Not used - collection of patterns
+# %d{ABSOLUTE} %5p %c{1}:%L -
+#log4j.appender.C.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+# %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x -
+#log4j.appender.ClustalWS-stats.layout.ConversionPattern=%p %d %t %C{4} %m%n
diff --git a/WEB-INF/classes/log4j.properties.full b/WEB-INF/classes/log4j.properties.full
new file mode 100644
index 0000000..a906597
--- /dev/null
+++ b/WEB-INF/classes/log4j.properties.full
@@ -0,0 +1,62 @@
+
+## CHANGE THIS (The root directory where to store all the log files)
+#logDir = .
+
+## Uncomment to enable JWS2 activity logging to standard out (to the console if available)
+## For possible log levels please refer to the Log4j documentation: http://logging.apache.org/log4j/1.2/manual.html
+## Valid log levels are:
+## TRACE - log everything from below including very detailed messages (useful for debugging only)
+## DEBUG - log everything from below including some minor events (useful for debugging only)
+## INFO - log everything from below including some information messages
+## WARN - log errors and warnings
+## ERROR - log errors and fatal events only
+## FATAL - log fatal events only
+
+#log4j.rootLogger=ERROR, stdout
+#log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+#log4j.appender.stdout.Target=System.out
+#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+#log4j.appender.stdout.layout.ConversionPattern=%m%n
+
+## Uncomment to enable JWS2 activity logging to the file
+#log4j.logger.compbio=ERROR, ACTIVITY
+#log4j.appender.ACTIVITY=org.apache.log4j.RollingFileAppender
+#log4j.appender.ACTIVITY.File=${logDir}/activity.log
+#log4j.appender.ACTIVITY.MaxFileSize=10MB
+#log4j.appender.ACTIVITY.MaxBackupIndex=10000
+#log4j.appender.ACTIVITY.layout=org.apache.log4j.PatternLayout
+#log4j.appender.ACTIVITY.layout.ConversionPattern=%d{MM-dd@HH:mm:ss} %-5p %3x - %m%n
+
+## Uncomment for web access logging. Please do not change the log level!
+#log4j.logger.ClustalWS-stats=INFO, STAT
+#log4j.logger.MuscleWS-stats=INFO, STAT
+#log4j.logger.TcoffeeWS-stats=INFO, STAT
+#log4j.logger.MafftWS-stats=INFO, STAT
+#log4j.logger.ProbconsWS-stats=INFO, STAT
+#log4j.appender.STAT=org.apache.log4j.RollingFileAppender
+#log4j.appender.STAT.File=${logDir}/wsaccess.log
+#log4j.appender.STAT.MaxFileSize=10MB
+#log4j.appender.STAT.MaxBackupIndex=10000
+#log4j.appender.STAT.layout=org.apache.log4j.PatternLayout
+#log4j.appender.STAT.layout.ConversionPattern=%t %d %m%n
+
+## Uncomment for separate local engine execution log (debugging only)
+#log4j.logger.compbio.engine.local.LocalExecutorService=INFO, C
+#log4j.appender.C=org.apache.log4j.FileAppender
+#log4j.appender.C.File=LocalExecutorService.log
+#log4j.appender.C.layout=org.apache.log4j.PatternLayout
+#log4j.appender.C.layout.ConversionPattern=%m%n
+
+## Parameter combinator logger (testing only)
+#log4j.logger.RunnerLogger=ERROR, RUNNER
+#log4j.appender.RUNNER=org.apache.log4j.FileAppender
+#log4j.appender.RUNNER.File=RunnerParam.log
+#log4j.appender.RUNNER.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RUNNER.layout.ConversionPattern=%r [%t] %p %c %x - %m%n
+
+
+## NOT IN USE - a collection of patterns; choose the one that suits you
+# %d{ABSOLUTE} %5p %c{1}:%L -
+#log4j.appender.C.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+# %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x -
+#log4j.appender.ClustalWS-stats.layout.ConversionPattern=%p %d %t %C{4} %m%n
diff --git a/WEB-INF/classes/log4j.properties.statdb b/WEB-INF/classes/log4j.properties.statdb
new file mode 100644
index 0000000..4c983ed
--- /dev/null
+++ b/WEB-INF/classes/log4j.properties.statdb
@@ -0,0 +1,27 @@
+
+## CHANGE THIS (The root directory where to store all the log files)
+#logDir = .
+
+## Uncomment to enable JWS2 activity logging to standard out (to the console if available)
+## For possible log levels please refer to the Log4j documentation: http://logging.apache.org/log4j/1.2/manual.html
+## Valid log levels are:
+## TRACE - log everything from below including very detailed messages (useful for debugging only)
+## DEBUG - log everything from below including some minor events (useful for debugging only)
+## INFO - log everything from below including some information messages
+## WARN - log errors and warnings
+## ERROR - log errors and fatal events only
+## FATAL - log fatal events only
+
+##################################################################################################################################
+log4j.rootLogger=ERROR, R
+log4j.appender.R=org.apache.log4j.FileAppender
+log4j.appender.R.File=logs/StatDBcleaningErrorFile.log
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x -
+
+# %d{ABSOLUTE} %5p %c{1}:%L -
+log4j.logger.compbio=WARN, B
+log4j.appender.B=org.apache.log4j.FileAppender
+log4j.appender.B.File=logs/StatDBcleaningWarningFile.log
+log4j.appender.B.layout=org.apache.log4j.PatternLayout
+log4j.appender.B.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x -
diff --git a/engine/compbio/engine/client/Util.java b/engine/compbio/engine/client/Util.java
new file mode 100644
index 0000000..6fcdff7
--- /dev/null
+++ b/engine/compbio/engine/client/Util.java
@@ -0,0 +1,417 @@
+/* Copyright (c) 2009 Peter Troshin
+ *
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 1.0
+ *
+ * This library is free software; you can redistribute it and/or modify it under the terms of the
+ * Apache License version 2 as published by the Apache Software Foundation
+ *
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache
+ * License for more details.
+ *
+ * A copy of the license is in apache_license.txt. It is also available here:
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt
+ *
+ * Any republication or derived work distributed in source code form
+ * must include this copyright and license notice.
+ */ + +package compbio.engine.client; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.security.InvalidParameterException; +import java.util.Map; + +import org.apache.log4j.Logger; + +import compbio.engine.client.Executable.ExecProvider; +import compbio.engine.conf.DirectoryManager; +import compbio.engine.conf.PropertyHelperManager; +import compbio.metadata.JobStatus; +import compbio.metadata.LimitsManager; +import compbio.metadata.ResultNotAvailableException; +import compbio.util.FileUtil; +import compbio.util.PropertyHelper; +import compbio.util.SysPrefs; + +public final class Util { + + private static final PropertyHelper ph = PropertyHelperManager + .getPropertyHelper(); + + private static final Logger log = Logger.getLogger(Util.class); + + public static boolean isValidJobId(final String key) { + if (compbio.util.Util.isEmpty(key)) { + return false; + } + int delIdx = key.indexOf(DirectoryManager.DELIM); + if (delIdx < 0) { + return false; + } + String id = key.substring(delIdx + DirectoryManager.DELIM.length()); + try { + Long.parseLong(id); + } catch (NumberFormatException e) { + log.debug("Invalid key! " + e.getLocalizedMessage()); + return false; + } + return true; + } + + public static void writeStatFile(String workDirectory, + String fileAndEventName) { + // never override old stat files! + // Work directory could be null for cancelled or incomplete jobs, just + // ignore + if (!compbio.util.Util.isEmpty(workDirectory)) { + writeFile(workDirectory, fileAndEventName, + new Long(System.currentTimeMillis()).toString(), false); + } + } + + public static void writeFile(String workDirectory, String fileAndEventName, + String content, boolean override) { + File file = null; + if (compbio.util.Util.isEmpty(workDirectory)) { + log.debug("Calling compbio.engine.Util.writeFile() with not work directory." + + " Skipping writing statistics!"); + return; + } + assert !compbio.util.Util.isEmpty(content) : "Content expected!"; + FileWriter writer = null; + try { + file = new File(workDirectory, fileAndEventName); + // Do not override existing files unless asked to do so ! + if (file.exists() && !override) { + return; + } + writer = new FileWriter(file); + writer.write(content); + writer.close(); + log.debug("File " + fileAndEventName + " with content: " + content + + " has been recorder successfully! "); + } catch (IOException e) { + log.error("Could not record the " + fileAndEventName + " file in " + + workDirectory + " for local execution! Ignoring... 
" + + e.getMessage()); + } finally { + FileUtil.closeSilently(log, writer); + } + } + + public static final boolean writeMarker(String workDirectory, + JobStatus fileType) { + if (fileType == null) { + throw new NullPointerException("MarkerType must be provided!"); + } + if (fileType == fileType.FINISHED || fileType == fileType.STARTED) { + throw new IllegalArgumentException( + "Please use Util.writeStatFile(workDirectory, fileAndEventName) to record FINISHED and STARTED statuses!"); + } + if (!PathValidator.isValidDirectory(workDirectory)) { + // This is OK as some task could be cancelled even before they + // started + log.warn("Attempting to write " + fileType + + " marker in the work directory " + workDirectory + + " is not provided or does not exist!"); + return false; + } + try { + File sfile = new File(workDirectory, fileType.toString()); + if (!sfile.exists()) { + return sfile.createNewFile(); + } + } catch (IOException e) { + log.error( + "Could not record stat marker file " + fileType + + " into the directory " + workDirectory + " ! " + + e.getMessage(), e.getCause()); + } + return false; + } + + public static boolean isMarked(String workDirectory, JobStatus marker) { + if (!PathValidator.isValidDirectory(workDirectory)) { + throw new NullPointerException("Work directory " + workDirectory + + " is not provided or does not exist!"); + } + return new File(workDirectory, marker.toString()).exists(); + } + + public static Map mergeEnvVariables( + final Map sysEnvTobeModified, + final Map variables) { + if (variables.containsKey(EnvVariableProcessor.PATH)) { + String propPath = variables.get(EnvVariableProcessor.PATH); + String sysPATH = sysEnvTobeModified.get(EnvVariableProcessor.PATH); + String syspath = sysEnvTobeModified.get(EnvVariableProcessor.PATH + .toLowerCase()); + // This version appears surprisingly often on windows machines + boolean added = false; + String sysPath = sysEnvTobeModified.get("Path"); + if (sysPATH != null) { + sysEnvTobeModified.put(EnvVariableProcessor.PATH, sysPATH + + File.pathSeparator + propPath); + added = true; + } + if (syspath != null) { + sysEnvTobeModified.put(EnvVariableProcessor.PATH.toLowerCase(), + syspath + File.pathSeparator + propPath); + added = true; + } + if (sysPath != null) { + sysEnvTobeModified.put("Path", sysPath + File.pathSeparator + + propPath); + added = true; + } + // If not path variable is found, then add it + if (!added) { + sysEnvTobeModified.put(EnvVariableProcessor.PATH, propPath); + } + variables.remove(EnvVariableProcessor.PATH); + } + sysEnvTobeModified.putAll(variables); + return sysEnvTobeModified; + } + + public static String convertToAbsolute(String relativePath) { + // If specified path is relative, than make it absolute + String absolute = relativePath; + if (!PathValidator.isAbsolutePath(relativePath)) { + absolute = PropertyHelperManager.getLocalPath() + relativePath; + Util.log.trace("Changing local path in enviromental variable to absolute: FROM " + + relativePath + " TO " + absolute); + } + return absolute; + } + + public static String getExecProperty(String propertySpec, Executable exec) { + assert !compbio.util.Util.isEmpty(propertySpec); + assert exec != null; + return Util.getExecProperty(propertySpec, exec.getClass()); + } + + public static String getExecProperty(String propertySpec, Class clazz) { + assert !compbio.util.Util.isEmpty(propertySpec); + assert clazz != null; + String property = clazz.getSimpleName().toLowerCase() + "." 
+ + propertySpec.toLowerCase(); + log.trace("Processing property: " + property); + return ph.getProperty(property); + } + + public static String getFullPath(String workDirectory, String fileName) { + assert !compbio.util.Util.isEmpty(fileName) : "Filename must be provided! "; + assert !compbio.util.Util.isEmpty(workDirectory) : "Workdirectory must be provided! "; + return workDirectory + File.separator + fileName; + } + + public static String getCommand(ExecProvider provider, Class clazz) { + if (provider == ExecProvider.Any) { + throw new IllegalArgumentException( + "A particular execution environment must be chosen"); + } + String execCommandName = clazz.getSimpleName().toLowerCase(); + String bin = ""; + if (provider == ExecProvider.Local) { + if (SysPrefs.isWindows) { + bin = ph.getProperty("local." + execCommandName + + ".bin.windows"); + } else { + bin = ph.getProperty("local." + execCommandName + ".bin"); + } + // For executable Jar files the location of Java executable is not + // required for local execution. If it is not provided, JABAWS will + // attempt to use Java from JAVA_HOME env variable + if (isJavaLibrary(clazz)) { + if (compbio.util.Util.isEmpty(bin)) { + bin = getJava(); + } + } + // If path to executable defined in the properties is not absolute, + // then make it so + // as setting working directory of ProcessBuilder will make it + // impossible + // to find an executable otherwise + if (!compbio.util.Util.isEmpty(bin) + && !PathValidator.isAbsolutePath(bin)) { + bin = bin.trim(); + if (bin.equalsIgnoreCase("java") + || bin.equalsIgnoreCase("java.exe")) { + // do not make path absolute to the java executable if + // relative path is provided. Java executable is not a part + // of JABAWS distribution! + } else { + bin = PropertyHelperManager.getLocalPath() + bin; + } + } + } else { + bin = ph.getProperty("cluster." + execCommandName + ".bin"); + } + // Could have done: Set executable flag if not set + // but - do not because in some cases more than one file must be made + // executable! + /* + * if (!compbio.util.Util.isEmpty(bin)) { File command = new File(bin); + * if (!command.canExecute()) { log.debug( + * "The command line binary is not executable! (just unpacked from war file? )" + * ); log.debug("Attempting to set executable flag for command: " + + * bin); command.setExecutable(true, false); } } + */ + log.debug("Using executable: " + bin); + return bin; // File.separator + } + /** + * Returns true of executableName.jar.file property has some value in the + * Executable.properties file, false otherwise. + * + * @param clazz + * @return + */ + public static boolean isJavaLibrary(Class clazz) { + String execCommandName = clazz.getSimpleName().toLowerCase(); + String java_lib = ph.getProperty(execCommandName + ".jar.file"); + return !compbio.util.Util.isEmpty(java_lib); + } + + /** + * Returns the absolute path to the Java executable from JAVA_HOME + * + * @return returns the absolute path to the Java executable from JAVA_HOME + */ + public static String getJava() { + String javahome = System.getProperty("java.home"); + if (compbio.util.Util.isEmpty(javahome)) { + javahome = System.getenv("JAVA_HOME"); + } + if (compbio.util.Util.isEmpty(javahome)) { + log.warn("Cannot find Java in java.home system property " + + "or JAVA_HOME environment variable! 
"); + return null; + } + File jh = new File(javahome); + if (jh.exists() && jh.isDirectory()) { + String java = javahome + File.separator + "bin" + File.separator + + "java"; + if (SysPrefs.isWindows) { + java += ".exe"; + } + File jexe = new File(java); + if (jexe.exists() && jexe.isFile() && jexe.canExecute()) { + log.info("Using Java from: " + jexe.getAbsolutePath()); + return jexe.getAbsolutePath(); + } else { + log.warn("Cannot find java executable in the JAVA_HOME!"); + } + } else { + log.warn("JAVA_HOME does not seems to point to a valid directory! Value: " + + javahome); + } + return null; + } + + public static ExecProvider getSupportedRuntimes(Class clazz) { + boolean localRuntimeSupport = false; + boolean clusterRuntimeSupport = false; + String executableName = clazz.getSimpleName().toLowerCase(); + String localRuntime1 = ph.getProperty("local." + executableName + + ".bin.windows"); + String localRuntime2 = ph.getProperty("local." + executableName + + ".bin"); + if (!compbio.util.Util.isEmpty(localRuntime1) + || !compbio.util.Util.isEmpty(localRuntime2)) { + localRuntimeSupport = true; + } else { + localRuntimeSupport = isJavaLibrary(clazz) && getJava() != null; + } + + String clusterRuntime = ph.getProperty("cluster." + executableName + + ".bin"); + if (!compbio.util.Util.isEmpty(clusterRuntime)) { + clusterRuntimeSupport = true; + } + if (localRuntimeSupport && clusterRuntimeSupport) { + return ExecProvider.Any; + } else if (localRuntimeSupport) { + return ExecProvider.Local; + } else if (clusterRuntimeSupport) { + return ExecProvider.Cluster; + } + // Means executable cannot be executed -> is improperly configured + // should be ignored + throw new InvalidParameterException( + "Executable is not provided for any runtime environments"); + } + public static ConfiguredExecutable loadExecutable(String taskId) + throws ResultNotAvailableException { + String workDir = compbio.engine.Configurator.getWorkDirectory(taskId); + // The results for this job has been collected once, or the JVM may + // have been restarted, + // so that the job is not in the job list + // ->load a ConfiguredExercutable from saved run and return it + FileInputStream fileInStream = null; + ConfiguredExecutable exec = null; + try { + fileInStream = new FileInputStream(workDir + File.separator + + RunConfiguration.rconfigFile); + RunConfiguration rconf = RunConfiguration.load(fileInStream); + exec = ConfExecutable.newConfExecutable(rconf); + fileInStream.close(); + } catch (FileNotFoundException e) { + log.error( + "Could not find run configuration to load!" + + e.getLocalizedMessage(), e.getCause()); + throw new ResultNotAvailableException( + "Could not find run configuration to load!" + + e.getMessage(), e.getCause()); + } catch (IOException e) { + log.error( + "IO Exception while reading run configuration file!" + + e.getLocalizedMessage(), e.getCause()); + throw new ResultNotAvailableException( + "Could not load run configuration!" 
+ e.getMessage(), + e.getCause()); + } finally { + FileUtil.closeSilently(log, fileInStream); + } + return exec; + } + + /** + * For now just assume that all parameters which came in needs setting it + * will be a client responsibility to prepare RunnerConfig object then + * + * @param rconfig + * @return + * + * public static List toOptionString(RunnerConfig + * rconfig) { String option = ""; List options = new + * ArrayList(); for (Parameter par : + * rconfig.getParameters()) { if (par.getPossibleValues().isEmpty()) + * { option = par.getOptionName(); } else { option = + * par.getOptionName() + "=" + par.getPossibleValues().get(0); } // + * separate options options.add(option); } return options; } + */ + + public static LimitsManager getLimits(Class clazz) { + LimitsManager limits = null; + try { + limits = ConfExecutable.getRunnerLimits(clazz); + } catch (FileNotFoundException e) { + Util.log.warn("No limits are found for " + clazz + " executable! " + + e.getLocalizedMessage(), e.getCause()); + // its ok, limit may not be initialized + } catch (IOException e) { + Util.log.warn("IO exception while attempting to read limits for " + + clazz + " executable! " + e.getLocalizedMessage(), + e.getCause()); + } + return limits; + } + +} diff --git a/engine/compbio/engine/cluster/drmaa/AsyncJobRunner.java b/engine/compbio/engine/cluster/drmaa/AsyncJobRunner.java new file mode 100644 index 0000000..b9785ef --- /dev/null +++ b/engine/compbio/engine/cluster/drmaa/AsyncJobRunner.java @@ -0,0 +1,100 @@ +/* Copyright (c) 2009 Peter Troshin + * + * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 1.0 + * + * This library is free software; you can redistribute it and/or modify it under the terms of the + * Apache License version 2 as published by the Apache Software Foundation + * + * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without + * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache + * License for more details. + * + * A copy of the license is in apache_license.txt. It is also available here: + * @see: http://www.apache.org/licenses/LICENSE-2.0.txt + * + * Any republication or derived work distributed in source code form + * must include this copyright and license notice. + */ + +package compbio.engine.cluster.drmaa; + +import org.apache.log4j.Logger; +import org.ggf.drmaa.DrmaaException; + +import compbio.engine.AsyncExecutor; +import compbio.engine.Cleaner; +import compbio.engine.Configurator; +import compbio.engine.client.ConfiguredExecutable; +import compbio.engine.client.Util; +import compbio.metadata.JobSubmissionException; +import compbio.metadata.ResultNotAvailableException; + +/** + * Single cluster job runner class + * + * @author pvtroshin + * @version 1.0 August 2009 + * + * TODO after call to submitJob() no setters really work as the job + * template gets deleted, this needs to be taken into account in this + * class design! 
+ */ +public class AsyncJobRunner implements AsyncExecutor { + + private static Logger log = Logger.getLogger(AsyncJobRunner.class); + + @Override + public String submitJob(ConfiguredExecutable executable) + throws JobSubmissionException { + JobRunner jr = new JobRunner(executable); + jr.submitJob(); // ignore cluster job id as it could be retrieved from + // fs + return executable.getTaskId(); + } + + @Override + public boolean cancelJob(String jobId) { + ClusterSession clustSession = ClusterSession.getInstance(); + return compbio.engine.cluster.drmaa.ClusterUtil.cancelJob(jobId, + clustSession); + } + + /** + * This will never return clust.engine.JobStatus.CANCELLED as for sun grid + * engine cancelled job is the same as failed. Cancelled jobs needs to be + * tracked manually! + */ + @Override + public compbio.metadata.JobStatus getJobStatus(String jobId) { + return JobRunner.getJobStatus(jobId); + } + + @Override + public boolean cleanup(String jobId) { + String workDir = Configurator.getWorkDirectory(jobId); + return Cleaner.deleteAllFiles(workDir); + } + + @Override + public ConfiguredExecutable getResults(String jobId) + throws ResultNotAvailableException { + + assert Util.isValidJobId(jobId); + + ClusterSession csession = ClusterSession.getInstance(); + ConfiguredExecutable exec; + try { + exec = csession.getResults(jobId); + } catch (DrmaaException e) { + log.error(e.getLocalizedMessage(), e.getCause()); + throw new ResultNotAvailableException(e); + } + return exec; + } + + @Override + public String getWorkDirectory(String jobId) { + return Configurator.getWorkDirectory(jobId); + } + +} // class end diff --git a/engine/compbio/engine/cluster/drmaa/ClusterUtil.java b/engine/compbio/engine/cluster/drmaa/ClusterUtil.java new file mode 100644 index 0000000..9ce527c --- /dev/null +++ b/engine/compbio/engine/cluster/drmaa/ClusterUtil.java @@ -0,0 +1,95 @@ +/* Copyright (c) 2009 Peter Troshin + * + * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 1.0 + * + * This library is free software; you can redistribute it and/or modify it under the terms of the + * Apache License version 2 as published by the Apache Software Foundation + * + * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without + * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache + * License for more details. + * + * A copy of the license is in apache_license.txt. It is also available here: + * @see: http://www.apache.org/licenses/LICENSE-2.0.txt + * + * Any republication or derived work distributed in source code form + * must include this copyright and license notice. 
+ */ +package compbio.engine.cluster.drmaa; + +import java.io.IOException; +import java.text.NumberFormat; + +import org.apache.log4j.Logger; +import org.ggf.drmaa.DrmaaException; +import org.ggf.drmaa.JobInfo; +import org.ggf.drmaa.Session; + +import compbio.engine.Configurator; +import compbio.engine.client.Util; +import compbio.metadata.JobExecutionException; +import compbio.metadata.JobStatus; + +public class ClusterUtil { + + private static final Logger log = Logger.getLogger(ClusterUtil.class); + + public static final NumberFormat CLUSTER_STAT_IN_SEC = NumberFormat + .getInstance(); + + static { + CLUSTER_STAT_IN_SEC.setMinimumFractionDigits(4); + } + + public static final boolean cancelJob(final String jobId, + ClusterSession csession) { + assert Util.isValidJobId(jobId); + boolean cancelled = true; + Session session = csession.getSession(); + try { + log.info("Job " + jobId + " is successfully cancelled"); + compbio.engine.client.Util.writeMarker(Configurator.getWorkDirectory(jobId), + JobStatus.CANCELLED); + session.control(ClusterSession.getClusterJobId(jobId).getJobId(), + Session.TERMINATE); + } catch (DrmaaException e) { + // Log silently + log.error("Job " + jobId + " cancellation failed!"); + log.error("Cause: " + e.getLocalizedMessage(), e.getCause()); + cancelled = false; + } catch (IOException e) { + log.error( + "Could not read JOBID file to determine cluster jobid for taskid: " + + jobId + " Message: " + e.getLocalizedMessage(), e + .getCause()); + } finally { + log + .trace("Job " + + jobId + + " has been successfully removed from the cluster engine job list"); + csession.removeJob(jobId); + } + return cancelled; + } + + public static final JobInfo waitForResult(ClusterSession csession, + String jobId) throws JobExecutionException { + JobInfo jinfo = null; + assert Util.isValidJobId(jobId); + try { + jinfo = csession.waitForJob(jobId); + } catch (DrmaaException e) { + log.error(e.getLocalizedMessage(), e.getCause()); + throw new JobExecutionException(e); + } catch (IOException e) { + log.error("Could not read JOBID file for job " + jobId + + " Message " + e.getMessage(), e.getCause()); + throw new JobExecutionException(e); + } finally { + // at this point the job has finished + csession.removeJob(jobId); + } + return jinfo; + } + +} diff --git a/engine/compbio/engine/cluster/drmaa/JobRunner.java b/engine/compbio/engine/cluster/drmaa/JobRunner.java new file mode 100644 index 0000000..fdceb72 --- /dev/null +++ b/engine/compbio/engine/cluster/drmaa/JobRunner.java @@ -0,0 +1,395 @@ +/* Copyright (c) 2009 Peter Troshin + * + * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 1.0 + * + * This library is free software; you can redistribute it and/or modify it under the terms of the + * Apache License version 2 as published by the Apache Software Foundation + * + * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without + * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache + * License for more details. + * + * A copy of the license is in apache_license.txt. It is also available here: + * @see: http://www.apache.org/licenses/LICENSE-2.0.txt + * + * Any republication or derived work distributed in source code form + * must include this copyright and license notice. 
+ */ + +package compbio.engine.cluster.drmaa; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.apache.log4j.Logger; +import org.ggf.drmaa.DrmaaException; +import org.ggf.drmaa.InvalidJobException; +import org.ggf.drmaa.JobInfo; +import org.ggf.drmaa.JobTemplate; +import org.ggf.drmaa.Session; + +import compbio.engine.Cleaner; +import compbio.engine.ClusterJobId; +import compbio.engine.Configurator; +import compbio.engine.SyncExecutor; + +import compbio.engine.client.ConfiguredExecutable; +import compbio.engine.client.Executable; +import compbio.engine.client.PathValidator; +import compbio.engine.client.PipedExecutable; +import compbio.engine.client.Util; +import compbio.engine.client.Executable.ExecProvider; +import compbio.metadata.JobExecutionException; +import compbio.metadata.JobStatus; +import compbio.metadata.JobSubmissionException; +import compbio.metadata.ResultNotAvailableException; + +/** + * Single cluster job runner class + * + * @author pvtroshin + * @date August 2009 + * + * TODO after call to submitJob() no setters really work as the job + * template gets deleted, this needs to be taken into account in this + * class design! + */ +public class JobRunner implements SyncExecutor { + + final JobTemplate jobtempl; + static ClusterSession clustSession = ClusterSession.getInstance(); + static Session session = clustSession.getSession(); + static final Logger log = Logger.getLogger(JobRunner.class); + final ConfiguredExecutable confExecutable; + private final String workDirectory; + String jobId; + + public JobRunner(ConfiguredExecutable confExec) + throws JobSubmissionException { + try { + String command = confExec.getCommand(ExecProvider.Cluster); + PathValidator.validateExecutable(command); + log.debug("Setting command " + command); + + jobtempl = session.createJobTemplate(); + jobtempl.setRemoteCommand(command); + jobtempl.setJoinFiles(false); + setJobName(confExec.getExecutable().getClass().getSimpleName()); + + this.workDirectory = confExec.getWorkDirectory(); + assert !compbio.util.Util.isEmpty(workDirectory); + + // Tell the job where to get/put things + jobtempl.setWorkingDirectory(this.workDirectory); + + /* + * Set environment variables for the process if any + */ + Map jobEnv = confExec.getEnvironment(); + if (jobEnv != null && !jobEnv.isEmpty()) { + setJobEnvironmentVariables(jobEnv); + } + List args = confExec.getParameters().getCommands(); + // Set optional parameters + if (args != null && args.size() > 0) { + jobtempl.setArgs(args); + } + + /* + * If executable need in/out data to be piped into it + */ + if (confExec.getExecutable() instanceof PipedExecutable) { + setPipes(confExec); + } + + /* + * If executable require special cluster configuration parameters to + * be set e.g. 
queue, ram, time etc + */ + setNativeSpecs(confExec.getExecutable()); + + + log.trace("using arguments: " + jobtempl.getArgs()); + this.confExecutable = confExec; + // Save run configuration + confExec.saveRunConfiguration(); + + } catch (DrmaaException e) { + log.error(e.getLocalizedMessage(), e.getCause()); + throw new JobSubmissionException(e); + } catch (IOException e) { + log.error(e.getLocalizedMessage(), e.getCause()); + throw new JobSubmissionException(e); + } + + } + + void setPipes(ConfiguredExecutable executable) throws DrmaaException { + + String output = executable.getOutput(); + String error = executable.getError(); + // Standard drmaa path format is hostname:path + // to avoid supplying hostnames with all local paths just prepend colon + // to the path + // Input and output can be null as in and out files may be defined in + // parameters + /* + * Use this for piping input into the process if (input != null) { if + * (!input.contains(":")) { input = makeLocalPath(input); + * log.trace("converting input to " + input); } + * jobtempl.setInputPath(input); log.debug("use Input: " + + * jobtempl.getInputPath()); } + */ + if (output != null) { + if (!output.contains(":")) { + output = makeLocalPath(output); + } + jobtempl.setOutputPath(output); + log.debug("Output to: " + jobtempl.getOutputPath()); + } + if (error != null) { + if (!error.contains(":")) { + error = makeLocalPath(error); + } + jobtempl.setErrorPath(error); + log.debug("Output errors to: " + jobtempl.getErrorPath()); + } + + } + + void setNativeSpecs(Executable executable) throws DrmaaException { + String nativeSpecs = executable.getClusterJobSettings(); + if(!compbio.util.Util.isEmpty(nativeSpecs)) { + log.debug("Using cluster job settings: " + nativeSpecs); + jobtempl.setNativeSpecification(nativeSpecs); + } + } + + void setEmail(String email) { + log.trace("Setting email to:" + email); + try { + jobtempl.setEmail(Collections.singleton(email)); + jobtempl.setBlockEmail(false); + } catch (DrmaaException e) { + log.debug(e.getLocalizedMessage()); + throw new IllegalArgumentException(e); + } + } + + void setJobName(String name) { + log.trace("Setting job name to:" + name); + try { + jobtempl.setJobName(name); + } catch (DrmaaException e) { + log.debug(e.getLocalizedMessage()); + throw new IllegalArgumentException(e); + } + } + + @SuppressWarnings("unchecked") + void setJobEnvironmentVariables(Map env_variables) { + assert env_variables != null && !env_variables.isEmpty(); + try { + log.trace("Setting job environment to:" + env_variables); + Map sysEnv = jobtempl.getJobEnvironment(); + if (sysEnv != null && !sysEnv.isEmpty()) { + Util.mergeEnvVariables(sysEnv, env_variables); + } else { + sysEnv = env_variables; + } + jobtempl.setJobEnvironment(sysEnv); + + } catch (DrmaaException e) { + log.debug(e.getLocalizedMessage()); + throw new IllegalArgumentException(e); + } + } + + private static String makeLocalPath(String path) { + return ":" + path; + } + + public boolean deepClean() { + throw new UnsupportedOperationException(); + // TODO + /* + * remove all files from these this.jobtempl.getInputPath(); + * this.jobtempl.getOutputPath(); this.jobtempl.getWorkingDirectory(); + */ + // executable.getInputFiles(); + } + + /** + * This will never return clust.engine.JobStatus.CANCELLED as for sun grid + * engine cancelled job is the same as failed. Cancelled jobs needs to be + * tracked manually! 
+ */ + static compbio.metadata.JobStatus getJobStatus(String jobId) { + try { + ClusterJobId clusterJobId = ClusterSession.getClusterJobId(jobId); + switch (clustSession.getJobStatus(clusterJobId)) { + case Session.DONE: + compbio.engine.client.Util.writeStatFile(Configurator.getWorkDirectory(jobId), + JobStatus.FINISHED.toString()); + + return compbio.metadata.JobStatus.FINISHED; + case Session.FAILED: + compbio.engine.client.Util.writeMarker(Configurator.getWorkDirectory(jobId), + JobStatus.FAILED); + + return compbio.metadata.JobStatus.FAILED; + + case Session.RUNNING: + // will not persist this status as temporary + return compbio.metadata.JobStatus.RUNNING; + + case Session.SYSTEM_SUSPENDED: + case Session.USER_SYSTEM_SUSPENDED: + case Session.USER_SUSPENDED: + case Session.USER_SYSTEM_ON_HOLD: + case Session.USER_ON_HOLD: + case Session.SYSTEM_ON_HOLD: + case Session.QUEUED_ACTIVE: + // will not persist this status as temporary + return compbio.metadata.JobStatus.PENDING; + + default: + // It is possible that the this status is returned for a job that is almost completed + // when a state is changed from RUNNING to DONE + // It looks like a bug in DRMAA SGE implementation + return compbio.metadata.JobStatus.UNDEFINED; + } + } catch (InvalidJobException e) { + log.info("Job " + jobId + " could not be located by DRMAA " + + e.getLocalizedMessage(), e.getCause()); + log.info("Attempting to determine the status by marker files"); + return getRecordedJobStatus(jobId); + } catch (DrmaaException e) { + log.error( + "Exception in DRMAA system while quering the job status: " + + e.getLocalizedMessage(), e.getCause()); + } catch (IOException e) { + log.error("Could not read JOBID for taskId: " + jobId + + " Message: " + e.getLocalizedMessage(), e.getCause()); + } + + return JobStatus.UNDEFINED; + } + + static JobStatus getRecordedJobStatus(String jobId) { + /* + * Job has already been removed from the task list, so it running + * status could not be determined. Most likely it has been + * cancelled, finished or failed. + */ + String workDir = Configurator.getWorkDirectory(jobId); + if (Util.isMarked(workDir, JobStatus.FINISHED) + || Util.isMarked(workDir, JobStatus.COLLECTED)) { + return JobStatus.FINISHED; + } + if (Util.isMarked(workDir, JobStatus.CANCELLED)) { + return JobStatus.CANCELLED; + } + if (Util.isMarked(workDir, JobStatus.FAILED)) { + return JobStatus.FAILED; + } + return JobStatus.UNDEFINED; + } + + + @Override + public boolean cleanup() { + /* + * TODO there is two additional files created by sun grid engine which + * are named as follows: output this.getWorkDirectory() + + * executable.getClass().getSimpleName() + "." + "o" + this.jobId; error + * this.getWorkDirectory() + executable.getClass().getSimpleName() + "." 
+ * + "e" + this.jobId; individual executable does not know about these + * two unless it implements PipedExecutable which need to collect data + * from these streams Thus this method will fail to remove the task + * directory completely + */ + return Cleaner.deleteFiles(confExecutable); + } + + JobInfo waitForJob(String jobId) throws JobExecutionException { + assert Util.isValidJobId(jobId); + return ClusterUtil.waitForResult(clustSession, jobId); + } + + boolean cancelJob(String jobId) { + assert Util.isValidJobId(jobId); + return compbio.engine.cluster.drmaa.ClusterUtil.cancelJob(jobId, + clustSession); + } + + @Override + public boolean cancelJob() { + return cancelJob(this.jobId); + } + + String submitJob() throws JobSubmissionException { + + String jobId; + try { + jobId = session.runJob(jobtempl); + log.info("submitted single job with jobids:"); + log.info("\t \"" + jobId + "\""); + session.deleteJobTemplate(jobtempl); + clustSession.addJob(jobId, confExecutable); + } catch (DrmaaException e) { + e.printStackTrace(); + throw new JobSubmissionException(e); + } + + return this.confExecutable.getTaskId(); + } + + public String getWorkDirectory() { + return this.workDirectory; + } + + @Override + public void executeJob() throws JobSubmissionException { + this.jobId = submitJob(); + } + + /** + * This method will block before the calculation has completed and then + * return the object containing a job execution statistics + * + * @return + * @throws JobExecutionException + */ + public JobInfo getJobInfo() throws JobExecutionException { + return waitForJob(this.jobId); + } + + @Override + public ConfiguredExecutable waitForResult() throws JobExecutionException { + ConfiguredExecutable confExec; + try { + confExec = new AsyncJobRunner().getResults(this.jobId); + if (confExec == null) { + log.warn("Could not find results of job " + this.jobId); + } + } catch (ResultNotAvailableException e) { + log.error(e.getMessage(), e.getCause()); + throw new JobExecutionException(e); + } + return confExec; + } + + @Override + public compbio.metadata.JobStatus getJobStatus() { + return getJobStatus(this.jobId); + } + + public static JobRunner getInstance(ConfiguredExecutable executable) + throws JobSubmissionException { + return new JobRunner(executable); + } + +} // class end diff --git a/runner/compbio/runner/Util.java b/runner/compbio/runner/Util.java new file mode 100644 index 0000000..7734c86 --- /dev/null +++ b/runner/compbio/runner/Util.java @@ -0,0 +1,134 @@ +/* Copyright (c) 2011 Peter Troshin + * + * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0 + * + * This library is free software; you can redistribute it and/or modify it under the terms of the + * Apache License version 2 as published by the Apache Software Foundation + * + * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without + * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache + * License for more details. + * + * A copy of the license is in apache_license.txt. It is also available here: + * @see: http://www.apache.org/licenses/LICENSE-2.0.txt + * + * Any republication or derived work distributed in source code form + * must include this copyright and license notice. 
+ */
+
+package compbio.runner;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import compbio.data.sequence.Alignment;
+import compbio.data.sequence.ClustalAlignmentUtil;
+import compbio.data.sequence.FastaSequence;
+import compbio.data.sequence.Score;
+import compbio.data.sequence.SequenceUtil;
+import compbio.data.sequence.UnknownFileFormatException;
+import compbio.engine.client.ConfExecutable;
+import compbio.engine.client.ConfiguredExecutable;
+import compbio.engine.client.Executable;
+import compbio.engine.conf.PropertyHelperManager;
+import compbio.metadata.JobSubmissionException;
+import compbio.metadata.PresetManager;
+import compbio.metadata.RunnerConfig;
+import compbio.util.PropertyHelper;
+
+public final class Util {
+
+    public static Logger log = Logger.getLogger(Util.class);
+
+    private static final PropertyHelper ph = PropertyHelperManager
+            .getPropertyHelper();
+
+    public static final String SPACE = " ";
+
+    public static synchronized RunnerConfig getSupportedOptions(
+            Class clazz) {
+        try {
+            return ConfExecutable.getRunnerOptions(clazz);
+        } catch (FileNotFoundException e) {
+            log.error("Could not load " + clazz + " parameters! "
+                    + e.getMessage(), e.getCause());
+        } catch (IOException e) {
+            log.error("IO exception while reading " + clazz + " parameters! "
+                    + e.getMessage(), e.getCause());
+        }
+        return null;
+    }
+
+    public static PresetManager getPresets(Class clazz) {
+        try {
+            return ConfExecutable.getRunnerPresets(clazz);
+        } catch (FileNotFoundException e) {
+            log.warn("No presets are found for " + clazz + " executable! "
+                    + e.getLocalizedMessage(), e.getCause());
+        } catch (IOException e) {
+            log.warn("IO exception while reading presets for " + clazz
+                    + " executable! " + e.getLocalizedMessage(), e.getCause());
+        }
+        return null;
+    }
+
+    public static final Alignment readClustalFile(String workDirectory,
+            String clustFile) throws UnknownFileFormatException, IOException,
+            FileNotFoundException, NullPointerException {
+        assert !compbio.util.Util.isEmpty(workDirectory);
+        assert !compbio.util.Util.isEmpty(clustFile);
+        File cfile = new File(compbio.engine.client.Util.getFullPath(
+                workDirectory, clustFile));
+        log.trace("CLUSTAL OUTPUT FILE PATH: " + cfile.getAbsolutePath());
+        if (!(cfile.exists() && cfile.length() > 0)) {
+            throw new FileNotFoundException("Result for the jobId "
+                    + workDirectory + " with file name " + clustFile
+                    + " is not found!");
+        }
+        return ClustalAlignmentUtil.readClustalFile(cfile);
+    }
+
+    public static final Map readJronnFile(String workDirectory,
+            String clustFile) throws UnknownFileFormatException, IOException,
+            FileNotFoundException, NullPointerException {
+        assert !compbio.util.Util.isEmpty(workDirectory);
+        assert !compbio.util.Util.isEmpty(clustFile);
+        File cfile = new File(compbio.engine.client.Util.getFullPath(
+                workDirectory, clustFile));
+        log.trace("Jronn OUTPUT FILE PATH: " + cfile.getAbsolutePath());
+        if (!(cfile.exists() && cfile.length() > 0)) {
+            throw new FileNotFoundException("Result for the jobId "
+                    + workDirectory + " with file name " + clustFile
+                    + " is not found!");
+        }
+        return SequenceUtil.readJRonn(cfile);
+    }
+
+    public static void writeInput(List sequences,
+            ConfiguredExecutable exec) throws JobSubmissionException {
+        try {
+            File filein = new File(exec.getInput());
+            FileOutputStream fout = new FileOutputStream(filein);
+            log.debug("File path: " + filein.getAbsolutePath());
+            SequenceUtil.writeFasta(fout, sequences);
+            fout.close();
+        } catch (IOException e) {
+            log.error("IOException while writing input file into the disk: "
+                    + e.getLocalizedMessage(), e);
+            throw new JobSubmissionException(
+                    "We are sorry, but the JABAWS server seems to have a problem! "
+                            + e.getLocalizedMessage(), e);
+        }
+    }
+
+}