+/* Copyright (c) 2011 Peter Troshin\r
+ * \r
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0 \r
+ * \r
+ * This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ * Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ * License for more details.\r
+ * \r
+ * A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
package compbio.stat.collector;\r
\r
import java.io.File;\r
import java.io.FileFilter;\r
-import java.io.FileWriter;\r
import java.io.IOException;\r
import java.sql.SQLException;\r
import java.text.SimpleDateFormat;\r
import java.util.ArrayList;\r
+import java.util.Date;\r
import java.util.HashMap;\r
import java.util.HashSet;\r
import java.util.List;\r
\r
import org.apache.log4j.Logger;\r
\r
-import compbio.engine.client.ConfExecutable;\r
import compbio.engine.client.Executable;\r
-import compbio.engine.conf.PropertyHelperManager;\r
+import compbio.engine.client.PathValidator;\r
+import compbio.engine.client.SkeletalExecutable;\r
import compbio.metadata.JobStatus;\r
-import compbio.runner.msa.ClustalW;\r
import compbio.util.FileUtil;\r
-import compbio.util.PropertyHelper;\r
import compbio.ws.client.Services;\r
+import compbio.ws.client.ServicesUtil;\r
\r
/**\r
 * Number of runs of each web service = number of job folders prefixed with\r
 * that service name (e.g. "ClustalW#12345")\r
* @author pvtroshin\r
* \r
*/\r
-public class ExecutionStatCollector {\r
+public class ExecutionStatCollector implements Runnable {\r
\r
static final int UNDEFINED = -1;\r
\r
\r
static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");\r
\r
- static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();\r
-\r
+ final private File workDirectory;\r
final private List<JobStat> stats;\r
+ /**\r
+ * Consider the job that has been working for longer than timeOutInHours\r
+ * completed, whatever the outcome\r
+ */\r
+ final private int timeOutInHours;\r
\r
- public ExecutionStatCollector(String workDirectory) {\r
- File[] files = FileUtil.getFiles(workDirectory, directories);\r
+ /**\r
+ * Creates a statistics collector over the given job directory. A job that\r
+ * has been running for longer than timeOutInHours is treated as completed\r
+ * whatever its outcome.\r
+ * \r
+ * @param workDirectory\r
+ *            the directory holding one subdirectory per job; must exist\r
+ * @param timeOutInHours\r
+ *            maximum allowed job runtime in hours; must be positive\r
+ * @throws IllegalArgumentException\r
+ *             if workDirectory does not exist or timeOutInHours <= 0\r
+ */\r
+ public ExecutionStatCollector(String workDirectory, int timeOutInHours) {\r
+ log.info("Starting stat collector for directory: " + workDirectory);\r
+ log.info("Maximum allowed runtime(h): " + timeOutInHours);\r
+ if (!PathValidator.isValidDirectory(workDirectory)) {\r
+ throw new IllegalArgumentException("workDirectory '"\r
+ + workDirectory + "' does not exist!");\r
+ }\r
+ this.workDirectory = new File(workDirectory);\r
stats = new ArrayList<JobStat>();\r
- for (File file : files) {\r
- JobDirectory jd = new JobDirectory(file);\r
- stats.add(jd.getJobStat());\r
- // System.out.println(jd.getJobStat().getJobReportTabulated());\r
+ if (timeOutInHours <= 0) {\r
+ throw new IllegalArgumentException(\r
+ "Timeout value must be greater than 0! Given value: "\r
+ + timeOutInHours);\r
}\r
+ this.timeOutInHours = timeOutInHours;\r
}\r
\r
- public StatProcessor getStats() {\r
- return new StatProcessor(stats);\r
+ /**\r
+ * A job counts as completed when it has produced a result, was cancelled\r
+ * or finished, or has exceeded the configured timeout.\r
+ */\r
+ boolean hasCompleted(JobDirectory jd) {\r
+ JobStat jstat = jd.getJobStat();\r
+ if (jstat.hasResult() || jstat.getIsCancelled()\r
+ || jstat.getIsFinished() || hasTimedOut(jd)) {\r
+ return true;\r
+ }\r
+ return false;\r
}\r
\r
- public void writeStatToDB() throws SQLException {\r
- Set<JobStat> rjobs = new HashSet<JobStat>(stats);\r
- StatDB statdb = new StatDB();\r
- statdb.removeRecordedJobs(rjobs);\r
- statdb.insertData(rjobs);\r
- statdb.conn.close();\r
+ /**\r
+ * True when the job directory was last modified more than timeOutInHours\r
+ * ago (elapsed milliseconds converted to whole hours via 1000 * 60 * 60).\r
+ */\r
+ boolean hasTimedOut(JobDirectory jd) {\r
+ return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > timeOutInHours;\r
}\r
\r
- static String getClusterJobDir() {\r
- String clusterdir = ph.getProperty("cluster.tmp.directory");\r
- if (clusterdir != null) {\r
- clusterdir.trim();\r
+ /*\r
+ * Make sure that the collectStatistics method was called prior to calling\r
+ * this! Otherwise the returned StatProcessor is empty (a log message is\r
+ * emitted, not an error). TODO consider running collectStatistics from\r
+ * here on the first call\r
+ */\r
+ StatProcessor getStats() {\r
+ if (stats.isEmpty()) {\r
+ log.info("Please make sure collectStatistics method was called prior to calling getStats()!");\r
}\r
- return clusterdir;\r
+ return new StatProcessor(stats);\r
}\r
\r
- static void updateTime(File statFile) throws IOException {\r
- long lastMod = statFile.lastModified();\r
- FileWriter fw = new FileWriter(statFile);\r
- fw.write(new Long(lastMod).toString());\r
- fw.close();\r
- }\r
+ /**\r
+ * Persists the collected statistics: removes jobs already recorded in the\r
+ * database from the in-memory set, then inserts the remainder.\r
+ * NOTE(review): the previous version closed statdb.conn here; this one\r
+ * does not — confirm StatDB now manages its own connection lifecycle.\r
+ */\r
+ void writeStatToDB() throws SQLException {\r
+ Set<JobStat> rjobs = new HashSet<JobStat>(stats);\r
+ StatDB statdb = new StatDB();\r
+ log.debug("Removing records that has already been recorded");\r
\r
- static String getLocalJobDir() {\r
- String locdir = ph.getProperty("local.tmp.directory");\r
- if (locdir != null) {\r
- locdir.trim();\r
- }\r
- return locdir;\r
+ statdb.removeRecordedJobs(rjobs);\r
+ log.debug("New records left: " + rjobs.size());\r
+ statdb.insertData(rjobs);\r
}\r
\r
+ /*\r
+ * static void updateTime(File statFile) throws IOException { long lastMod =\r
+ * statFile.lastModified(); FileWriter fw = new FileWriter(statFile);\r
+ * fw.write(new Long(lastMod).toString()); fw.close(); }\r
+ */\r
+\r
/**\r
- * \r
- * @param args\r
- * @throws IOException\r
- * @throws SQLException\r
+ * Not in use\r
*/\r
public static void main(String[] args) throws IOException, SQLException {\r
\r
// updateTime(new File(\r
// "D:\\workspace\\JABA2\\jobsout\\AACon#170462904473672\\STARTED"));\r
\r
- String workDir = PropertyHelperManager.getLocalPath()\r
- + getLocalJobDir().trim();\r
- System.out.println(workDir);\r
File[] files = FileUtil.getFiles("Y:\\fc\\www-jws2\\jaba\\jobsout",\r
directories);\r
List<JobStat> stats = new ArrayList<JobStat>();\r
+ // Accepts job subdirectories only; hidden dot-directories are skipped\r
static FileFilter directories = new FileFilter() {\r
@Override\r
public boolean accept(File pathname) {\r
- return pathname.isDirectory();\r
+ return pathname.isDirectory()\r
+ && !pathname.getName().startsWith(".");\r
}\r
};\r
\r
File jobdir;\r
Map<String, File> files = new HashMap<String, File>();\r
\r
+ // Indexes every file in the job directory by its simple name so that\r
+ // status-marker files can be looked up by JobStatus name\r
- public JobDirectory(File directory) {\r
+ JobDirectory(File directory) {\r
this.jobdir = directory;\r
for (File f : jobdir.listFiles()) {\r
files.put(f.getName(), f);\r
}\r
}\r
\r
+ // A marker file named after the status means the job reached that state\r
- public boolean hasStatus(JobStatus status) {\r
+ boolean hasStatus(JobStatus status) {\r
return files.containsKey(status.toString());\r
}\r
\r
if (startfile == null) {\r
startfile = files.get(JobStatus.SUBMITTED.toString());\r
}\r
- if (startfile != null) {\r
+ try {\r
+ if (startfile != null) {\r
+ String start = FileUtil.readFileToString(startfile);\r
+ starttime = Long.parseLong(start.trim());\r
+ }\r
+ } catch (IOException ignore) {\r
+ log.warn(\r
+ "IOException while reading STARTED status file! Ignoring...",\r
+ ignore);\r
+ // fall back\r
+ starttime = startfile.lastModified();\r
+ } catch (NumberFormatException ignore) {\r
+ log.warn(\r
+ "NumberFormatException while reading STARTED status file! Ignoring...",\r
+ ignore);\r
+ // fall back\r
starttime = startfile.lastModified();\r
- /*\r
- * String start = FileUtil.readFileToString(startfile);\r
- * starttime = Long.parseLong(start.trim());\r
- */\r
}\r
+\r
return starttime;\r
}\r
\r
clustjobId = FileUtil.readFileToString(jobid);\r
}\r
} catch (IOException ioe) {\r
- ioe.printStackTrace();\r
- // TODO LOG\r
+ log.error(\r
+ "IO Exception while reading the content of JOBID file for job "\r
+ + jobid, ioe);\r
}\r
return clustjobId.trim();\r
}\r
long ftime = UNDEFINED;\r
File finished = files.get(JobStatus.FINISHED.toString());\r
if (finished != null) {\r
- ftime = finished.lastModified();\r
- /*\r
- * String start = FileUtil.readFileToString(finished); ftime =\r
- * Long.parseLong(start.trim());\r
- */\r
- // System.out.println("f " + ftime);\r
- }\r
- /*\r
- * } catch (IOException e) { log.log(Level.WARN,\r
- * "Cannot parse finished time: " + e.getMessage(), e); } catch\r
- * (NumberFormatException e) { log.log(Level.WARN,\r
- * "Cannot parse finished time: " + e.getMessage(), e); }\r
- */\r
- return ftime;\r
- }\r
-\r
- @SuppressWarnings("unchecked")\r
- Class<Executable<?>> getWSRunnerName() {\r
- String name = jobdir.getName().split("#")[0];\r
- try {\r
- if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) {\r
- assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1;\r
- name = name.substring(1);\r
+ try {\r
+ if (finished != null) {\r
+ String start = FileUtil.readFileToString(finished);\r
+ ftime = Long.parseLong(start.trim());\r
+ }\r
+ } catch (IOException ignore) {\r
+ log.warn(\r
+ "IOException while reading FINISHED status file! Ignoring...",\r
+ ignore);\r
+ // fall back\r
+ ftime = finished.lastModified();\r
+ } catch (NumberFormatException ignore) {\r
+ log.warn(\r
+ "NumberFormatException while reading FINISHED status file! Ignoring...",\r
+ ignore);\r
+ // fall back\r
+ ftime = finished.lastModified();\r
}\r
- name = ClustalW.class.getPackage().getName() + "." + name;\r
- return (Class<Executable<?>>) Class.forName(name);\r
- } catch (ClassNotFoundException e) {\r
- e.printStackTrace();\r
- throw new RuntimeException(\r
- "Cannot match the directory name to the executable! Executable name is "\r
- + name);\r
}\r
+ return ftime;\r
}\r
\r
+ // Derives the web service type from the job directory (the directory\r
+ // name encodes the runner, e.g. "ClustalW#170462904473672")\r
private Services getService() {\r
- return Services.getService(getWSRunnerName());\r
+ return ServicesUtil.getServiceByJobDirectory(jobdir);\r
}\r
\r
- // Mafft, Muscle, Tcoffee, Clustal task:fasta.in result:fasta.out\r
- // Probcons task:fasta.in result:alignment.out\r
- /*\r
- * TODO replace with Universal names for WS!\r
- */\r
long getResultSize() {\r
- Class<Executable<?>> name = getWSRunnerName();\r
+ Class<? extends Executable<?>> name = ServicesUtil\r
+ .getRunnerByJobDirectory(jobdir);\r
+\r
File f = null;\r
- if (name.getSimpleName().equalsIgnoreCase("Probcons")) {\r
- f = files.get("alignment.out");\r
- } else if (name.getSimpleName().equalsIgnoreCase("ClustalW")) {\r
- f = files.get("output.txt");\r
+ if (name.getSimpleName().equalsIgnoreCase("IUPred")) {\r
+ f = files.get("out.glob");\r
+ if (f == null)\r
+ f = files.get("out.short");\r
+ if (f == null)\r
+ f = files.get("out.long");\r
} else {\r
- f = files.get("fasta.out");\r
+ f = files.get(SkeletalExecutable.OUTPUT);\r
}\r
if (f != null) {\r
return f.length();\r
}\r
\r
long getInputSize() {\r
- File input = files.get("fasta.in");\r
+ Class<? extends Executable<?>> name = ServicesUtil\r
+ .getRunnerByJobDirectory(jobdir);\r
+\r
+ File input = files.get(SkeletalExecutable.INPUT);\r
if (input != null) {\r
return input.length();\r
}\r
return false;\r
return true;\r
}\r
+ }\r
\r
+ // TODO test!\r
+ /**\r
+ * Scans the work directory and records a JobStat for every completed job.\r
+ * Jobs that have not completed yet and directories holding test input are\r
+ * skipped.\r
+ */\r
+ void collectStatistics() {\r
+ // listFiles returns null on an I/O error or if the directory has\r
+ // disappeared since construction; guard against NPE in the loop below\r
+ File[] files = workDirectory.listFiles(directories);\r
+ if (files == null) {\r
+ log.warn("Cannot list the content of the work directory: "\r
+ + workDirectory.getAbsolutePath());\r
+ return;\r
+ }\r
+ for (File file : files) {\r
+ if (!InputFilter.accept(new File(file.getPath() + File.separator\r
+ + SkeletalExecutable.INPUT))) {\r
+ // skip work directory with test input\r
+ continue;\r
+ }\r
+ JobDirectory jd = new JobDirectory(file);\r
+ JobStat jstat = jd.getJobStat();\r
+ // Do not record stats on the job that has not completed yet\r
+ if (hasCompleted(jd)) {\r
+ stats.add(jstat);\r
+ } else {\r
+ log.debug("Skipping the job: " + jstat);\r
+ log.debug("As it has not completed yet");\r
+ }\r
+ // System.out.println(jd.getJobStat().getJobReportTabulated());\r
+ }\r
+ }\r
+ /**\r
+ * Runnable entry point: collects statistics from the work directory and\r
+ * writes any new records to the statistics database. SQLExceptions are\r
+ * logged rather than propagated, so a failure does not escape the thread.\r
+ */\r
+ @Override\r
+ public void run() {\r
+ log.info("Started updating statistics at " + new Date());\r
+ log.info("For directory: " + workDirectory.getAbsolutePath());\r
+\r
+ collectStatistics();\r
+\r
+ StatProcessor local_stats = getStats();\r
+ log.info("Found " + local_stats.getJobNumber() + " jobs!");\r
+ try {\r
+ writeStatToDB();\r
+ } catch (SQLException e) {\r
+ log.error("Fails to update jobs statistics database!");\r
+ log.error(e.getLocalizedMessage(), e);\r
+ }\r
+ log.info("Finished updating statistics at " + new Date());\r
+ }\r
}\r