New code update
[jabaws.git] / webservices / compbio / stat / collector / ExecutionStatCollector.java
index bf3130a..c50ab70 100644 (file)
@@ -1,3 +1,21 @@
+/* Copyright (c) 2013 Alexander Sherstnev\r
+ * Copyright (c) 2011 Peter Troshin\r
+ *  \r
+ *  JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0     \r
+ * \r
+ *  This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ *  Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ *  This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ *  even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ *  License for more details.\r
+ * \r
+ *  A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
 package compbio.stat.collector;\r
 \r
 import java.io.File;\r
@@ -7,29 +25,23 @@ import java.sql.SQLException;
 import java.text.SimpleDateFormat;\r
 import java.util.ArrayList;\r
 import java.util.Date;\r
-import java.util.HashMap;\r
 import java.util.HashSet;\r
 import java.util.List;\r
-import java.util.Map;\r
 import java.util.Set;\r
 \r
 import org.apache.log4j.Logger;\r
 \r
-import compbio.engine.client.Executable;\r
-import compbio.metadata.JobStatus;\r
+import compbio.engine.client.PathValidator;\r
+import compbio.engine.client.SkeletalExecutable;\r
 import compbio.util.FileUtil;\r
-import compbio.ws.client.Services;\r
 \r
 /**\r
- * Number of runs of each WS = number of folders with name\r
- * \r
- * Number of successful runs = all runs with no result file\r
- * \r
- * Per period of time = limit per file creating time Runtime (avg/max) =\r
- * \r
- * started time - finished time\r
- * \r
- * Task & result size = result.size\r
+ * Class assumptions:\r
+ * 1. Number of runs of each WS = number of folders with name\r
+ * 2. Number of successful runs = all runs with no result file\r
+ * 3. Per period of time = limit by file creation time\r
+ * 4. Runtime (avg/max) = finish time - start time\r
+ * 5. Task & result size = result.size\r
  * \r
  * Abandoned runs - not collected runs\r
  * \r
@@ -45,18 +57,16 @@ import compbio.ws.client.Services;
  * work directory for local and cluster tasks = from Helper or cmd parameter. WS\r
  * names - enumeration. Status file names and content.\r
  * \r
- * @author pvtroshin\r
+ * @author Peter Troshin\r
+ * @author Alexander Sherstnev\r
  * \r
  */\r
 public class ExecutionStatCollector implements Runnable {\r
-\r
        static final int UNDEFINED = -1;\r
-\r
-       private static final Logger log = Logger\r
-                       .getLogger(ExecutionStatCollector.class);\r
-\r
        static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");\r
+       private static final Logger log = Logger.getLogger(ExecutionStatCollector.class);\r
 \r
+       final private File workingDirectory;\r
        final private List<JobStat> stats;\r
        /**\r
         * Consider the job that has been working for longer than timeOutInHours\r
@@ -67,28 +77,22 @@ public class ExecutionStatCollector implements Runnable {
        /**\r
         * List subdirectories in the job directory\r
         * \r
-        * @param workDirectory\r
+        * @param workingDirectory\r
         * @param timeOutInHours\r
         */\r
-       public ExecutionStatCollector(String workDirectory, int timeOutInHours) {\r
-               log.info("Starting stat collector for directory: " + workDirectory);\r
+       public ExecutionStatCollector(String workingDirectory, int timeOutInHours) {\r
+               log.info("Starting stat collector for directory: " + workingDirectory);\r
                log.info("Maximum allowed runtime(h): " + timeOutInHours);\r
-               File[] files = FileUtil.getFiles(workDirectory, directories);\r
+               if (!PathValidator.isValidDirectory(workingDirectory)) {\r
+                       throw new IllegalArgumentException("workingDirectory '" + workingDirectory + "' does not exist!");\r
+               }\r
+               this.workingDirectory = new File(workingDirectory);\r
                stats = new ArrayList<JobStat>();\r
-               assert timeOutInHours > 0;\r
-               this.timeOutInHours = timeOutInHours;\r
-               for (File file : files) {\r
-                       JobDirectory jd = new JobDirectory(file);\r
-                       JobStat jstat = jd.getJobStat();\r
-                       // Do not record stats on the job that has not completed yet\r
-                       if (hasCompleted(jd)) {\r
-                               stats.add(jstat);\r
-                       } else {\r
-                               log.debug("Skipping the job: " + jstat);\r
-                               log.debug("As it has not completed yet");\r
-                       }\r
-                       // System.out.println(jd.getJobStat().getJobReportTabulated());\r
+               if (timeOutInHours <= 0) {\r
+                       throw new IllegalArgumentException(\r
+                                       "Timeout value must be greater than 0! Given value: " + timeOutInHours);\r
                }\r
+               this.timeOutInHours = timeOutInHours;\r
        }\r
 \r
        boolean hasCompleted(JobDirectory jd) {\r
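The reworked constructor validates its arguments up front (via PathValidator.isValidDirectory and an explicit timeout check) instead of relying on assertions. A minimal caller sketch, assuming a hypothetical job directory path; only the constructor signature shown in this patch is used:

public class CollectorStartupSketch {
    public static void main(String[] args) {
        try {
            // "/data/jobsout" and the 24 h timeout are placeholder values
            ExecutionStatCollector collector =
                    new ExecutionStatCollector("/data/jobsout", 24);
            new Thread(collector).start();
        } catch (IllegalArgumentException e) {
            // thrown when the directory does not exist or the timeout is <= 0
            System.err.println("Cannot start stat collection: " + e.getMessage());
        }
    }
}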
@@ -104,11 +108,18 @@ public class ExecutionStatCollector implements Runnable {
                return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > timeOutInHours;\r
        }\r
 \r
-       public StatProcessor getStats() {\r
+       /*\r
+        * Make sure that the collectStatistics method was called prior to calling\r
+        * this! TODO consider running collectStatistics from here on the first call\r
+        */\r
+       StatProcessor getStats() {\r
+               if (stats.isEmpty()) {\r
+                       log.info("Please make sure the collectStatistics method was called prior to calling getStats()!");\r
+               }\r
                return new StatProcessor(stats);\r
        }\r
 \r
-       public void writeStatToDB() throws SQLException {\r
+       void writeStatToDB() throws SQLException {\r
                Set<JobStat> rjobs = new HashSet<JobStat>(stats);\r
                StatDB statdb = new StatDB();\r
                log.debug("Removing records that has already been recorded");\r
@@ -118,17 +129,8 @@ public class ExecutionStatCollector implements Runnable {
                statdb.insertData(rjobs);\r
        }\r
 \r
-       /*\r
-        * static void updateTime(File statFile) throws IOException { long lastMod =\r
-        * statFile.lastModified(); FileWriter fw = new FileWriter(statFile);\r
-        * fw.write(new Long(lastMod).toString()); fw.close(); }\r
-        */\r
-\r
        /**\r
-        * \r
-        * @param args\r
-        * @throws IOException\r
-        * @throws SQLException\r
+        * Not in use\r
         */\r
        public static void main(String[] args) throws IOException, SQLException {\r
 \r
@@ -158,158 +160,39 @@ public class ExecutionStatCollector implements Runnable {
        static FileFilter directories = new FileFilter() {\r
                @Override\r
                public boolean accept(File pathname) {\r
-                       return pathname.isDirectory()\r
-                                       && !pathname.getName().startsWith(".");\r
+                       return pathname.isDirectory() && !pathname.getName().startsWith(".");\r
                }\r
        };\r
 \r
-       static class JobDirectory {\r
-\r
-               File jobdir;\r
-               Map<String, File> files = new HashMap<String, File>();\r
-\r
-               public JobDirectory(File directory) {\r
-                       this.jobdir = directory;\r
-                       for (File f : jobdir.listFiles()) {\r
-                               files.put(f.getName(), f);\r
-                       }\r
-               }\r
-\r
-               public boolean hasStatus(JobStatus status) {\r
-                       return files.containsKey(status.toString());\r
-               }\r
-\r
-               boolean isCollected() {\r
-                       return hasStatus(JobStatus.COLLECTED);\r
-               }\r
-\r
-               boolean isCancelled() {\r
-                       return hasStatus(JobStatus.CANCELLED);\r
-               }\r
-\r
-               long getStartTime() {\r
-                       long starttime = UNDEFINED;\r
-                       File startfile = files.get(JobStatus.STARTED.toString());\r
-                       if (startfile == null) {\r
-                               startfile = files.get(JobStatus.SUBMITTED.toString());\r
-                       }\r
-                       if (startfile != null) {\r
-                               starttime = startfile.lastModified();\r
-                               /*\r
-                                * String start = FileUtil.readFileToString(startfile);\r
-                                * starttime = Long.parseLong(start.trim());\r
-                                */\r
-                       }\r
-                       return starttime;\r
-               }\r
-\r
-               String getClusterJobID() {\r
-                       String clustjobId = "";\r
-                       File jobid = files.get("JOBID");\r
-                       try {\r
-                               if (jobid != null) {\r
-                                       clustjobId = FileUtil.readFileToString(jobid);\r
+       // TODO test!\r
+       void collectStatistics() {\r
+               // clear stats array;\r
+               stats.clear();\r
+               File[] dirs = workingDirectory.listFiles(directories);\r
+               for (File dir : dirs) {\r
+               // skip work directories that contain the test input\r
+                       log.debug("check directory: " + dir.getName() + "...");\r
+                       if (InputFilter.accept(new File(dir.getPath() + File.separator + SkeletalExecutable.INPUT))) {\r
+                               JobDirectory jd = new JobDirectory(dir);\r
+                               JobStat jstat = jd.getJobStat();\r
+                               // Do not record stats on the job that has not completed yet\r
+                               if (hasCompleted(jd)) {\r
+                                       stats.add(jstat);\r
+                               } else {\r
+                                       log.debug("Skipping the job: " + jstat + " as it has not completed yet");\r
                                }\r
-                       } catch (IOException ioe) {\r
-                               ioe.printStackTrace();\r
-                               // TODO LOG\r
-                       }\r
-                       return clustjobId.trim();\r
-               }\r
-\r
-               long getFinishedTime() {\r
-                       long ftime = UNDEFINED;\r
-                       File finished = files.get(JobStatus.FINISHED.toString());\r
-                       if (finished != null) {\r
-                               ftime = finished.lastModified();\r
-                               /*\r
-                                * String start = FileUtil.readFileToString(finished); ftime =\r
-                                * Long.parseLong(start.trim());\r
-                                */\r
-                               // System.out.println("f " + ftime);\r
-                       }\r
-                       /*\r
-                        * } catch (IOException e) { log.log(Level.WARN,\r
-                        * "Cannot parse finished time: " + e.getMessage(), e); } catch\r
-                        * (NumberFormatException e) { log.log(Level.WARN,\r
-                        * "Cannot parse finished time: " + e.getMessage(), e); }\r
-                        */\r
-                       return ftime;\r
-               }\r
-\r
-               private Services getService() {\r
-                       return Services.getServiceByJobDirectory(jobdir);\r
-               }\r
-\r
-               // Mafft, Muscle, Tcoffee, Clustal task:fasta.in result:fasta.out\r
-               // Probcons task:fasta.in result:alignment.out\r
-               /*\r
-                * TODO replace with Universal names for WS!\r
-                */\r
-               long getResultSize() {\r
-                       Class<? extends Executable<?>> name = Services\r
-                                       .getRunnerByJobDirectory(jobdir);\r
-\r
-                       File f = null;\r
-                       if (name.getSimpleName().equalsIgnoreCase("Probcons")) {\r
-                               f = files.get("alignment.out");\r
-                       } else if (name.getSimpleName().equalsIgnoreCase("ClustalW")) {\r
-                               f = files.get("output.txt");\r
                        } else {\r
-                               f = files.get("fasta.out");\r
+                               log.trace("training input: " + dir.getName() + File.separator + SkeletalExecutable.INPUT);\r
                        }\r
-                       if (f != null) {\r
-                               return f.length();\r
-                       }\r
-                       return UNDEFINED;\r
-               }\r
-\r
-               long getInputSize() {\r
-                       File input = files.get("fasta.in");\r
-                       if (input != null) {\r
-                               return input.length();\r
-                       }\r
-                       return UNDEFINED;\r
-               }\r
-\r
-               JobStat getJobStat() {\r
-                       return JobStat.newInstance(getService(), getClusterJobID(),\r
-                                       jobdir.getName(), getStartTime(), getFinishedTime(),\r
-                                       getInputSize(), getResultSize(), isCancelled(),\r
-                                       isCollected());\r
-               }\r
-\r
-               @Override\r
-               public int hashCode() {\r
-                       final int prime = 31;\r
-                       int result = 1;\r
-                       result = prime * result\r
-                                       + ((jobdir == null) ? 0 : jobdir.hashCode());\r
-                       return result;\r
-               }\r
-\r
-               @Override\r
-               public boolean equals(Object obj) {\r
-                       if (this == obj)\r
-                               return true;\r
-                       if (obj == null)\r
-                               return false;\r
-                       if (getClass() != obj.getClass())\r
-                               return false;\r
-                       JobDirectory other = (JobDirectory) obj;\r
-                       if (jobdir == null) {\r
-                               if (other.jobdir != null)\r
-                                       return false;\r
-                       } else if (!jobdir.equals(other.jobdir))\r
-                               return false;\r
-                       return true;\r
                }\r
-\r
        }\r
 \r
        @Override\r
        public void run() {\r
                log.info("Started updating statistics at " + new Date());\r
+               log.info("For directory: " + workingDirectory.getAbsolutePath());\r
+\r
+               collectStatistics();\r
 \r
                StatProcessor local_stats = getStats();\r
                log.info("Found " + local_stats.getJobNumber() + " jobs!");\r
@@ -321,5 +204,4 @@ public class ExecutionStatCollector implements Runnable {
                }\r
                log.info("Finished updating statistics at " + new Date());\r
        }\r
-\r
 }\r
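Because run() now performs the full collect-and-store cycle, the collector can be driven by a standard scheduler. A hypothetical wiring sketch (the actual JABAWS scheduling code is not part of this patch; the path and interval are placeholders):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class CollectorSchedulingSketch {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        // run once a day against the placeholder job directory, with a 24 h job timeout
        scheduler.scheduleAtFixedRate(
                new compbio.stat.collector.ExecutionStatCollector("/data/jobsout", 24),
                0, 24, TimeUnit.HOURS);
    }
}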