Delete old job directory automatically
author    Sasha <asherstnev@main-laptop2>
          Fri, 26 Apr 2013 17:26:47 +0000 (18:26 +0100)
committer Sasha <asherstnev@main-laptop2>
          Fri, 26 Apr 2013 17:26:47 +0000 (18:26 +0100)
conf/Engine.cluster.properties
conf/Engine.local.properties
engine/compbio/engine/Cleaner.java
webservices/compbio/stat/collector/DirCleaner.java [new file with mode: 0644]
webservices/compbio/stat/collector/ExecutionStatCollector.java
webservices/compbio/stat/servlet/StatisticCollector.java
webservices/compbio/ws/server/ShutdownEngines.java

index caa2541..e18fa4c 100644 (file)
@@ -16,7 +16,18 @@ cluster.tmp.directory=/cluster/gjb_lab/fc/www-jws2/jaba/jobsout
 \r
 # Enable/disable cluster statistics collector \r
 cluster.stat.collector.enable=false\r
+# Frequency of statistics collection (in minutes)\r
+# normal rate: once a day = 24 * 60 = 1440\r
+cluster.stat.collector.update.frequency=1440\r
 \r
 # Maximum amount of time the job is considered running in hours\r
 # Optional defaults to 7 days (168h) \r
 cluster.stat.maxruntime=24\r
+\r
+# Maximum lifespan of a job directory (in hours),\r
+# -1 means the directories are never deleted\r
+# Default is one week, i.e. 168 hours\r
+cluster.jobdir.maxlifespan=168\r
+# Frequency of job directory cleaning (in minutes)\r
+# normal rate: once a day = 24 * 60 = 1440\r
+cluster.jobdir.cleaning.frequency=1
\ No newline at end of file
index 2f211e8..71a8174 100644 (file)
@@ -16,7 +16,18 @@ local.tmp.directory=jobsout
 \r
 # Enable/disable cluster statistics collector\r
 local.stat.collector.enable=true\r
+# Frequency of statistics collection (in minutes)\r
+# normal rate: once a day = 24 * 60 = 1440\r
+local.stat.collector.update.frequency=1\r
 \r
 # Maximum amount of time the job is considered running in hours\r
 # Optional defaults to 24 hours\r
-local.stat.maxruntime=24
\ No newline at end of file
+local.stat.maxruntime=24\r
+\r
+# Maximum lifespan of a job directory (in hours),\r
+# -1 means the directories are never deleted\r
+# Default is one week, i.e. 168 hours\r
+local.jobdir.maxlifespan=168\r
+# Frequency of job directory cleaning (in minutes)\r
+# normal rate: once a day = 24 * 60 = 1440\r
+local.jobdir.cleaning.frequency=1\r
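
Both property pairs behave the same way for the cluster and local engines: *.jobdir.maxlifespan bounds the age of a job directory in hours (-1, or any value below 1, disables deletion), and *.jobdir.cleaning.frequency sets the cleaner period in minutes. The web application reads them through its PropertyHelper; the sketch below shows the same read-and-default logic with plain java.util.Properties, so the fallback values and the standalone class are the only assumptions:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class JobDirConfig {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            try (FileInputStream in = new FileInputStream("conf/Engine.local.properties")) {
                props.load(in);
            }
            // -1 (or any value < 1) means job directories are never deleted
            int lifespanHours = parseIntOr(props.getProperty("local.jobdir.maxlifespan"), -1);
            // cleaner period in minutes; 1440 = once a day
            int frequencyMin = parseIntOr(props.getProperty("local.jobdir.cleaning.frequency"), 1440);
            System.out.println("lifespan(h)=" + lifespanHours + ", frequency(min)=" + frequencyMin);
        }

        static int parseIntOr(String value, int fallback) {
            if (value == null) {
                return fallback;
            }
            try {
                return Integer.parseInt(value.trim());
            } catch (NumberFormatException e) {
                return fallback;
            }
        }
    }
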
index 4813388..ce9c774 100644 (file)
@@ -22,11 +22,17 @@ import java.util.List;
 \r
 import org.apache.log4j.Logger;\r
 \r
+import compbio.engine.conf.PropertyHelperManager;\r
 import compbio.engine.client.ConfiguredExecutable;\r
 import compbio.engine.client.PathValidator;\r
 import compbio.engine.local.ExecutableWrapper;\r
+import compbio.util.PropertyHelper;\r
 \r
-@Deprecated\r
+//@Deprecated\r
+\r
+// TODO: understand what this class does and why it was marked deprecated;\r
+// check file timestamps before deleting\r
 public class Cleaner {\r
 \r
        private static final Logger log = Logger.getLogger(Cleaner.class);\r
@@ -56,10 +62,8 @@ public class Cleaner {
                // Remove process std output and error capture files, do not care\r
                // whether succeed or not\r
                // as these are only created for local processes, so may not exist\r
-               removeFile(exec.getWorkDirectory() + File.separator\r
-                               + ExecutableWrapper.PROC_OUT_FILE);\r
-               removeFile(exec.getWorkDirectory() + File.separator\r
-                               + ExecutableWrapper.PROC_ERR_FILE);\r
+               removeFile(exec.getWorkDirectory() + File.separator + ExecutableWrapper.PROC_OUT_FILE);\r
+               removeFile(exec.getWorkDirectory() + File.separator + ExecutableWrapper.PROC_ERR_FILE);\r
                // Remove the task directory if all files were successfully removed\r
                return removeFile(exec.getWorkDirectory());\r
        }\r
@@ -88,12 +92,11 @@ public class Cleaner {
 \r
        public static boolean deleteAllFiles(String directory) {\r
                if (compbio.util.Util.isEmpty(directory)) {\r
-                       throw new NullPointerException("Direcotry must be provided! ");\r
+                       throw new NullPointerException("Directory must be provided! ");\r
                }\r
                File rootdir = new File(directory);\r
                if (!rootdir.exists()) {\r
-                       log.error("Directory " + directory\r
-                                       + " does not exist. Have been deleted already?");\r
+                       log.error("Directory " + directory + " does not exist. Have been deleted already?");\r
                        return false;\r
                }\r
                if (!rootdir.isDirectory()) {\r
@@ -105,15 +108,24 @@ public class Cleaner {
                int deletedCount = 0;\r
                for (File f : files) {\r
                        if (f.isDirectory()) {\r
-                               log.error("Cannot delete subdirecotries! Skipping...");\r
+                               log.error("Cannot delete subdirectories! Skipping...");\r
                        } else {\r
-                               boolean deleted = f.delete();\r
-                               if (deleted) {\r
+                               if (f.delete()) {\r
                                        deletedCount++;\r
                                }\r
                        }\r
                }\r
                return deletedCount == files.length;\r
        }\r
 \r
+       public static boolean deleteDirectory(String directory) {\r
+               if (deleteAllFiles(directory)) {\r
+                       File rootdir = new File(directory);\r
+                       return rootdir.delete();\r
+               }\r
+               return false;\r
+       }\r
+\r
 }\r
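
Note that deleteAllFiles deliberately skips subdirectories, so the new deleteDirectory only works for flat job directories: it first empties the directory and then removes it, relying on the fact that File.delete() fails on a non-empty directory. A minimal standalone sketch of the same two-step pattern (the class name is illustrative):

    import java.io.File;

    public class FlatDirDelete {

        /** Deletes the regular files in dir, then dir itself; subdirectories are skipped. */
        static boolean deleteDirectory(File dir) {
            File[] files = dir.listFiles();
            if (files == null) {
                return false; // not a directory, or an I/O error
            }
            boolean allDeleted = true;
            for (File f : files) {
                if (f.isFile()) {
                    allDeleted &= f.delete();
                } else {
                    allDeleted = false; // subdirectory left in place, as in Cleaner
                }
            }
            // File.delete() fails on a non-empty directory, so this only succeeds
            // when every entry above was removed
            return allDeleted && dir.delete();
        }
    }
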
diff --git a/webservices/compbio/stat/collector/DirCleaner.java b/webservices/compbio/stat/collector/DirCleaner.java
new file mode 100644 (file)
index 0000000..d10461e
--- /dev/null
@@ -0,0 +1,129 @@
+/* Copyright (c) 2013 Alexander Sherstnev\r
+ *  \r
+ *  JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.1     \r
+ * \r
+ *  This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ *  Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ *  This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ *  even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ *  License for more details.\r
+ * \r
+ *  A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
+package compbio.stat.collector;\r
+\r
+import java.io.File;\r
+import java.io.FileFilter;\r
+import java.util.Date;\r
+\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.engine.Cleaner;\r
+import compbio.engine.client.PathValidator;\r
+\r
+/**\r
+ * Deletes directories of completed jobs once they exceed the configured\r
+ * maximum lifespan. A job is considered completed if it has a result,\r
+ * was cancelled, or is marked as finished.\r
+ * \r
+ * @author Alexander Sherstnev\r
+ * \r
+ */\r
+public class DirCleaner implements Runnable {\r
+\r
+       private static final Logger log = Logger.getLogger(DirCleaner.class);\r
+\r
+       final private File workDirectory;\r
+       final private int lifeSpanInHours;\r
+\r
+       /**\r
+        * @param workDirectory\r
+        *            the directory with job subdirectories to clean\r
+        * @param lifeSpanInHours\r
+        *            maximum allowed age of a job directory (in hours)\r
+        */\r
+       public DirCleaner(String workDirectory, int lifeSpanInHours) {\r
+               log.info("Starting cleaning for directory: " + workDirectory);\r
+               log.info("Maximum allowed directory life span (h): " + lifeSpanInHours);\r
+               if (!PathValidator.isValidDirectory(workDirectory)) {\r
+                       throw new IllegalArgumentException("workDirectory '" + workDirectory + "' does not exist!");\r
+               }\r
+               this.workDirectory = new File(workDirectory);\r
+               this.lifeSpanInHours = lifeSpanInHours;\r
+       }\r
+\r
+       boolean hasCompleted(JobDirectory jd) {\r
+               JobStat jstat = jd.getJobStat();\r
+               return jstat.hasResult() || jstat.getIsCancelled() || jstat.getIsFinished();\r
+       }\r
+\r
+       boolean livesOverLifeSpan(JobDirectory jd) {\r
+               return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > lifeSpanInHours;\r
+       }\r
+\r
+       static FileFilter directories = new FileFilter() {\r
+               @Override\r
+               public boolean accept(File pathname) {\r
+                       return pathname.isDirectory() && !pathname.getName().startsWith(".");\r
+               }\r
+       };\r
+       \r
+       // TODO test!\r
+       void doCleaning() {\r
+               File[] dirs = workDirectory.listFiles(directories);\r
+               for (File dir : dirs) {\r
+                       // Do not look at dirs with unfinished jobs\r
+                       JobDirectory jd = new JobDirectory(dir);\r
+                       if (hasCompleted(jd) && livesOverLifeSpan(jd)) {\r
+                               Cleaner.deleteDirectory(workDirectory.getAbsolutePath() + File.separator + dir.getName());\r
+                               log.debug("Directory " + dir.getName() + " is deleted in doCleaning");\r
+                       } else {\r
+                               log.debug("Directory " + dir.getName() + " is kept: the job is unfinished or the directory is too new");\r
+                       }\r
+               }\r
+       }\r
+\r
+       @Override\r
+       public void run() {\r
+               log.info("Started cleaning job directory at " + new Date());\r
+               log.info("For directory: " + workDirectory.getAbsolutePath());\r
+               doCleaning();\r
+               log.info("Finished cleaning job directory at " + new Date());\r
+       }\r
+}\r
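
livesOverLifeSpan derives a directory's age from its lastModified() timestamp: the millisecond difference is integer-divided by 1000 * 60 * 60 to give whole hours, which is then compared with the configured lifespan. A quick standalone check of that arithmetic (the timestamps are made up):

    public class AgeCheck {
        public static void main(String[] args) {
            long lifeSpanInHours = 168; // one week, the documented default
            long now = System.currentTimeMillis();
            long lastModified = now - 200L * 60 * 60 * 1000; // directory last touched 200h ago
            long ageHours = (now - lastModified) / (1000 * 60 * 60);
            System.out.println(ageHours + "h old, delete = " + (ageHours > lifeSpanInHours));
            // prints: 200h old, delete = true
        }
    }
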
index 62f39c9..c50ab70 100644 (file)
@@ -1,4 +1,5 @@
-/* Copyright (c) 2011 Peter Troshin\r
+/* Copyright (c) 2013 Alexander Sherstnev\r
+ * Copyright (c) 2011 Peter Troshin\r
  *  \r
  *  JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0     \r
  * \r
@@ -24,32 +25,23 @@ import java.sql.SQLException;
 import java.text.SimpleDateFormat;\r
 import java.util.ArrayList;\r
 import java.util.Date;\r
-import java.util.HashMap;\r
 import java.util.HashSet;\r
 import java.util.List;\r
-import java.util.Map;\r
 import java.util.Set;\r
 \r
 import org.apache.log4j.Logger;\r
 \r
-import compbio.engine.client.Executable;\r
 import compbio.engine.client.PathValidator;\r
 import compbio.engine.client.SkeletalExecutable;\r
-import compbio.metadata.JobStatus;\r
 import compbio.util.FileUtil;\r
-import compbio.ws.client.Services;\r
-import compbio.ws.client.ServicesUtil;\r
 \r
 /**\r
- * Number of runs of each WS = number of folders with name\r
- * \r
- * Number of successful runs = all runs with no result file\r
- * \r
- * Per period of time = limit per file creating time Runtime (avg/max) =\r
- * \r
- * started time - finished time\r
- * \r
- * Task & result size = result.size\r
+ * Class assumptions:\r
+ * 1. Number of runs of each WS = number of folders with name\r
+ * 2. Number of successful runs = all runs with no result file\r
+ * 3. Per period of time = limit per file creating time \r
+ * 4. Runtime (avg/max) = finish time - start time\r
+ * 5. Task & result size = result.size\r
  * \r
  * Abandoned runs - not collected runs\r
  * \r
@@ -65,18 +57,16 @@ import compbio.ws.client.ServicesUtil;
  * work directory for local and cluster tasks = from Helper or cmd parameter. WS\r
  * names - enumeration. Status file names and content.\r
  * \r
- * @author pvtroshin\r
+ * @author Peter Troshin\r
+ * @author Alexander Sherstnev\r
  * \r
  */\r
 public class ExecutionStatCollector implements Runnable {\r
-\r
        static final int UNDEFINED = -1;\r
-\r
-       private static final Logger log = Logger.getLogger(ExecutionStatCollector.class);\r
-\r
        static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");\r
+       private static final Logger log = Logger.getLogger(ExecutionStatCollector.class);\r
 \r
-       final private File workDirectory;\r
+       final private File workingDirectory;\r
        final private List<JobStat> stats;\r
        /**\r
         * Consider the job that has been working for longer than timeOutInHours\r
@@ -87,16 +77,16 @@ public class ExecutionStatCollector implements Runnable {
        /**\r
         * List subdirectories in the job directory\r
         * \r
-        * @param workDirectory\r
+        * @param workingDirectory\r
         * @param timeOutInHours\r
         */\r
-       public ExecutionStatCollector(String workDirectory, int timeOutInHours) {\r
-               log.info("Starting stat collector for directory: " + workDirectory);\r
+       public ExecutionStatCollector(String workingDirectory, int timeOutInHours) {\r
+               log.info("Starting stat collector for directory: " + workingDirectory);\r
                log.info("Maximum allowed runtime(h): " + timeOutInHours);\r
-               if (!PathValidator.isValidDirectory(workDirectory)) {\r
-                       throw new IllegalArgumentException("workDirectory '" + workDirectory + "' does not exist!");\r
+               if (!PathValidator.isValidDirectory(workingDirectory)) {\r
+                       throw new IllegalArgumentException("workingDirectory '" + workingDirectory + "' does not exist!");\r
                }\r
-               this.workDirectory = new File(workDirectory);\r
+               this.workingDirectory = new File(workingDirectory);\r
                stats = new ArrayList<JobStat>();\r
                if (timeOutInHours <= 0) {\r
                        throw new IllegalArgumentException(\r
@@ -139,12 +129,6 @@ public class ExecutionStatCollector implements Runnable {
                statdb.insertData(rjobs);\r
        }\r
 \r
-       /*\r
-        * static void updateTime(File statFile) throws IOException { long lastMod =\r
-        * statFile.lastModified(); FileWriter fw = new FileWriter(statFile);\r
-        * fw.write(new Long(lastMod).toString()); fw.close(); }\r
-        */\r
-\r
        /**\r
         * Not in use\r
         */\r
@@ -176,201 +160,37 @@ public class ExecutionStatCollector implements Runnable {
        static FileFilter directories = new FileFilter() {\r
                @Override\r
                public boolean accept(File pathname) {\r
-                       return pathname.isDirectory()\r
-                                       && !pathname.getName().startsWith(".");\r
+                       return pathname.isDirectory() && !pathname.getName().startsWith(".");\r
                }\r
        };\r
 \r
-       static class JobDirectory {\r
-\r
-               File jobdir;\r
-               Map<String, File> files = new HashMap<String, File>();\r
-\r
-               JobDirectory(File directory) {\r
-                       this.jobdir = directory;\r
-                       for (File f : jobdir.listFiles()) {\r
-                               files.put(f.getName(), f);\r
-                       }\r
-               }\r
-\r
-               boolean hasStatus(JobStatus status) {\r
-                       return files.containsKey(status.toString());\r
-               }\r
-\r
-               boolean isCollected() {\r
-                       return hasStatus(JobStatus.COLLECTED);\r
-               }\r
-\r
-               boolean isCancelled() {\r
-                       return hasStatus(JobStatus.CANCELLED);\r
-               }\r
-\r
-               long getStartTime() {\r
-                       long starttime = UNDEFINED;\r
-                       File startfile = files.get(JobStatus.STARTED.toString());\r
-                       if (startfile == null) {\r
-                               startfile = files.get(JobStatus.SUBMITTED.toString());\r
-                       }\r
-                       try {\r
-                               if (startfile != null) {\r
-                                       String start = FileUtil.readFileToString(startfile);\r
-                                       starttime = Long.parseLong(start.trim());\r
-                               }\r
-                       } catch (IOException ignore) {\r
-                               log.warn(\r
-                                               "IOException while reading STARTED status file! Ignoring...",\r
-                                               ignore);\r
-                               // fall back\r
-                               starttime = startfile.lastModified();\r
-                       } catch (NumberFormatException ignore) {\r
-                               log.warn(\r
-                                               "NumberFormatException while reading STARTED status file! Ignoring...",\r
-                                               ignore);\r
-                               // fall back\r
-                               starttime = startfile.lastModified();\r
-                       }\r
-\r
-                       return starttime;\r
-               }\r
-\r
-               String getClusterJobID() {\r
-                       String clustjobId = "";\r
-                       File jobid = files.get("JOBID");\r
-                       try {\r
-                               if (jobid != null) {\r
-                                       clustjobId = FileUtil.readFileToString(jobid);\r
-                               }\r
-                       } catch (IOException ioe) {\r
-                               log.error(\r
-                                               "IO Exception while reading the content of JOBID file for job "\r
-                                                               + jobid, ioe);\r
-                       }\r
-                       return clustjobId.trim();\r
-               }\r
-\r
-               long getFinishedTime() {\r
-                       long ftime = UNDEFINED;\r
-                       File finished = files.get(JobStatus.FINISHED.toString());\r
-                       if (finished != null) {\r
-                               try {\r
-                                       if (finished != null) {\r
-                                               String start = FileUtil.readFileToString(finished);\r
-                                               ftime = Long.parseLong(start.trim());\r
-                                       }\r
-                               } catch (IOException ignore) {\r
-                                       log.warn(\r
-                                                       "IOException while reading FINISHED status file! Ignoring...",\r
-                                                       ignore);\r
-                                       // fall back\r
-                                       ftime = finished.lastModified();\r
-                               } catch (NumberFormatException ignore) {\r
-                                       log.warn(\r
-                                                       "NumberFormatException while reading FINISHED status file! Ignoring...",\r
-                                                       ignore);\r
-                                       // fall back\r
-                                       ftime = finished.lastModified();\r
-                               }\r
-                       }\r
-                       return ftime;\r
-               }\r
-\r
-               private Services getService() {\r
-                       return ServicesUtil.getServiceByJobDirectory(jobdir);\r
-               }\r
-\r
-               long getResultSize() {\r
-                       Class<? extends Executable<?>> name = ServicesUtil\r
-                                       .getRunnerByJobDirectory(jobdir);\r
-\r
-                       File f = null;\r
-                       if (name.getSimpleName().equalsIgnoreCase("IUPred")) {\r
-                               f = files.get("out.glob");\r
-                               if (f == null)\r
-                                       f = files.get("out.short");\r
-                               if (f == null)\r
-                                       f = files.get("out.long");\r
-                       } else {\r
-                               f = files.get(SkeletalExecutable.OUTPUT);\r
-                       }\r
-                       if (f != null) {\r
-                               return f.length();\r
-                       }\r
-                       return UNDEFINED;\r
-               }\r
-\r
-               long getInputSize() {\r
-                       Class<? extends Executable<?>> name = ServicesUtil\r
-                                       .getRunnerByJobDirectory(jobdir);\r
-\r
-                       File input = files.get(SkeletalExecutable.INPUT);\r
-                       if (input != null) {\r
-                               return input.length();\r
-                       }\r
-                       return UNDEFINED;\r
-               }\r
-\r
-               JobStat getJobStat() {\r
-                       return JobStat.newInstance(getService(), getClusterJobID(),\r
-                                       jobdir.getName(), getStartTime(), getFinishedTime(),\r
-                                       getInputSize(), getResultSize(), isCancelled(),\r
-                                       isCollected());\r
-               }\r
-\r
-               @Override\r
-               public int hashCode() {\r
-                       final int prime = 31;\r
-                       int result = 1;\r
-                       result = prime * result\r
-                                       + ((jobdir == null) ? 0 : jobdir.hashCode());\r
-                       return result;\r
-               }\r
-\r
-               @Override\r
-               public boolean equals(Object obj) {\r
-                       if (this == obj)\r
-                               return true;\r
-                       if (obj == null)\r
-                               return false;\r
-                       if (getClass() != obj.getClass())\r
-                               return false;\r
-                       JobDirectory other = (JobDirectory) obj;\r
-                       if (jobdir == null) {\r
-                               if (other.jobdir != null)\r
-                                       return false;\r
-                       } else if (!jobdir.equals(other.jobdir))\r
-                               return false;\r
-                       return true;\r
-               }\r
-       }\r
-\r
        // TODO test!\r
        void collectStatistics() {\r
                // clear stats array;\r
                stats.clear();\r
-               File[] files = workDirectory.listFiles(directories);\r
-               for (File file : files) {\r
-                       // skip work directory with test input\r
-                       if (InputFilter.accept(new File(file.getPath() + File.separator + SkeletalExecutable.INPUT))) {\r
-                               JobDirectory jd = new JobDirectory(file);\r
+               File[] dirs = workingDirectory.listFiles(directories);\r
+               for (File dir : dirs) {\r
+                       // skip work directories with test input\r
+                       log.debug("check directory: " + dir.getName() + "...");\r
+                       if (InputFilter.accept(new File(dir.getPath() + File.separator + SkeletalExecutable.INPUT))) {\r
+                               JobDirectory jd = new JobDirectory(dir);\r
                                JobStat jstat = jd.getJobStat();\r
                                // Do not record stats on the job that has not completed yet\r
                                if (hasCompleted(jd)) {\r
                                        stats.add(jstat);\r
-                                       System.out.println("added: id = " + jd);\r
                                } else {\r
-                                       log.debug("Skipping the job: " + jstat);\r
-                                       log.debug("As it has not completed yet");\r
+                                       log.debug("Skipping the job: " + jstat + " as it has not completed yet");\r
                                }\r
-                               // System.out.println(jd.getJobStat().getJobReportTabulated());\r
                        } else {\r
-                               log.trace("training input: " + file.getPath() + File.separator + SkeletalExecutable.INPUT);\r
+                               log.trace("training input: " + dir.getName() + File.separator + SkeletalExecutable.INPUT);\r
                        }\r
                }\r
        }\r
+\r
        @Override\r
        public void run() {\r
                log.info("Started updating statistics at " + new Date());\r
-               log.info("For directory: " + workDirectory.getAbsolutePath());\r
+               log.info("For directory: " + workingDirectory.getAbsolutePath());\r
 \r
                collectStatistics();\r
 \r
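
The JobDirectory helper removed above (both collectors still use it, so it evidently now lives in its own class) reads job start and finish times as epoch milliseconds from the STARTED/FINISHED status files and falls back to the file's modification time when the content cannot be read or parsed. A standalone sketch of that pattern, using java.nio in place of the project's FileUtil:

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public class StatusFileTime {

        /**
         * Reads an epoch-millisecond timestamp from a status file, falling back
         * to the file's mtime when the content is missing or unparsable.
         */
        static long readTimestamp(File statusFile) {
            try {
                String content = new String(Files.readAllBytes(statusFile.toPath()));
                return Long.parseLong(content.trim());
            } catch (IOException | NumberFormatException e) {
                return statusFile.lastModified(); // same fallback as the collector
            }
        }
    }
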
index 3a7d1bc..8a01122 100644 (file)
@@ -68,32 +68,35 @@ public class StatisticCollector implements ServletContextListener {
                int clusterMaxRuntime = getClusterJobTimeOut();\r
 \r
                int localMaxRuntime = getLocalJobTimeOut();\r
-               String localWorkDir = compbio.engine.client.Util\r
-                               .convertToAbsolute(getLocalJobDir());\r
+               String localWorkDir = compbio.engine.client.Util.convertToAbsolute(getLocalJobDir());\r
 \r
-               log.info("Initializing statistics collector");\r
+               log.info("Initializing statistics collectors");\r
                executor = Executors.newScheduledThreadPool(2);\r
 \r
                if (collectClusterStats()) {\r
-                       // collect statistics every minute. DEBUGGING ONLY!!!\r
-                       long CollectingFrequency = 1;\r
-                       // collect statistics every day. normal running... \r
-                       //CollectingFrequency = 24 * 60;\r
+                       // collect statistics with this frequency\r
+                       long CollectingFrequency = updateClusterStatsFrequency();\r
+                       // CollectingFrequency = 0 if the parameter is not found\r
+                       if (0 == CollectingFrequency) {\r
+                               CollectingFrequency = 1;\r
+                       }\r
 \r
                        ExecutionStatCollector clusterCollector = new ExecutionStatCollector(clusterWorkDir, clusterMaxRuntime);\r
-                       clustercf = executor.scheduleAtFixedRate(clusterCollector, 1, CollectingFrequency, TimeUnit.MINUTES);\r
-                       log.info("Collecting cluster statistics ");\r
+                       clustercf = executor.scheduleAtFixedRate(clusterCollector, 30, 60 * CollectingFrequency, TimeUnit.SECONDS);\r
+                       log.info("Collecting cluster statistics every " + CollectingFrequency + " minutes");\r
                } else {\r
                        log.info("Cluster statistics collector is disabled or not configured! ");\r
                }\r
                if (collectLocalStats()) {\r
-                       // collect statistics every minute. DEBUGGING ONLY!!!\r
-                       long CollectingFrequency = 1;\r
-                       // collect statistics every day. normal running... \r
-                       //CollectingFrequency = 24 * 60;\r
+                       // collect statistics with this frequency\r
+                       long CollectingFrequency = updateLocalStatsFrequency();\r
+                       // CollectingFrequency = 0 if the parameter is not found\r
+                       if (0 == CollectingFrequency) {\r
+                               CollectingFrequency = 1;\r
+                       }\r
 \r
                        ExecutionStatCollector localCollector = new ExecutionStatCollector(     localWorkDir, localMaxRuntime);\r
-                       localcf = executor.scheduleAtFixedRate(localCollector, 1, CollectingFrequency, TimeUnit.MINUTES);\r
+                       localcf = executor.scheduleAtFixedRate(localCollector, 30, 60 * CollectingFrequency, TimeUnit.SECONDS);\r
                        log.info("Collecting local statistics every " + CollectingFrequency + " minutes");\r
                } else {\r
                        log.info("Local statistics collector is disabled or not configured! ");\r
@@ -136,6 +139,16 @@ public class StatisticCollector implements ServletContextListener {
                return propName;\r
        }\r
 \r
+       private static int getIntProperty(String propValue) {\r
+               int value = 0;\r
+               if (!Util.isEmpty(propValue)) {\r
+                       try {\r
+                               value = Integer.parseInt(propValue.trim());\r
+                       } catch (NumberFormatException e) {\r
+                               log.warn("Cannot parse integer property value: " + propValue, e);\r
+                       }\r
+               }\r
+               return value;\r
+       }\r
+\r
        static boolean collectClusterStats() {\r
                return getBooleanProperty(ph\r
                                .getProperty("cluster.stat.collector.enable"));\r
@@ -145,6 +158,16 @@ public class StatisticCollector implements ServletContextListener {
                return getBooleanProperty(ph.getProperty("local.stat.collector.enable"));\r
        }\r
 \r
+       static int updateClusterStatsFrequency() {\r
+               return getIntProperty(ph\r
+                               .getProperty("cluster.stat.collector.update.frequency"));\r
+       }\r
+\r
+       static int updateLocalStatsFrequency() {\r
+               return getIntProperty(ph.getProperty("local.stat.collector.update.frequency"));\r
+       }\r
+\r
        private static boolean getBooleanProperty(String propValue) {\r
                boolean enabled = false;\r
                if (!Util.isEmpty(propValue)) {\r
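
Both collectors are now scheduled with a 30-second initial delay and a period of 60 * CollectingFrequency seconds, i.e. the configured frequency is interpreted in minutes. A self-contained sketch of the same scheduling pattern (the task body is a stand-in for ExecutionStatCollector):

    import java.util.Date;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class CollectorSchedule {
        public static void main(String[] args) {
            // as read from *.stat.collector.update.frequency, in minutes
            long collectingFrequency = 1;
            ScheduledExecutorService executor = Executors.newScheduledThreadPool(2);
            Runnable collector = () -> System.out.println("collecting at " + new Date());
            // first run after 30 seconds, then every collectingFrequency minutes
            executor.scheduleAtFixedRate(collector, 30, 60 * collectingFrequency, TimeUnit.SECONDS);
        }
    }
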
index daf0c44..c858f44 100644 (file)
  */\r
 package compbio.ws.server;\r
 \r
+import java.util.concurrent.Executors;\r
+import java.util.concurrent.ScheduledExecutorService;\r
+import java.util.concurrent.ScheduledFuture;\r
+import java.util.concurrent.TimeUnit;\r
+\r
 import javax.servlet.ServletContextEvent;\r
 import javax.servlet.ServletContextListener;\r
 \r
 import org.apache.log4j.Logger;\r
 \r
+import compbio.stat.collector.DirCleaner;\r
+import compbio.stat.collector.StatDB;\r
+import compbio.engine.conf.PropertyHelperManager;\r
 import compbio.engine.local.ExecutableWrapper;\r
 import compbio.engine.local.LocalExecutorService;\r
+import compbio.util.PropertyHelper;\r
+import compbio.util.Util;\r
 \r
 /**\r
- * Switch off engines if JABAWS web application is undeployed, or web server is\r
- * shutdown\r
+ * Two tasks:\r
+ * 1. Switch off engines if the JABAWS web application is undeployed or the web server is shut down\r
+ * 2. Delete old job directories\r
  * \r
  * @author Peter Troshin\r
- * @version 1.0\r
+ * @author Alexander Sherstnev\r
+ * @version 2.0\r
  */\r
 public class ShutdownEngines implements ServletContextListener {\r
 \r
        private final Logger log = Logger.getLogger(ShutdownEngines.class);\r
-\r
+       static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();\r
+       \r
+       private ScheduledFuture<?> localcl;\r
+       private ScheduledFuture<?> clustercl;\r
+       private ScheduledExecutorService executor;\r
+       \r
        @Override\r
        public void contextDestroyed(ServletContextEvent ignored) {\r
+               // stop cleaning job directories\r
+               if (null != localcl) {\r
+                       localcl.cancel(true);\r
+               }\r
+               if (null != clustercl) {\r
+                       clustercl.cancel(true);\r
+               }\r
+               if (null != executor) {\r
+                       executor.shutdownNow();\r
+               }\r
                // Shutdown local engine\r
                log.info("JABAWS context is destroyed. Shutting down engines...");\r
                LocalExecutorService.shutDown();\r
@@ -48,7 +78,51 @@ public class ShutdownEngines implements ServletContextListener {
 \r
        @Override\r
        public void contextInitialized(ServletContextEvent arg0) {\r
-               // Do nothing\r
+               log.info("Initializing directory cleaners");\r
+               executor = Executors.newScheduledThreadPool(2);\r
+\r
+               // configure the cluster cleaner\r
+               String clusterWorkDir = getClusterJobDir();\r
+               int clusterDirLifespan = PropertyHelperManager.getIntProperty(ph.getProperty("cluster.jobdir.maxlifespan"));\r
+               int clusterCleaningRate = PropertyHelperManager.getIntProperty(ph.getProperty("cluster.jobdir.cleaning.frequency"));\r
+               boolean cleanClusterDir = PropertyHelperManager.getBooleanProperty(ph.getProperty("cluster.stat.collector.enable"));\r
+               // the rate is 0 if the property is not found; fall back to the documented daily rate\r
+               if (clusterCleaningRate < 1) {\r
+                       clusterCleaningRate = 1440;\r
+               }\r
+\r
+               if (0 < clusterDirLifespan && cleanClusterDir) {\r
+                       DirCleaner clusterDirCleaner = new DirCleaner(clusterWorkDir, clusterDirLifespan);\r
+                       clustercl = executor.scheduleAtFixedRate(clusterDirCleaner, 1, clusterCleaningRate, TimeUnit.MINUTES);\r
+                       log.info("Cleaning cluster job directory every " + clusterCleaningRate + " minutes");\r
+               } else {\r
+                       log.info("Cluster job directory cleaner is disabled.");\r
+               }\r
+\r
+               // configure the local cleaner\r
+               String localWorkDir = compbio.engine.client.Util.convertToAbsolute(getLocalJobDir());\r
+               int localDirLifespan = PropertyHelperManager.getIntProperty(ph.getProperty("local.jobdir.maxlifespan"));\r
+               int localCleaningRate = PropertyHelperManager.getIntProperty(ph.getProperty("local.jobdir.cleaning.frequency"));\r
+               boolean cleanLocalDir = PropertyHelperManager.getBooleanProperty(ph.getProperty("local.stat.collector.enable"));\r
+               // the rate is 0 if the property is not found; fall back to the documented daily rate\r
+               if (localCleaningRate < 1) {\r
+                       localCleaningRate = 1440;\r
+               }\r
+\r
+               if (0 < localDirLifespan && cleanLocalDir) {\r
+                       DirCleaner localDirCleaner = new DirCleaner(localWorkDir, localDirLifespan);\r
+                       localcl = executor.scheduleAtFixedRate(localDirCleaner, 1, localCleaningRate, TimeUnit.MINUTES);\r
+                       log.info("Cleaning local job directory every " + localCleaningRate + " minutes");\r
+               } else {\r
+                       log.info("Local job directory cleaner is disabled.");\r
+               }\r
+       }\r
+\r
+       static String getClusterJobDir() {\r
+               String ln = ph.getProperty("cluster.tmp.directory");\r
+               if (null != ln ) {\r
+               if (null != ln) {\r
+                       ln = ln.trim();\r
+               return ln;\r
        }\r
 \r
+       static String getLocalJobDir() {\r
+               String ln = ph.getProperty("local.tmp.directory");\r
+               if (null != ln) {\r
+                       ln = ln.trim();\r
+               }\r
+               return ln;\r
+       }\r
 }\r
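
ShutdownEngines pairs the scheduling in contextInitialized with cancellation in contextDestroyed. A minimal listener showing the same lifecycle, reduced to a single cleaner (the fixed 1440-minute period and the empty task are illustrative):

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.ScheduledFuture;
    import java.util.concurrent.TimeUnit;

    import javax.servlet.ServletContextEvent;
    import javax.servlet.ServletContextListener;

    public class CleanerLifecycle implements ServletContextListener {

        private ScheduledExecutorService executor;
        private ScheduledFuture<?> cleaner;

        @Override
        public void contextInitialized(ServletContextEvent event) {
            executor = Executors.newScheduledThreadPool(1);
            Runnable task = () -> { /* delete expired job directories here */ };
            cleaner = executor.scheduleAtFixedRate(task, 1, 1440, TimeUnit.MINUTES);
        }

        @Override
        public void contextDestroyed(ServletContextEvent event) {
            if (cleaner != null) {
                cleaner.cancel(true); // interrupt a running cleaning pass
            }
            if (executor != null) {
                executor.shutdownNow(); // stop the scheduler thread
            }
        }
    }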