\r
import org.apache.log4j.Logger;\r
\r
-import compbio.engine.conf.PropertyHelperManager;\r
import compbio.engine.client.ConfiguredExecutable;\r
import compbio.engine.client.PathValidator;\r
import compbio.engine.local.ExecutableWrapper;\r
-import compbio.util.PropertyHelper;\r
\r
//@Deprecated\r
\r
} else {\r
if (f.delete()) {\r
deletedCount++;\r
+ log.debug("file " + f.getName() + " removed");\r
+ } else {\r
+ log.debug("file " + f.getName() + " is not removed");\r
}\r
}\r
+ \r
}\r
rootdir.delete();\r
return deletedCount == files.length;\r
\r
public final class LocalExecutorService extends ThreadPoolExecutor {\r
\r
- private final static Logger log = Logger\r
- .getLogger(LocalExecutorService.class);\r
+ private final static Logger log = Logger.getLogger(LocalExecutorService.class);\r
private final static String threadNumPropName = "engine.local.thread.number";\r
\r
private static LocalExecutorService INSTANCE = null;\r
import org.testng.annotations.Test;\r
\r
import compbio.metadata.AllTestSuit;\r
-import compbio.stat.collector.ExecutionStatCollector.JobDirectory;\r
+import compbio.stat.collector.JobDirectory;\r
\r
public class ExecutionStatCollectorTester {\r
\r
}\r
\r
boolean livesOverLifeSpan(JobDirectory jd) {\r
- return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > LifeSpanInHours;\r
+ long LifeTime = (System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60);\r
+ log.debug("lifetime = " + LifeTime + ", lifespan = " + LifeSpanInHours);\r
+ return LifeTime > LifeSpanInHours;\r
}\r
\r
static FileFilter directories = new FileFilter() {\r
for (File dir : dirs) {\r
// Do not look at dirs with unfinished jobs\r
JobDirectory jd = new JobDirectory(dir);\r
- if (hasCompleted(jd) && livesOverLifeSpan(jd)) {\r
- Cleaner.deleteDirectory(workDirectory.getAbsolutePath() + File.separator + dir.getName());\r
- log.debug("Directory " + dir.getName() + " is deleted in doCleaning");\r
+ Date d = new Date (dir.lastModified());\r
+ log.debug("Directory " + dir.getName() + " has timestamp: " + d);\r
+ // TODO: removed hasCompleted. Maybe it needs to be restored...
+ // if (hasCompleted(jd) && livesOverLifeSpan(jd)) {\r
+ if (livesOverLifeSpan(jd)) {\r
+ if (Cleaner.deleteDirectory(workDirectory.getAbsolutePath() + File.separator + dir.getName())) {\r
+ log.error("Directory " + dir.getName() + " failed to deleted...");\r
+ } else {\r
+ log.debug("Directory " + dir.getName() + " is deleted");\r
+ }\r
} else {\r
- log.debug("Directory " + dir.getName() + " is too new and kept in doCleaning");\r
+ log.debug("Directory " + dir.getName() + " is too new and kept");\r
}\r
}\r
}\r
--- /dev/null
+/* Copyright (c) 2013 Alexander Sherstnev\r
+ * \r
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0 \r
+ * \r
+ * This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ * Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ * License for more details.\r
+ * \r
+ * A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
+package compbio.stat.collector;\r
+\r
+import java.io.File;\r
+import java.io.IOException;\r
+import java.util.HashMap;\r
+import java.util.Map;\r
+\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.engine.client.Executable;\r
+import compbio.engine.client.SkeletalExecutable;\r
+import compbio.metadata.JobStatus;\r
+import compbio.util.FileUtil;\r
+import compbio.ws.client.Services;\r
+import compbio.ws.client.ServicesUtil;\r
+\r
+/**\r
+ * \r
+ * @author Alexander Sherstnev\r
+ * \r
+ */\r
+public class JobDirectory {\r
+\r
+ static final int UNDEFINED = -1;\r
+\r
+ private static final Logger log = Logger.getLogger(JobDirectory.class);\r
+ \r
+ File jobdir;\r
+ Map<String, File> files = new HashMap<String, File>();\r
+\r
+ JobDirectory(File directory) {\r
+ this.jobdir = directory;\r
+ for (File f : jobdir.listFiles()) {\r
+ files.put(f.getName(), f);\r
+ }\r
+ }\r
+\r
+ boolean hasStatus(JobStatus status) {\r
+ return files.containsKey(status.toString());\r
+ }\r
+\r
+ boolean isCollected() {\r
+ return hasStatus(JobStatus.COLLECTED);\r
+ }\r
+\r
+ boolean isCancelled() {\r
+ return hasStatus(JobStatus.CANCELLED);\r
+ }\r
+\r
+ long getStartTime() {\r
+ long starttime = UNDEFINED;\r
+ File startfile = files.get(JobStatus.STARTED.toString());\r
+ if (startfile == null) {\r
+ startfile = files.get(JobStatus.SUBMITTED.toString());\r
+ }\r
+ try {\r
+ if (startfile != null) {\r
+ String start = FileUtil.readFileToString(startfile);\r
+ starttime = Long.parseLong(start.trim());\r
+ }\r
+ } catch (IOException ignore) {\r
+ log.warn("IOException while reading STARTED status file! Ignoring...", ignore);\r
+ // fall back\r
+ starttime = startfile.lastModified();\r
+ } catch (NumberFormatException ignore) {\r
+ log.warn("NumberFormatException while reading STARTED status file! Ignoring...", ignore);\r
+ // fall back\r
+ starttime = startfile.lastModified();\r
+ }\r
+ return starttime;\r
+ }\r
+\r
+ String getClusterJobID() {\r
+ String clustjobId = "";\r
+ File jobid = files.get("JOBID");\r
+ try {\r
+ if (jobid != null) {\r
+ clustjobId = FileUtil.readFileToString(jobid);\r
+ }\r
+ } catch (IOException ioe) {\r
+ log.error(\r
+ "IO Exception while reading the content of JOBID file for job "\r
+ + jobid, ioe);\r
+ }\r
+ return clustjobId.trim();\r
+ }\r
+\r
+ long getFinishedTime() {\r
+ long ftime = UNDEFINED;\r
+ File finished = files.get(JobStatus.FINISHED.toString());\r
+ if (finished != null) {\r
+ try {\r
+ if (finished != null) {\r
+ String start = FileUtil.readFileToString(finished);\r
+ ftime = Long.parseLong(start.trim());\r
+ }\r
+ } catch (IOException ignore) {\r
+ log.warn(\r
+ "IOException while reading FINISHED status file! Ignoring...",\r
+ ignore);\r
+ // fall back\r
+ ftime = finished.lastModified();\r
+ } catch (NumberFormatException ignore) {\r
+ log.warn(\r
+ "NumberFormatException while reading FINISHED status file! Ignoring...",\r
+ ignore);\r
+ // fall back\r
+ ftime = finished.lastModified();\r
+ }\r
+ }\r
+ return ftime;\r
+ }\r
+\r
+ private Services getService() {\r
+ return ServicesUtil.getServiceByJobDirectory(jobdir);\r
+ }\r
+\r
+ long getResultSize() {\r
+ Class<? extends Executable<?>> name = ServicesUtil\r
+ .getRunnerByJobDirectory(jobdir);\r
+\r
+ File f = null;\r
+ if (name.getSimpleName().equalsIgnoreCase("IUPred")) {\r
+ f = files.get("out.glob");\r
+ if (f == null)\r
+ f = files.get("out.short");\r
+ if (f == null)\r
+ f = files.get("out.long");\r
+ } else {\r
+ f = files.get(SkeletalExecutable.OUTPUT);\r
+ }\r
+ if (f != null) {\r
+ return f.length();\r
+ }\r
+ return UNDEFINED;\r
+ }\r
+\r
+ long getInputSize() {\r
+ Class<? extends Executable<?>> name = ServicesUtil\r
+ .getRunnerByJobDirectory(jobdir);\r
+\r
+ File input = files.get(SkeletalExecutable.INPUT);\r
+ if (input != null) {\r
+ return input.length();\r
+ }\r
+ return UNDEFINED;\r
+ }\r
+\r
+ JobStat getJobStat() {\r
+ return JobStat.newInstance(getService(), getClusterJobID(),\r
+ jobdir.getName(), getStartTime(), getFinishedTime(),\r
+ getInputSize(), getResultSize(), isCancelled(),\r
+ isCollected());\r
+ }\r
+\r
+ public int hashCode() {\r
+ final int prime = 31;\r
+ int result = 1;\r
+ result = prime * result\r
+ + ((jobdir == null) ? 0 : jobdir.hashCode());\r
+ return result;\r
+ }\r
+\r
+ public boolean equals(Object obj) {\r
+ if (this == obj)\r
+ return true;\r
+ if (obj == null)\r
+ return false;\r
+ if (getClass() != obj.getClass())\r
+ return false;\r
+ JobDirectory other = (JobDirectory) obj;\r
+ if (jobdir == null) {\r
+ if (other.jobdir != null)\r
+ return false;\r
+ } else if (!jobdir.equals(other.jobdir))\r
+ return false;\r
+ return true;\r
+ }\r
+}\r
+\r
--- /dev/null
+/* Copyright (c) 2011 Peter Troshin\r
+ * \r
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0 \r
+ * \r
+ * This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ * Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ * License for more details.\r
+ * \r
+ * A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
+package compbio.ws.server;\r
+\r
+import java.util.concurrent.Executors;\r
+import java.util.concurrent.ScheduledExecutorService;\r
+import java.util.concurrent.ScheduledFuture;\r
+import java.util.concurrent.TimeUnit;\r
+\r
+import javax.servlet.ServletContextEvent;\r
+import javax.servlet.ServletContextListener;\r
+\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.stat.collector.DirCleaner;\r
+import compbio.stat.collector.StatDB;\r
+import compbio.engine.conf.PropertyHelperManager;\r
+import compbio.engine.local.ExecutableWrapper;\r
+import compbio.engine.local.LocalExecutorService;\r
+import compbio.util.PropertyHelper;\r
+import compbio.util.Util;\r
+\r
+/**
+ * Two tasks:
+ * 1. Switch off engines if JABAWS web application is un-deployed, or web server is shutdown
+ * 2. delete old job directories
+ * 
+ * @author Peter Troshin
+ * @author Alexander Sherstnev
+ * @version 2.0
+ */
+public class MainManager implements ServletContextListener {
+
+	private final Logger log = Logger.getLogger(MainManager.class);
+	static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();
+
+	// handles of the periodic cleaning tasks; cancelled on shutdown
+	private ScheduledFuture<?> localcl;
+	private ScheduledFuture<?> clustercl;
+	private ScheduledExecutorService executor;
+
+	/**
+	 * Cancels the directory cleaners, stops their scheduler and shuts down
+	 * the execution engines.
+	 */
+	@Override
+	public void contextDestroyed(ServletContextEvent ignored) {
+		// stop cleaning job directories
+		if (null != localcl) {
+			localcl.cancel(true);
+		}
+		if (null != clustercl) {
+			clustercl.cancel(true);
+		}
+		// shut down the scheduler itself: otherwise its threads survive
+		// undeployment and keep the webapp class loader alive
+		if (null != executor) {
+			executor.shutdownNow();
+		}
+		// Shutdown local engine
+		log.info("JABAWS context is destroyed. Shutting down engines...");
+		LocalExecutorService.shutDown();
+		log.info("Local engine is shutdown OK");
+		ExecutableWrapper.shutdownService();
+		log.info("Individual executables stream engine is shutdown OK");
+	}
+
+	/**
+	 * Schedules the cluster and local job directory cleaners; each is enabled
+	 * and configured via the JABAWS properties.
+	 */
+	@Override
+	public void contextInitialized(ServletContextEvent arg0) {
+		log.info("Initializing directory cleaners");
+		executor = Executors.newScheduledThreadPool(2);
+
+		// configure cluster cleaner
+		String clusterWorkDir = getClusterJobDir();
+		int clusterDirLifespan = PropertyHelperManager.getIntProperty(ph.getProperty("cluster.jobdir.maxlifespan"));
+		int clusterCleaningRate = PropertyHelperManager.getIntProperty(ph.getProperty("cluster.jobdir.cleaning.frequency"));
+		boolean cleanClusterDir = PropertyHelperManager.getBooleanProperty(ph.getProperty("cluster.stat.collector.enable"));
+
+		if (0 < clusterDirLifespan && cleanClusterDir) {
+			DirCleaner clusterDirCleaner = new DirCleaner(clusterWorkDir, clusterDirLifespan);
+			clustercl = executor.scheduleAtFixedRate(clusterDirCleaner, 1, clusterCleaningRate, TimeUnit.MINUTES);
+			// NOTE: the original message wrongly reported "local" here
+			log.info("Cleaning cluster job directory every " + clusterCleaningRate + " minutes");
+		} else {
+			log.info("Cluster job directory cleaner is disabled. ");
+		}
+
+		// configure local cleaner
+		String localWorkDir = compbio.engine.client.Util.convertToAbsolute(getLocalJobDir());
+		int localDirLiveSpan = PropertyHelperManager.getIntProperty(ph.getProperty("local.jobdir.maxlifespan"));
+		int localCleaningRate = PropertyHelperManager.getIntProperty(ph.getProperty("local.jobdir.cleaning.frequency"));
+		boolean cleanLocalDir = PropertyHelperManager.getBooleanProperty(ph.getProperty("local.stat.collector.enable"));
+
+		if (0 < localDirLiveSpan && cleanLocalDir) {
+			DirCleaner localDirCleaner = new DirCleaner(localWorkDir, localDirLiveSpan);
+			localcl = executor.scheduleAtFixedRate(localDirCleaner, 1, localCleaningRate, TimeUnit.MINUTES);
+			log.info("Cleaning local job directory every " + localCleaningRate + " minutes");
+		} else {
+			log.info("Local job directory cleaner is disabled. ");
+		}
+	}
+
+	/**
+	 * @return the cluster temporary directory from the JABAWS configuration,
+	 *         trimmed, or null if the property is not set
+	 */
+	static String getClusterJobDir() {
+		String ln = ph.getProperty("cluster.tmp.directory");
+		if (null != ln) {
+			ln = ln.trim();
+		}
+		return ln;
+	}
+
+	/**
+	 * @return the local temporary directory from the JABAWS configuration,
+	 *         trimmed, or null if the property is not set
+	 */
+	static String getLocalJobDir() {
+		String ln = ph.getProperty("local.tmp.directory");
+		if (null != ln) {
+			ln = ln.trim();
+		}
+		return ln;
+	}
+}