From: Sasha
Date: Fri, 26 Apr 2013 17:26:47 +0000 (+0100)
Subject: Delete old job directory automatically
X-Git-Url: http://source.jalview.org/gitweb/?a=commitdiff_plain;h=00811f6fe37b88bbf9f3164473d42c815e8a3299;p=jabaws.git

Delete old job directory automatically
---

diff --git a/conf/Engine.cluster.properties b/conf/Engine.cluster.properties
index caa2541..e18fa4c 100644
--- a/conf/Engine.cluster.properties
+++ b/conf/Engine.cluster.properties
@@ -16,7 +16,18 @@ cluster.tmp.directory=/cluster/gjb_lab/fc/www-jws2/jaba/jobsout
 
 # Enable/disable cluster statistics collector
 cluster.stat.collector.enable=false
+# Frequency of statistics collecting (in minutes)
+# normal rate: once a day = 24 * 60 = 1440
+cluster.stat.collector.update.frequency=1440
 
 # Maximum amount of time the job is considered running in hours
 # Optional, defaults to 7 days (168h)
 cluster.stat.maxruntime=24
+
+# Maximum amount of time a job directory is kept (in hours);
+# -1 means the directories are never deleted
+# Default is one week, i.e. 168 hours
+cluster.jobdir.maxlifespan=168
+# Frequency of job directory cleaning (in minutes)
+# normal rate: once a day = 24 * 60 = 1440
+cluster.jobdir.cleaning.frequency=1
\ No newline at end of file
diff --git a/conf/Engine.local.properties b/conf/Engine.local.properties
index 2f211e8..71a8174 100644
--- a/conf/Engine.local.properties
+++ b/conf/Engine.local.properties
@@ -16,7 +16,18 @@ local.tmp.directory=jobsout
 
 # Enable/disable cluster statistics collector
 local.stat.collector.enable=true
+# Frequency of statistics collecting (in minutes)
+# normal rate: once a day = 24 * 60 = 1440
+local.stat.collector.update.frequency=1
 
 # Maximum amount of time the job is considered running in hours
 # Optional, defaults to 24 hours
-local.stat.maxruntime=24
\ No newline at end of file
+local.stat.maxruntime=24
+
+# Maximum amount of time a job directory is kept (in hours);
+# -1 means the directories are never deleted
+# Default is one week, i.e. 168 hours
+local.jobdir.maxlifespan=168
+# Frequency of job directory cleaning (in minutes)
+# normal rate: once a day = 24 * 60 = 1440
+local.jobdir.cleaning.frequency=1
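For orientation: the two new setting pairs split the work between a threshold and a schedule. *.jobdir.maxlifespan is the age limit in hours a finished job directory may reach before deletion (a negative value disables deletion), while *.jobdir.cleaning.frequency is the period in minutes at which the cleaner task re-runs. A minimal sketch of that wiring is below; the values are hard-coded stand-ins for the properties above, and the task body is a placeholder, not the real JABAWS cleaner.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // Sketch only: shows how maxlifespan (hours) and cleaning.frequency
    // (minutes) combine into one scheduled task. The values stand in for
    // the properties defined above.
    public class JobDirCleaningSketch {
        public static void main(String[] args) {
            final int maxLifespanHours = 168;  // local.jobdir.maxlifespan
            int cleaningFrequencyMin = 1440;   // local.jobdir.cleaning.frequency

            if (maxLifespanHours < 0) {
                return; // -1 means job directories are never deleted
            }
            ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
            Runnable cleaner = new Runnable() {
                @Override
                public void run() {
                    // placeholder for deleting dirs older than maxLifespanHours
                    System.out.println("delete job dirs older than "
                            + maxLifespanHours + "h");
                }
            };
            executor.scheduleAtFixedRate(cleaner, cleaningFrequencyMin,
                    cleaningFrequencyMin, TimeUnit.MINUTES);
        }
    }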
diff --git a/engine/compbio/engine/Cleaner.java b/engine/compbio/engine/Cleaner.java
index 4813388..ce9c774 100644
--- a/engine/compbio/engine/Cleaner.java
+++ b/engine/compbio/engine/Cleaner.java
@@ -22,11 +22,17 @@ import java.util.List;
 
 import org.apache.log4j.Logger;
 
+import compbio.engine.conf.PropertyHelperManager;
 import compbio.engine.client.ConfiguredExecutable;
 import compbio.engine.client.PathValidator;
 import compbio.engine.local.ExecutableWrapper;
+import compbio.util.PropertyHelper;
 
-@Deprecated
+//@Deprecated
+
+// TODO
+// understand what this class does and why it was set as deprecated ...
+// how to check timestamps of files before deleting
 public class Cleaner {
 
 private static final Logger log = Logger.getLogger(Cleaner.class);
@@ -56,10 +62,8 @@ public class Cleaner {
 // Remove process std output and error capture files, do not care
 // whether succeed or not
 // as these are only created for local processes, so may not exist
- removeFile(exec.getWorkDirectory() + File.separator
- + ExecutableWrapper.PROC_OUT_FILE);
- removeFile(exec.getWorkDirectory() + File.separator
- + ExecutableWrapper.PROC_ERR_FILE);
+ removeFile(exec.getWorkDirectory() + File.separator + ExecutableWrapper.PROC_OUT_FILE);
+ removeFile(exec.getWorkDirectory() + File.separator + ExecutableWrapper.PROC_ERR_FILE);
 // Remove the task directory if all files were successfully removed
 return removeFile(exec.getWorkDirectory());
 }
@@ -88,12 +92,11 @@ public class Cleaner {
 
 public static boolean deleteAllFiles(String directory) {
 if (compbio.util.Util.isEmpty(directory)) {
- throw new NullPointerException("Direcotry must be provided! ");
+ throw new NullPointerException("Directory must be provided! ");
 }
 File rootdir = new File(directory);
 if (!rootdir.exists()) {
- log.error("Directory " + directory
- + " does not exist. Have been deleted already?");
+ log.error("Directory " + directory + " does not exist. Has it been deleted already?");
 return false;
 }
 if (!rootdir.isDirectory()) {
@@ -105,15 +108,24 @@ public class Cleaner {
 int deletedCount = 0;
 for (File f : files) {
 if (f.isDirectory()) {
- log.error("Cannot delete subdirecotries! Skipping...");
+ log.error("Cannot delete subdirectories! Skipping...");
 } else {
- boolean deleted = f.delete();
- if (deleted) {
+ if (f.delete()) {
 deletedCount++;
 }
 }
 }
+ rootdir.delete();
 return deletedCount == files.length;
 }
 
+ public static boolean deleteDirectory(String directory) {
+ if (deleteAllFiles(directory)) {
+ File rootdir = new File(directory);
+ return rootdir.delete();
+ }
+ return false;
+ }
+
 }
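Note the composition in the new deleteDirectory(): deleteAllFiles() removes only the regular files in a directory (subdirectories are skipped with an error), and File.delete() succeeds on a directory only once it is empty, so this works for the flat job directories JABAWS writes. For contrast, a generic recursive delete would look like the sketch below; this is an illustration with a hypothetical path, not part of this commit:

    import java.io.File;

    // Illustration only: a depth-first recursive delete, in contrast to the
    // single-level Cleaner.deleteAllFiles above, which skips subdirectories.
    public class RecursiveDeleteSketch {
        static boolean deleteRecursively(File dir) {
            File[] files = dir.listFiles();
            if (files != null) {              // null when dir is a plain file
                for (File f : files) {
                    deleteRecursively(f);     // empty the children first
                }
            }
            return dir.delete();              // works on files and empty dirs
        }

        public static void main(String[] args) {
            // hypothetical job directory path, for demonstration only
            System.out.println(deleteRecursively(new File("jobsout/example_job")));
        }
    }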
diff --git a/webservices/compbio/stat/collector/DirCleaner.java b/webservices/compbio/stat/collector/DirCleaner.java
new file mode 100644
index 0000000..d10461e
--- /dev/null
+++ b/webservices/compbio/stat/collector/DirCleaner.java
@@ -0,0 +1,129 @@
+/* Copyright (c) 2013 Alexander Sherstnev
+ *
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.1
+ *
+ * This library is free software; you can redistribute it and/or modify it under the terms of the
+ * Apache License version 2 as published by the Apache Software Foundation
+ *
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache
+ * License for more details.
+ *
+ * A copy of the license is in apache_license.txt. It is also available here:
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt
+ *
+ * Any republication or derived work distributed in source code form
+ * must include this copyright and license notice.
+ */
+package compbio.stat.collector;
+
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+
+import org.apache.log4j.Logger;
+
+import compbio.engine.Cleaner;
+import compbio.engine.client.Executable;
+import compbio.engine.client.PathValidator;
+import compbio.engine.client.SkeletalExecutable;
+import compbio.metadata.JobStatus;
+import compbio.stat.collector.JobStat;
+import compbio.util.FileUtil;
+
+/**
+ * Deletes directories of completed jobs that have outlived the configured
+ * maximum lifespan; directories of unfinished jobs are never touched.
+ *
+ * @author Alexander Sherstnev
+ */
+public class DirCleaner implements Runnable {
+
+ static final int UNDEFINED = -1;
+
+ private static final Logger log = Logger.getLogger(DirCleaner.class);
+
+ final private File workDirectory;
+ final private int LifeSpanInHours;
+
+ /**
+ *
+ * @param workDirectory
+ * @param LifeSpanInHours
+ */
+ public DirCleaner(String workDirectory, int LifeSpanInHours) {
+ log.info("Starting cleaning for directory: " + workDirectory);
+ log.info("Maximum allowed directory life span (h): " + LifeSpanInHours);
+ if (!PathValidator.isValidDirectory(workDirectory)) {
+ throw new IllegalArgumentException("workDirectory '" + workDirectory + "' does not exist!");
+ }
+ this.workDirectory = new File(workDirectory);
+ this.LifeSpanInHours = LifeSpanInHours;
+ }
+
+ boolean hasCompleted(JobDirectory jd) {
+ JobStat jstat = jd.getJobStat();
+ if (jstat.hasResult() || jstat.getIsCancelled() || jstat.getIsFinished()) {
+ return true;
+ }
+ return false;
+ }
+
+ boolean livesOverLifeSpan(JobDirectory jd) {
+ return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > LifeSpanInHours;
+ }
+
+ static FileFilter directories = new FileFilter() {
+ @Override
+ public boolean accept(File pathname) {
+ return pathname.isDirectory() && !pathname.getName().startsWith(".");
+ }
+ };
+
+ // TODO test!
+ void doCleaning() {
+ File[] dirs = workDirectory.listFiles(directories);
+ for (File dir : dirs) {
+ // Do not look at dirs with unfinished jobs
+ JobDirectory jd = new JobDirectory(dir);
+ if (hasCompleted(jd) && livesOverLifeSpan(jd)) {
+ Cleaner.deleteDirectory(workDirectory.getAbsolutePath() + File.separator + dir.getName());
+ log.debug("Directory " + dir.getName() + " is deleted in doCleaning");
+ } else {
+ log.debug("Directory " + dir.getName() + " is kept: the job is unfinished or too new");
+ }
+ }
+ }
+
+ @Override
+ public void run() {
+ log.info("Started cleaning job directory at " + new Date());
+ log.info("For directory: " + workDirectory.getAbsolutePath());
+ doCleaning();
+ log.info("Finished cleaning job directory at " + new Date());
+ }
+}
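The age test in livesOverLifeSpan() is integer arithmetic on File.lastModified(): the elapsed milliseconds are truncated to whole hours before a strict greater-than comparison. The standalone sketch below (names hypothetical) reproduces that computation. With the default lifespan of 168 hours, a directory last modified 240 hours ago qualifies for deletion, while one aged exactly 168 hours is still kept:

    // Standalone check mirroring DirCleaner.livesOverLifeSpan; sketch only.
    public class LifespanCheckSketch {
        static boolean olderThan(long lastModifiedMillis, int lifeSpanHours) {
            long ageHours = (System.currentTimeMillis() - lastModifiedMillis)
                    / (1000L * 60 * 60); // truncates: 167.9h counts as 167h
            return ageHours > lifeSpanHours; // strict: exactly 168h survives
        }

        public static void main(String[] args) {
            long tenDaysAgo = System.currentTimeMillis()
                    - 10L * 24 * 60 * 60 * 1000;
            System.out.println(olderThan(tenDaysAgo, 168)); // true: 240h > 168h
        }
    }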
diff --git a/webservices/compbio/stat/collector/ExecutionStatCollector.java b/webservices/compbio/stat/collector/ExecutionStatCollector.java
index 62f39c9..c50ab70 100644
--- a/webservices/compbio/stat/collector/ExecutionStatCollector.java
+++ b/webservices/compbio/stat/collector/ExecutionStatCollector.java
@@ -1,4 +1,5 @@
-/* Copyright (c) 2011 Peter Troshin
+/* Copyright (c) 2013 Alexander Sherstnev
+ * Copyright (c) 2011 Peter Troshin
 *
 * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 2.0
 *
@@ -24,32 +25,23 @@
 import java.sql.SQLException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.log4j.Logger;
 
-import compbio.engine.client.Executable;
 import compbio.engine.client.PathValidator;
 import compbio.engine.client.SkeletalExecutable;
-import compbio.metadata.JobStatus;
 import compbio.util.FileUtil;
-import compbio.ws.client.Services;
-import compbio.ws.client.ServicesUtil;
 
 /**
- * Number of runs of each WS = number of folders with name
- *
- * Number of successful runs = all runs with no result file
- *
- * Per period of time = limit per file creating time Runtime (avg/max) =
- *
- * started time - finished time
- *
- * Task & result size = result.size
+ * Class assumptions:
+ * 1. Number of runs of each WS = number of folders with name
+ * 2. Number of successful runs = all runs with no result file
+ * 3. Per period of time = limit per file creating time
+ * 4. Runtime (avg/max) = finish time - start time
+ * 5. Task & result size = result.size
 *
 * Abandoned runs - not collected runs
 *
 * Cancelled runs - cancelled
 *
 * Cluster vs local runs
 *
 * Reasons for failure = look in the err out?
 *
 * Metadata required:
 *
@@ -65,18 +57,16 @@ import compbio.ws.client.ServicesUtil;
 * work directory for local and cluster tasks = from Helper or cmd parameter. WS
 * names - enumeration. Status file names and content.
 *
- * @author pvtroshin
+ * @author Peter Troshin
+ * @author Alexander Sherstnev
 *
 */
 public class ExecutionStatCollector implements Runnable {
 
- static final int UNDEFINED = -1;
-
- private static final Logger log = Logger.getLogger(ExecutionStatCollector.class);
-
 static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");
+ private static final Logger log = Logger.getLogger(ExecutionStatCollector.class);
 
- final private File workDirectory;
+ final private File workingDirectory;
 final private List<JobStat> stats;
 /**
 * Consider the job that has been working for longer than timeOutInHours
 */
@@ -87,16 +77,16 @@ public class ExecutionStatCollector implements Runnable {
 /**
 * List subdirectories in the job directory
 *
- * @param workDirectory
+ * @param workingDirectory
 * @param timeOutInHours
 */
- public ExecutionStatCollector(String workDirectory, int timeOutInHours) {
- log.info("Starting stat collector for directory: " + workDirectory);
+ public ExecutionStatCollector(String workingDirectory, int timeOutInHours) {
+ log.info("Starting stat collector for directory: " + workingDirectory);
 log.info("Maximum allowed runtime(h): " + timeOutInHours);
- if (!PathValidator.isValidDirectory(workDirectory)) {
- throw new IllegalArgumentException("workDirectory '" + workDirectory + "' does not exist!");
+ if (!PathValidator.isValidDirectory(workingDirectory)) {
+ throw new IllegalArgumentException("workingDirectory '" + workingDirectory + "' does not exist!");
 }
- this.workDirectory = new File(workDirectory);
+ this.workingDirectory = new File(workingDirectory);
 stats = new ArrayList<JobStat>();
 if (timeOutInHours <= 0) {
 throw new IllegalArgumentException(
@@ -139,12 +129,6 @@ public class ExecutionStatCollector implements Runnable {
 statdb.insertData(rjobs);
 }
 
- /*
- * static void updateTime(File statFile) throws IOException { long lastMod =
- * statFile.lastModified(); FileWriter fw = new FileWriter(statFile);
- * fw.write(new Long(lastMod).toString()); fw.close(); }
- */
-
 /**
 * Not in use
 */
@@ -176,201 +160,37 @@ public class ExecutionStatCollector implements Runnable {
 static FileFilter directories = new FileFilter() {
 @Override
 public boolean accept(File pathname) {
- return pathname.isDirectory()
- && !pathname.getName().startsWith(".");
+ return pathname.isDirectory() && !pathname.getName().startsWith(".");
 }
 };
 
- static class JobDirectory {
-
- File jobdir;
- Map<String, File> files = new HashMap<String, File>();
-
- JobDirectory(File directory) {
- this.jobdir = directory;
- for (File f : jobdir.listFiles()) {
- files.put(f.getName(), f);
- }
- }
-
- boolean hasStatus(JobStatus status) {
- return files.containsKey(status.toString());
- }
-
- boolean isCollected() {
- return hasStatus(JobStatus.COLLECTED);
- }
-
- boolean isCancelled() {
- return hasStatus(JobStatus.CANCELLED);
- }
-
- long getStartTime() {
- long starttime = UNDEFINED;
- File startfile = files.get(JobStatus.STARTED.toString());
- if (startfile == null) {
- startfile = files.get(JobStatus.SUBMITTED.toString());
- }
- try {
- if (startfile != null) {
- String start = FileUtil.readFileToString(startfile);
- starttime = Long.parseLong(start.trim());
- }
- } catch (IOException ignore) {
- log.warn(
- "IOException while reading STARTED status file! Ignoring...",
- ignore);
- // fall back
- starttime = startfile.lastModified();
- } catch (NumberFormatException ignore) {
- log.warn(
- "NumberFormatException while reading STARTED status file! Ignoring...",
- ignore);
- // fall back
- starttime = startfile.lastModified();
- }
-
- return starttime;
- }
-
- String getClusterJobID() {
- String clustjobId = "";
- File jobid = files.get("JOBID");
- try {
- if (jobid != null) {
- clustjobId = FileUtil.readFileToString(jobid);
- }
- } catch (IOException ioe) {
- log.error(
- "IO Exception while reading the content of JOBID file for job "
- + jobid, ioe);
- }
- return clustjobId.trim();
- }
-
- long getFinishedTime() {
- long ftime = UNDEFINED;
- File finished = files.get(JobStatus.FINISHED.toString());
- if (finished != null) {
- try {
- if (finished != null) {
- String start = FileUtil.readFileToString(finished);
- ftime = Long.parseLong(start.trim());
- }
- } catch (IOException ignore) {
- log.warn(
- "IOException while reading FINISHED status file! Ignoring...",
- ignore);
- // fall back
- ftime = finished.lastModified();
- } catch (NumberFormatException ignore) {
- log.warn(
- "NumberFormatException while reading FINISHED status file! Ignoring...",
- ignore);
- // fall back
- ftime = finished.lastModified();
- }
- }
- return ftime;
- }
-
- private Services getService() {
- return ServicesUtil.getServiceByJobDirectory(jobdir);
- }
-
- long getResultSize() {
- Class<? extends Executable<?>> name = ServicesUtil
- .getRunnerByJobDirectory(jobdir);
-
- File f = null;
- if (name.getSimpleName().equalsIgnoreCase("IUPred")) {
- f = files.get("out.glob");
- if (f == null)
- f = files.get("out.short");
- if (f == null)
- f = files.get("out.long");
- } else {
- f = files.get(SkeletalExecutable.OUTPUT);
- }
- if (f != null) {
- return f.length();
- }
- return UNDEFINED;
- }
-
- long getInputSize() {
- Class<? extends Executable<?>> name = ServicesUtil
- .getRunnerByJobDirectory(jobdir);
-
- File input = files.get(SkeletalExecutable.INPUT);
- if (input != null) {
- return input.length();
- }
- return UNDEFINED;
- }
-
- JobStat getJobStat() {
- return JobStat.newInstance(getService(), getClusterJobID(),
- jobdir.getName(), getStartTime(), getFinishedTime(),
- getInputSize(), getResultSize(), isCancelled(),
- isCollected());
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result
- + ((jobdir == null) ? 0 : jobdir.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- JobDirectory other = (JobDirectory) obj;
- if (jobdir == null) {
- if (other.jobdir != null)
- return false;
- } else if (!jobdir.equals(other.jobdir))
- return false;
- return true;
- }
- }
-
 // TODO test!
 void collectStatistics() {
 // clear stats array;
 stats.clear();
- File[] files = workDirectory.listFiles(directories);
- for (File file : files) {
- // skip work directory with test input
- if (InputFilter.accept(new File(file.getPath() + File.separator + SkeletalExecutable.INPUT))) {
- JobDirectory jd = new JobDirectory(file);
+ File[] dirs = workingDirectory.listFiles(directories);
+ for (File dir : dirs) {
+ // skip work directories with test inputs
+ log.debug("checking directory: " + dir.getName() + "...");
+ if (InputFilter.accept(new File(dir.getPath() + File.separator + SkeletalExecutable.INPUT))) {
+ JobDirectory jd = new JobDirectory(dir);
 JobStat jstat = jd.getJobStat();
 // Do not record stats on the job that has not completed yet
 if (hasCompleted(jd)) {
 stats.add(jstat);
- System.out.println("added: id = " + jd);
 } else {
- log.debug("Skipping the job: " + jstat);
- log.debug("As it has not completed yet");
+ log.debug("Skipping the job: " + jstat + " as it has not completed yet");
 }
- // System.out.println(jd.getJobStat().getJobReportTabulated());
 } else {
- log.trace("training input: " + file.getPath() + File.separator + SkeletalExecutable.INPUT);
+ log.trace("training input: " + dir.getName() + File.separator + SkeletalExecutable.INPUT);
 }
 }
 }
 
 @Override
 public void run() {
 log.info("Started updating statistics at " + new Date());
- log.info("For directory: " + workDirectory.getAbsolutePath());
+ log.info("For directory: " + workingDirectory.getAbsolutePath());
 collectStatistics();
diff --git a/webservices/compbio/stat/servlet/StatisticCollector.java b/webservices/compbio/stat/servlet/StatisticCollector.java
index 3a7d1bc..8a01122 100644
--- a/webservices/compbio/stat/servlet/StatisticCollector.java
+++ b/webservices/compbio/stat/servlet/StatisticCollector.java
@@ -68,32 +68,35 @@ public class StatisticCollector implements ServletContextListener {
 int clusterMaxRuntime = getClusterJobTimeOut();
 int localMaxRuntime = getLocalJobTimeOut();
- String localWorkDir = compbio.engine.client.Util
- .convertToAbsolute(getLocalJobDir());
+ String localWorkDir = compbio.engine.client.Util.convertToAbsolute(getLocalJobDir());
 
- log.info("Initializing statistics collector");
+ log.info("Initializing statistics collectors");
 executor = Executors.newScheduledThreadPool(2);
 
 if (collectClusterStats()) {
- // collect statistics every minute. DEBUGGING ONLY!!!
- long CollectingFrequency = 1;
- // collect statistics every day. normal running...
- //CollectingFrequency = 24 * 60;
+ // collect statistics with this frequency
+ long CollectingFrequency = updateClusterStatsFrequency();
+ // CollectingFrequency = 0 if the parameter is not found
+ if (0 == CollectingFrequency) {
+ CollectingFrequency = 1;
+ }
 
 ExecutionStatCollector clusterCollector = new ExecutionStatCollector(clusterWorkDir, clusterMaxRuntime);
- clustercf = executor.scheduleAtFixedRate(clusterCollector, 1, CollectingFrequency, TimeUnit.MINUTES);
- log.info("Collecting cluster statistics ");
+ clustercf = executor.scheduleAtFixedRate(clusterCollector, 30, 60 * CollectingFrequency, TimeUnit.SECONDS);
+ log.info("Collecting cluster statistics every " + CollectingFrequency + " minutes");
 } else {
 log.info("Cluster statistics collector is disabled or not configured! ");
 }
 
 if (collectLocalStats()) {
- // collect statistics every minute. DEBUGGING ONLY!!!
- long CollectingFrequency = 1;
- // collect statistics every day. normal running...
- //CollectingFrequency = 24 * 60;
+ // collect statistics with this frequency
+ long CollectingFrequency = updateLocalStatsFrequency();
+ // CollectingFrequency = 0 if the parameter is not found
+ if (0 == CollectingFrequency) {
+ CollectingFrequency = 1;
+ }
 
 ExecutionStatCollector localCollector = new ExecutionStatCollector(
 localWorkDir, localMaxRuntime);
- localcf = executor.scheduleAtFixedRate(localCollector, 1, CollectingFrequency, TimeUnit.MINUTES);
+ localcf = executor.scheduleAtFixedRate(localCollector, 30, 60 * CollectingFrequency, TimeUnit.SECONDS);
 log.info("Collecting local statistics every " + CollectingFrequency + " minutes");
 } else {
 log.info("Local statistics collector is disabled or not configured! ");
@@ -136,6 +139,16 @@ public class StatisticCollector implements ServletContextListener {
 return propName;
 }
 
+ private static int getIntProperty(String propValue) {
+ int value = 0;
+ if (!Util.isEmpty(propValue)) {
+ propValue = propValue.trim();
+ value = Integer.parseInt(propValue);
+ }
+ return value;
+ }
+
+
 static boolean collectClusterStats() {
 return getBooleanProperty(ph
 .getProperty("cluster.stat.collector.enable"));
@@ -145,6 +158,16 @@ public class StatisticCollector implements ServletContextListener {
 return getBooleanProperty(ph.getProperty("local.stat.collector.enable"));
 }
 
+ static int updateClusterStatsFrequency() {
+ return getIntProperty(ph
+ .getProperty("cluster.stat.collector.update.frequency"));
+ }
+
+ static int updateLocalStatsFrequency() {
+ return getIntProperty(ph.getProperty("local.stat.collector.update.frequency"));
+ }
+
+
 private static boolean getBooleanProperty(String propValue) {
 boolean enabled = false;
 if (!Util.isEmpty(propValue)) {
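The rescheduling above deserves a close read: the configured frequency is in minutes, but it is passed to scheduleAtFixedRate() as 60 * CollectingFrequency with TimeUnit.SECONDS, plus a fixed 30-second initial delay so the first collection happens shortly after deployment. The equivalent call pattern in isolation (hypothetical task, not the JABAWS collector):

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // Demonstrates the conversion used above: a frequency configured in
    // minutes is scheduled in seconds, with the first run after 30 seconds.
    public class CollectorScheduleSketch {
        public static void main(String[] args) {
            long collectingFrequency = 1440; // minutes; 1440 = once a day
            ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
            Runnable collector = new Runnable() {
                @Override
                public void run() {
                    System.out.println("collecting statistics...");
                }
            };
            executor.scheduleAtFixedRate(collector, 30, 60 * collectingFrequency,
                    TimeUnit.SECONDS);
        }
    }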
diff --git a/webservices/compbio/ws/server/ShutdownEngines.java b/webservices/compbio/ws/server/ShutdownEngines.java
index daf0c44..c858f44 100644
--- a/webservices/compbio/ws/server/ShutdownEngines.java
+++ b/webservices/compbio/ws/server/ShutdownEngines.java
@@ -17,27 +17,57 @@
 */
 package compbio.ws.server;
 
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
 
 import org.apache.log4j.Logger;
 
+import compbio.stat.collector.DirCleaner;
+import compbio.stat.collector.StatDB;
+import compbio.engine.conf.PropertyHelperManager;
 import compbio.engine.local.ExecutableWrapper;
 import compbio.engine.local.LocalExecutorService;
+import compbio.util.PropertyHelper;
+import compbio.util.Util;
 
 /**
- * Switch off engines if JABAWS web application is undeployed, or web server is
- * shutdown
+ * Two tasks:
+ * 1. Switch off engines if the JABAWS web application is undeployed or the web server is shut down
+ * 2. Delete old job directories
 *
 * @author Peter Troshin
- * @version 1.0
+ * @author Alexander Sherstnev
+ * @version 2.0
 */
 public class ShutdownEngines implements ServletContextListener {
 
 private final Logger log = Logger.getLogger(ShutdownEngines.class);
-
+ static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();
+
+ private ScheduledFuture<?> localcl;
+ private ScheduledFuture<?> clustercl;
+ private ScheduledExecutorService executor;
+
 @Override
 public void contextDestroyed(ServletContextEvent ignored) {
+ // stop cleaning job directories
+// try {
+ if (null != localcl) {
+ localcl.cancel(true);
+ }
+ if (null != clustercl) {
+ clustercl.cancel(true);
+ }
+ //executor.shutdown();
+ //executor.awaitTermination(3, TimeUnit.SECONDS);
+// } catch (InterruptedException e) {
+// log.warn(e.getMessage(), e);
+// }
 // Shutdown local engine
 log.info("JABAWS context is destroyed. Shutting down engines...");
 LocalExecutorService.shutDown();
@@ -48,7 +78,51 @@ public class ShutdownEngines implements ServletContextListener {
 
 @Override
 public void contextInitialized(ServletContextEvent arg0) {
- // Do nothing
+ log.info("Initializing directory cleaners");
+ executor = Executors.newScheduledThreadPool(2);
+
+ // configure the cluster job directory cleaner
+ String clusterWorkDir = getClusterJobDir();
+ int clusterDirLifespan = PropertyHelperManager.getIntProperty(ph.getProperty("cluster.jobdir.maxlifespan"));
+ int clusterCleaningRate = PropertyHelperManager.getIntProperty(ph.getProperty("cluster.jobdir.cleaning.frequency"));
+ boolean cleanClusterDir = PropertyHelperManager.getBooleanProperty(ph.getProperty("cluster.stat.collector.enable"));
+
+ if (0 < clusterDirLifespan && cleanClusterDir) {
+ DirCleaner clusterDirCleaner = new DirCleaner(clusterWorkDir, clusterDirLifespan);
+ clustercl = executor.scheduleAtFixedRate(clusterDirCleaner, 1, clusterCleaningRate, TimeUnit.MINUTES);
+ log.info("Cleaning the cluster job directory every " + clusterCleaningRate + " minutes");
+ } else {
+ log.info("Cluster job directory cleaner is disabled. ");
+ }
+
+ // configure the local job directory cleaner
+ String localWorkDir = compbio.engine.client.Util.convertToAbsolute(getLocalJobDir());
+ int localDirLifespan = PropertyHelperManager.getIntProperty(ph.getProperty("local.jobdir.maxlifespan"));
+ int localCleaningRate = PropertyHelperManager.getIntProperty(ph.getProperty("local.jobdir.cleaning.frequency"));
+ boolean cleanLocalDir = PropertyHelperManager.getBooleanProperty(ph.getProperty("local.stat.collector.enable"));
+
+ if (0 < localDirLifespan && cleanLocalDir) {
+ DirCleaner localDirCleaner = new DirCleaner(localWorkDir, localDirLifespan);
+ localcl = executor.scheduleAtFixedRate(localDirCleaner, 1, localCleaningRate, TimeUnit.MINUTES);
+ log.info("Cleaning the local job directory every " + localCleaningRate + " minutes");
+ } else {
+ log.info("Local job directory cleaner is disabled. ");
+ }
+ }
+
+ static String getClusterJobDir() {
+ String ln = ph.getProperty("cluster.tmp.directory");
+ if (null != ln) {
+ ln = ln.trim();
+ }
+ return ln;
 }
 
+ static String getLocalJobDir() {
+ String ln = ph.getProperty("local.tmp.directory");
+ if (null != ln) {
+ ln = ln.trim();
+ }
+ return ln;
+ }
 }
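In contextDestroyed() the two ScheduledFutures are cancelled, but the shutdown of the executor itself is left commented out. A conventional way to finish that thought — stop accepting tasks, wait briefly, then interrupt stragglers — is sketched below; this is the standard ExecutorService idiom, not code from this commit:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    // Standard executor shutdown idiom the commented-out block hints at.
    public class ExecutorShutdownSketch {
        static void shutdownQuietly(ExecutorService executor) {
            executor.shutdown(); // reject new tasks, let running ones finish
            try {
                if (!executor.awaitTermination(3, TimeUnit.SECONDS)) {
                    executor.shutdownNow(); // interrupt anything still running
                }
            } catch (InterruptedException e) {
                executor.shutdownNow();
                Thread.currentThread().interrupt(); // preserve the interrupt flag
            }
        }

        public static void main(String[] args) {
            shutdownQuietly(Executors.newScheduledThreadPool(2));
        }
    }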