From: pvtroshin
Date: Fri, 27 May 2011 18:11:08 +0000 (+0000)
Subject: Further work on statistics display - improvements to stat collector
X-Git-Url: http://source.jalview.org/gitweb/?a=commitdiff_plain;h=243b5d0278c06196854f7312d4c2d933deb89061;p=jabaws.git

Further work on statistics display - improvements to stat collector

git-svn-id: link to svn.lifesci.dundee.ac.uk/svn/barton/ptroshin/JABA2@4184 e3abac25-378b-4346-85de-24260fe3988d
---

diff --git a/webservices/compbio/stat/collector/ExecutionStatCollector.java b/webservices/compbio/stat/collector/ExecutionStatCollector.java
index cef65f9..140de7c 100644
--- a/webservices/compbio/stat/collector/ExecutionStatCollector.java
+++ b/webservices/compbio/stat/collector/ExecutionStatCollector.java
@@ -2,11 +2,11 @@ package compbio.stat.collector;
 
 import java.io.File;
 import java.io.FileFilter;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -17,11 +17,9 @@ import org.apache.log4j.Logger;
 
 import compbio.engine.client.ConfExecutable;
 import compbio.engine.client.Executable;
-import compbio.engine.conf.PropertyHelperManager;
 import compbio.metadata.JobStatus;
 import compbio.runner.msa.ClustalW;
 import compbio.util.FileUtil;
-import compbio.util.PropertyHelper;
 import compbio.ws.client.Services;
 
 /**
  * 
@@ -52,7 +50,7 @@ import compbio.ws.client.Services;
  * @author pvtroshin
  * 
  */
-public class ExecutionStatCollector {
+public class ExecutionStatCollector implements Runnable {
 
     static final int UNDEFINED = -1;
 
@@ -61,20 +59,53 @@ public class ExecutionStatCollector {
 
     static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");
 
-    static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();
-
     final private List<JobStat> stats;
+    /**
+     * Consider the job that has been working for longer than timeOutInHours
+     * completed, whatever the outcome
+     */
+    final private int timeOutInHours;
 
-    public ExecutionStatCollector(String workDirectory) {
+    /**
+     * List subdirectories in the job directory
+     * 
+     * @param workDirectory
+     * @param timeOutInHours
+     */
+    public ExecutionStatCollector(String workDirectory, int timeOutInHours) {
+        log.info("Starting stat collector for directory: " + workDirectory);
+        log.info("Maximum allowed runtime(h): " + timeOutInHours);
         File[] files = FileUtil.getFiles(workDirectory, directories);
         stats = new ArrayList<JobStat>();
+        assert timeOutInHours > 0;
+        this.timeOutInHours = timeOutInHours;
         for (File file : files) {
             JobDirectory jd = new JobDirectory(file);
-            stats.add(jd.getJobStat());
+            JobStat jstat = jd.getJobStat();
+            // Do not record stats on the job that has not completed yet
+            if (hasCompleted(jd)) {
+                stats.add(jstat);
+            } else {
+                log.debug("Skipping the job: " + jstat);
+                log.debug("As it has not completed yet");
+            }
             // System.out.println(jd.getJobStat().getJobReportTabulated());
         }
     }
 
+    boolean hasCompleted(JobDirectory jd) {
+        JobStat jstat = jd.getJobStat();
+        if (jstat.hasResult() || jstat.getIsCancelled()
+                || jstat.getIsFinished() || hasTimedOut(jd)) {
+            return true;
+        }
+        return false;
+    }
+
+    boolean hasTimedOut(JobDirectory jd) {
+        return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > timeOutInHours;
+    }
+
     public StatProcessor getStats() {
         return new StatProcessor(stats);
     }
 
@@ -82,33 +113,18 @@ public class ExecutionStatCollector {
     public void writeStatToDB() throws SQLException {
         Set<JobStat> rjobs = new HashSet<JobStat>(stats);
         StatDB statdb = new StatDB();
+        log.debug("Removing records that has already been recorded");
+        statdb.removeRecordedJobs(rjobs);
+        log.debug("New records left: " + rjobs.size());
         statdb.insertData(rjobs);
-        statdb.conn.close();
-    }
-
-    static String getClusterJobDir() {
-        String clusterdir = ph.getProperty("cluster.tmp.directory");
-        if (clusterdir != null) {
-            clusterdir.trim();
-        }
-        return clusterdir;
-    }
-
-    static void updateTime(File statFile) throws IOException {
-        long lastMod = statFile.lastModified();
-        FileWriter fw = new FileWriter(statFile);
-        fw.write(new Long(lastMod).toString());
-        fw.close();
     }
 
-    static String getLocalJobDir() {
-        String locdir = ph.getProperty("local.tmp.directory");
-        if (locdir != null) {
-            locdir.trim();
-        }
-        return locdir;
-    }
+    /*
+     * static void updateTime(File statFile) throws IOException { long lastMod =
+     * statFile.lastModified(); FileWriter fw = new FileWriter(statFile);
+     * fw.write(new Long(lastMod).toString()); fw.close(); }
+     */
 
     /**
      * 
     * 
@@ -121,9 +137,6 @@ public class ExecutionStatCollector {
 
         // updateTime(new File(
         // "D:\\workspace\\JABA2\\jobsout\\AACon#170462904473672\\STARTED"));
-        String workDir = PropertyHelperManager.getLocalPath()
-                + getLocalJobDir().trim();
-        System.out.println(workDir);
         File[] files = FileUtil.getFiles("Y:\\fc\\www-jws2\\jaba\\jobsout",
                 directories);
         List<JobStat> stats = new ArrayList<JobStat>();
@@ -147,7 +160,8 @@ public class ExecutionStatCollector {
     static FileFilter directories = new FileFilter() {
         @Override
         public boolean accept(File pathname) {
-            return pathname.isDirectory();
+            return pathname.isDirectory()
+                    && !pathname.getName().startsWith(".");
         }
     };
 
@@ -310,4 +324,20 @@ public class ExecutionStatCollector {
         }
     }
+
+    @Override
+    public void run() {
+        log.info("Started updating statistics at " + new Date());
+
+        StatProcessor local_stats = getStats();
+        log.info("Found " + local_stats.getJobNumber() + " jobs!");
+        try {
+            writeStatToDB();
+        } catch (SQLException e) {
+            log.error("Fails to update jobs statistics database!");
+            log.error(e.getLocalizedMessage(), e);
+        }
+        log.info("Finished updating statistics at " + new Date());
+    }
+
 }
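For reference, a minimal sketch of how the reworked collector can be driven on its own, based only on the constructor, getStats() and run() shown in the diff above. The class name CollectorSmokeTest, the job directory path and the 24 hour timeout are illustrative and not part of the commit:

    import compbio.stat.collector.ExecutionStatCollector;

    public class CollectorSmokeTest {
        public static void main(String[] args) {
            // The constructor scans the job directories and keeps only completed jobs;
            // jobs untouched for longer than 24h are treated as completed (timed out).
            ExecutionStatCollector collector = new ExecutionStatCollector("jobsout", 24);
            System.out.println("Completed jobs found: " + collector.getStats().getJobNumber());
            // run() writes the collected statistics to the database; in production it is
            // invoked on a schedule by the StatisticCollector listener further down.
            collector.run();
        }
    }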
diff --git a/webservices/compbio/stat/collector/StatDB.java b/webservices/compbio/stat/collector/StatDB.java
index 55bf71a..0f851ce 100644
--- a/webservices/compbio/stat/collector/StatDB.java
+++ b/webservices/compbio/stat/collector/StatDB.java
@@ -270,6 +270,18 @@ public class StatDB {
         return stats;
     }
 
+
+    /**
+     * Removes the job if
+     * 
+     * 1) It has already been recorded
+     * 
+     * 2) It has not completed and did not timeout - this is to prevent
+     * recording the information on the incomplete jobs.
+     * 
+     * @param fsJobs
+     * @throws SQLException
+     */
     public void removeRecordedJobs(Set<JobStat> fsJobs) throws SQLException {
         String query = "select job_id from exec_stat";
 
@@ -288,7 +300,7 @@ public class StatDB {
         st.close();
     }
 
-    private static synchronized final void shutdownDBServer() {
+    public static synchronized final void shutdownDBServer() {
         // ## DATABASE SHUTDOWN SECTION ##
         /***
          * In embedded mode, an application should shut down Derby. Shutdown
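The write path that ExecutionStatCollector.writeStatToDB() relies on can be summarised with the StatDB methods visible in this diff. A hedged sketch, assuming StatDB and JobStat live in compbio.stat.collector as the file paths suggest and that the StatDB constructor declares SQLException; in the real collector the job set comes from the scanned job directories:

    import java.sql.SQLException;
    import java.util.HashSet;
    import java.util.Set;

    import compbio.stat.collector.JobStat;
    import compbio.stat.collector.StatDB;

    public class StatDbUsageSketch {
        public static void main(String[] args) throws SQLException {
            Set<JobStat> jobs = new HashSet<JobStat>(); // normally the collector's completed jobs
            StatDB statdb = new StatDB();
            statdb.removeRecordedJobs(jobs); // drop jobs whose job_id is already in exec_stat
            statdb.insertData(jobs);         // record only the new, completed jobs
            StatDB.shutdownDBServer();       // embedded Derby shutdown, now public for the servlet listener
        }
    }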
diff --git a/webservices/compbio/stat/servlet/StatisticCollector.java b/webservices/compbio/stat/servlet/StatisticCollector.java
index 3ababa3..0ffe77d 100644
--- a/webservices/compbio/stat/servlet/StatisticCollector.java
+++ b/webservices/compbio/stat/servlet/StatisticCollector.java
@@ -1,20 +1,131 @@
 package compbio.stat.servlet;
 
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
 
+import org.apache.log4j.Logger;
+
+import compbio.engine.conf.PropertyHelperManager;
+import compbio.stat.collector.ExecutionStatCollector;
+import compbio.stat.collector.StatDB;
+import compbio.util.PropertyHelper;
+
 public class StatisticCollector implements ServletContextListener {
 
+    static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();
+
+    private final Logger log = Logger.getLogger(StatisticCollector.class);
+
+    private ScheduledFuture<?> localcf;
+    private ScheduledFuture<?> clustercf;
+    private ScheduledExecutorService executor;
+
     @Override
     public void contextDestroyed(ServletContextEvent arg0) {
-        // TODO Auto-generated method stub
-
+        try {
+            if (localcf != null) {
+                localcf.cancel(true);
+            }
+            if (clustercf != null) {
+                clustercf.cancel(true);
+            }
+            executor.shutdown();
+            executor.awaitTermination(3, TimeUnit.SECONDS);
+        } catch (InterruptedException e) {
+            log.warn(e.getMessage(), e);
+        } finally {
+            StatDB.shutdownDBServer();
+            executor.shutdownNow();
+        }
     }
 
     @Override
     public void contextInitialized(ServletContextEvent arg0) {
-        // TODO Auto-generated method stub
+        String clusterWorkDir = getClusterJobDir();
+        int clusterMaxRuntime = getClusterJobTimeOut();
+
+        int localMaxRuntime = getLocalJobTimeOut();
+        String localWorkDir = getLocalJobDir();
+
+        log.info("Initializing statistics collector");
+        executor = Executors.newScheduledThreadPool(1);
+
+        if (collectClusterStats()) {
+            ExecutionStatCollector clusterCollector = new ExecutionStatCollector(
+                    clusterWorkDir, clusterMaxRuntime);
+            clustercf = executor.scheduleAtFixedRate(clusterCollector, 60,
+                    24 * 60, TimeUnit.MINUTES);
+            log.info("Collecting cluster statistics ");
+        }
+        if (collectLocalStats()) {
+            ExecutionStatCollector localCollector = new ExecutionStatCollector(
+                    localWorkDir, localMaxRuntime);
+            localcf = executor.scheduleAtFixedRate(localCollector, 10, 24 * 60,
+                    TimeUnit.MINUTES);
+            log.info("Collecting local statistics ");
+        }
+
+    }
+
+    static String getClusterJobDir() {
+        return getStringProperty(ph.getProperty("cluster.tmp.directory"));
+    }
+
+    static int getClusterJobTimeOut() {
+        int maxRunTime = 24 * 7;
+        String clusterMaxRuntime = ph.getProperty("cluster.stat.maxruntime");
+        if (clusterMaxRuntime != null) {
+            clusterMaxRuntime = clusterMaxRuntime.trim();
+            maxRunTime = Integer.parseInt(clusterMaxRuntime);
+        }
+        return maxRunTime;
+    }
+
+    static int getLocalJobTimeOut() {
+        int maxRunTime = 24;
+        String localMaxRuntime = ph.getProperty("local.stat.maxruntime");
+        if (localMaxRuntime != null) {
+            localMaxRuntime = localMaxRuntime.trim();
+            maxRunTime = Integer.parseInt(localMaxRuntime);
+        }
+
+        return maxRunTime;
+    }
+
+    static String getLocalJobDir() {
+        return getStringProperty(ph.getProperty("local.tmp.directory"));
+    }
+
+    private static String getStringProperty(String propName) {
+        String locdir = ph.getProperty(propName);
+        if (locdir != null) {
+            locdir = locdir.trim();
+        }
+        return locdir;
+    }
+
+    static boolean collectClusterStats() {
+        return getBooleanProperty(ph
+                .getProperty("cluster.stat.collector.enable"));
+
+    }
+
+    static boolean collectLocalStats() {
+        return getBooleanProperty(ph.getProperty("local.stat.collector.enable"));
+    }
+
+    private static boolean getBooleanProperty(String propName) {
+        boolean enabled = false;
+        if (propName != null) {
+            propName = propName.trim();
+            enabled = Boolean.parseBoolean(propName);
+        }
+        return enabled;
     }
 }
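The new StatisticCollector listener is driven entirely by the property keys referenced above. A short sketch of how those keys are interpreted, mirroring the trim-then-parse logic of getBooleanProperty() and the timeout getters; the values are examples only, and the real configuration is resolved through compbio.util.PropertyHelper rather than java.util.Properties:

    import java.util.Properties;

    public class StatConfigExample {
        public static void main(String[] args) {
            Properties conf = new Properties();
            conf.setProperty("local.stat.collector.enable", "true");   // run the local collector
            conf.setProperty("local.tmp.directory", "jobsout");         // example local job output directory
            conf.setProperty("local.stat.maxruntime", "24");            // hours before a job counts as timed out (default 24)
            conf.setProperty("cluster.stat.collector.enable", "false"); // cluster collector disabled in this example
            conf.setProperty("cluster.tmp.directory", "/cluster/jobsout");
            conf.setProperty("cluster.stat.maxruntime", "168");         // cluster default is 24 * 7 hours

            // Same parsing as the listener: trim the raw value, then parse it.
            boolean localEnabled = Boolean.parseBoolean(conf.getProperty("local.stat.collector.enable").trim());
            int localTimeout = Integer.parseInt(conf.getProperty("local.stat.maxruntime").trim());
            System.out.println("local collector: " + localEnabled + ", timeout(h): " + localTimeout);
        }
    }

Once enabled, the local collector is scheduled 10 minutes after the web application starts and the cluster collector after 60 minutes, each repeating every 24 hours (24 * 60 minutes).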