<classpathentry kind="lib" path="WEB-INF/lib/drmaa.jar"/>\r
<classpathentry kind="lib" path="WEB-INF/lib/compbio-util-1.3.jar"/>\r
<classpathentry kind="lib" path="WEB-INF/lib/compbio-annotations-1.0.jar"/>\r
+ <classpathentry kind="lib" path="WEB-INF/lib/derby.jar"/>\r
<classpathentry kind="output" path="WEB-INF/classes"/>\r
</classpath>\r
TODO: \r
\r
+Current stat collector has to rely on file dates! \r
+\r
Test all web services (WS), since the names were removed from the Executables
\r
Add default names for input and output in every executable and use them consistently\r
+++ /dev/null
-package compbio.engine;\r
-\r
-import java.io.File;\r
-import java.io.FileFilter;\r
-import java.io.IOException;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.Map;\r
-\r
-import org.apache.log4j.Level;\r
-import org.apache.log4j.Logger;\r
-\r
-import compbio.engine.client.ConfExecutable;\r
-import compbio.engine.conf.PropertyHelperManager;\r
-import compbio.metadata.JobStatus;\r
-import compbio.util.FileUtil;\r
-import compbio.util.PropertyHelper;\r
-\r
-/**\r
- * Number of runs of each WS = number of folders with name\r
- * \r
- * Number of successful runs = all runs with no result file\r
- * \r
- * Per period of time = limit per file creating time Runtime (avg/max) =\r
- * \r
- * started time - finished time Task & result size = result.size\r
- * \r
- * Abandoned runs - not collected runs\r
- * \r
- * Cancelled runs - cancelled\r
- * \r
- * Cluster vs local runs\r
- * \r
- * Reasons for failure = look in the err out?\r
- * \r
- * \r
- * Metadata required:\r
- * \r
- * work directory for local and cluster tasks = from Helper or cmd parameter. WS\r
- * names - enumeration. Status file names and content.\r
- * \r
- * @author pvtroshin\r
- * \r
- */\r
-public class ExecutionStatCollector {\r
-\r
- private static final Logger log = Logger\r
- .getLogger(ExecutionStatCollector.class);\r
-\r
- static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();\r
-\r
- static String getClusterJobDir() {\r
- String clusterdir = ph.getProperty("cluster.tmp.directory");\r
- if (clusterdir != null) {\r
- clusterdir.trim();\r
- }\r
- return clusterdir;\r
- }\r
-\r
- static String getLocalJobDir() {\r
- String locdir = ph.getProperty("local.tmp.directory");\r
- if (locdir != null) {\r
- locdir.trim();\r
- }\r
- return locdir;\r
- }\r
-\r
- /**\r
- * \r
- * @param args\r
- */\r
- public static void main(String[] args) {\r
-\r
- String workDir = PropertyHelperManager.getLocalPath()\r
- + getLocalJobDir().trim();\r
- System.out.println(workDir);\r
- File[] files = FileUtil.getFiles("H:/www-jws2/job_dir/jobsout",\r
- directories);\r
- for (File file : files) {\r
- JobDirectory jd = new JobDirectory(file);\r
- System.out.println(jd.getJobReport());\r
- }\r
- }\r
-\r
- static FileFilter directories = new FileFilter() {\r
- @Override\r
- public boolean accept(File pathname) {\r
- return pathname.isDirectory();\r
- }\r
- };\r
-\r
- static class JobDirectory {\r
- static final int UNDEFINED = -1;\r
-\r
- File jobdir;\r
- Map<String, File> files = new HashMap<String, File>();\r
-\r
- public JobDirectory(File directory) {\r
- this.jobdir = directory;\r
- for (File f : jobdir.listFiles()) {\r
- files.put(f.getName(), f);\r
- }\r
- }\r
-\r
- public boolean hasStatus(JobStatus status) {\r
- return files.containsKey(status.toString());\r
- }\r
-\r
- long getStartTime() {\r
- long starttime = UNDEFINED;\r
- try {\r
- File startfile = files.get(JobStatus.STARTED.toString());\r
- if (startfile == null) {\r
- startfile = files.get(JobStatus.SUBMITTED.toString());\r
- }\r
- if (startfile != null) {\r
- String start = FileUtil.readFileToString(startfile);\r
- starttime = Long.parseLong(start.trim());\r
- }\r
- } catch (IOException e) {\r
- log.log(Level.WARN,\r
- "Cannot parse start time: " + e.getMessage(), e);\r
- } catch (NumberFormatException e) {\r
- log.log(Level.WARN,\r
- "Cannot parse start time: " + e.getMessage(), e);\r
- }\r
- return starttime;\r
- }\r
- long getFinishedTime() {\r
- long ftime = UNDEFINED;\r
- try {\r
- File finished = files.get(JobStatus.FINISHED.toString());\r
- if (finished != null) {\r
- String start = FileUtil.readFileToString(finished);\r
- ftime = Long.parseLong(start.trim());\r
- // System.out.println("f " + ftime);\r
- }\r
- } catch (IOException e) {\r
- log.log(Level.WARN,\r
- "Cannot parse finished time: " + e.getMessage(), e);\r
- } catch (NumberFormatException e) {\r
- log.log(Level.WARN,\r
- "Cannot parse finished time: " + e.getMessage(), e);\r
- }\r
- return ftime;\r
- }\r
-\r
- public int getRuntime() {\r
- return (int) (getFinishedTime() - getStartTime());\r
- }\r
-\r
- String getWSName() {\r
- String name = jobdir.getName().split("#")[0];\r
- if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) {\r
- assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1;\r
- name = name.substring(1);\r
- }\r
- return name;\r
- }\r
-\r
- // Mafft, Muscle, Tcoffee, Clustal task:fasta.in result:fasta.out\r
- // Probcons task:fasta.in result:alignment.out\r
- /*\r
- * TODO replace with Universal names for WS!\r
- */\r
- long getResultSize() {\r
- String name = getWSName();\r
- File f = null;\r
- if (name.equalsIgnoreCase("Probcons")) {\r
- f = files.get("alignment.out");\r
- }\r
- f = files.get("fasta.out");\r
- if (f != null) {\r
- return f.length();\r
- }\r
- return UNDEFINED;\r
- }\r
-\r
- long getInputSize() {\r
- File input = files.get("fasta.in");\r
- if (input != null) {\r
- return input.length();\r
- }\r
- return UNDEFINED;\r
- }\r
-\r
- String getJobReport() {\r
- String report = "JOB: " + jobdir.getName() + "\n";\r
- if (getStartTime() != UNDEFINED) {\r
- report += "Started " + new Date(getStartTime()) + "\n";\r
- }\r
- if (getFinishedTime() != UNDEFINED) {\r
- report += "Finished " + new Date(getFinishedTime()) + "\n";\r
- }\r
- if (getStartTime() != UNDEFINED && getFinishedTime() != UNDEFINED) {\r
- report += "Runtime " + getRuntime() + "\n";\r
- }\r
- report += "Input size " + getInputSize() + "\n";\r
- report += "Result size " + getResultSize() + "\n";\r
- return report;\r
- }\r
- }\r
-}\r
--- /dev/null
+package compbio.ws.execstat;\r
+\r
+import java.io.File;\r
+import java.io.FileFilter;\r
+import java.io.FileWriter;\r
+import java.io.IOException;\r
+import java.text.SimpleDateFormat;\r
+import java.util.ArrayList;\r
+import java.util.HashMap;\r
+import java.util.List;\r
+import java.util.Map;\r
+\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.engine.client.ConfExecutable;\r
+import compbio.engine.conf.PropertyHelperManager;\r
+import compbio.metadata.JobStatus;\r
+import compbio.util.FileUtil;\r
+import compbio.util.PropertyHelper;\r
+import compbio.ws.client.Services;\r
+\r
+/**\r
+ * Number of runs of each WS = number of folders with name\r
+ * \r
+ * Number of successful runs = all runs with no result file\r
+ * \r
+ * Per period of time = limit per file creating time Runtime (avg/max) =\r
+ * \r
+ * started time - finished time\r
+ * \r
+ * Task & result size = result.size\r
+ * \r
+ * Abandoned runs - not collected runs\r
+ * \r
+ * Cancelled runs - cancelled\r
+ * \r
+ * Cluster vs local runs\r
+ * \r
+ * Reasons for failure = look in the err out?\r
+ * \r
+ * \r
+ * Metadata required:\r
+ * \r
+ * work directory for local and cluster tasks = from Helper or cmd parameter. WS\r
+ * names - enumeration. Status file names and content.\r
+ * \r
+ * @author pvtroshin\r
+ * \r
+ */\r
+public class ExecutionStatCollector {\r
+\r
+ static final int UNDEFINED = -1;\r
+\r
+ private static final Logger log = Logger\r
+ .getLogger(ExecutionStatCollector.class);\r
+\r
+ static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");\r
+\r
+ static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();\r
+\r
+ /** Returns the cluster job directory from "cluster.tmp.directory", or null if unset. */
+ static String getClusterJobDir() {
+ String clusterdir = ph.getProperty("cluster.tmp.directory");
+ if (clusterdir != null) {
+ // NOTE(review): String.trim() returns a new String and the result is
+ // discarded, so this line is a no-op — probably meant
+ // clusterdir = clusterdir.trim();
+ clusterdir.trim();
+ }
+ return clusterdir;
+ }
+\r
+ // Overwrites the given status file's content with the file's own
+ // last-modified time (epoch millis).
+ // NOTE(review): the FileWriter is not closed if write() throws —
+ // wrap in try/finally to avoid leaking the file handle; also
+ // new Long(lastMod).toString() could simply be Long.toString(lastMod).
+ static void updateTime(File statFile) throws IOException {
+ long lastMod = statFile.lastModified();
+ FileWriter fw = new FileWriter(statFile);
+ fw.write(new Long(lastMod).toString());
+ fw.close();
+ }
+\r
+ /** Returns the local job directory from "local.tmp.directory", or null if unset. */
+ static String getLocalJobDir() {
+ String locdir = ph.getProperty("local.tmp.directory");
+ if (locdir != null) {
+ // NOTE(review): trim() result is discarded — no-op; probably meant
+ // locdir = locdir.trim();
+ locdir.trim();
+ }
+ return locdir;
+ }
+\r
+ /**\r
+ * \r
+ * @param args\r
+ * @throws IOException\r
+ */\r
+ public static void main(String[] args) throws IOException {\r
+\r
+ // updateTime(new File(\r
+ // "D:\\workspace\\JABA2\\jobsout\\AACon#170462904473672\\STARTED"));\r
+\r
+ String workDir = PropertyHelperManager.getLocalPath()\r
+ + getLocalJobDir().trim();\r
+ System.out.println(workDir);\r
+ File[] files = FileUtil.getFiles("H:/www-jws2/job_dir/jobsout",\r
+ directories);\r
+ List<StatProcessor.JobStat> stats = new ArrayList<StatProcessor.JobStat>();\r
+ for (File file : files) {\r
+ JobDirectory jd = new JobDirectory(file);\r
+ stats.add(jd.getJobStat());\r
+ // System.out.println(jd.getJobStat().getJobReportTabulated());\r
+ }\r
+ StatProcessor sp = new StatProcessor(stats);\r
+ System.out.println(sp.reportStat());\r
+ System.out.println();\r
+ System.out.println("!!!!!!!!!!!!!!!!!!");\r
+ System.out.println();\r
+ System.out.println(sp.getSingleWSStat(Services.TcoffeeWS).reportStat());\r
+ }\r
+\r
+ static FileFilter directories = new FileFilter() {\r
+ @Override\r
+ public boolean accept(File pathname) {\r
+ return pathname.isDirectory();\r
+ }\r
+ };\r
+\r
+ static class JobDirectory {\r
+\r
+ File jobdir;\r
+ Map<String, File> files = new HashMap<String, File>();\r
+\r
+ public JobDirectory(File directory) {\r
+ this.jobdir = directory;\r
+ for (File f : jobdir.listFiles()) {\r
+ files.put(f.getName(), f);\r
+ }\r
+ }\r
+\r
+ public boolean hasStatus(JobStatus status) {\r
+ return files.containsKey(status.toString());\r
+ }\r
+\r
+ boolean isCollected() {\r
+ return hasStatus(JobStatus.COLLECTED);\r
+ }\r
+\r
+ boolean isCancelled() {\r
+ return hasStatus(JobStatus.CANCELLED);\r
+ }\r
+\r
+ long getStartTime() {\r
+ long starttime = UNDEFINED;\r
+ File startfile = files.get(JobStatus.STARTED.toString());\r
+ if (startfile == null) {\r
+ startfile = files.get(JobStatus.SUBMITTED.toString());\r
+ }\r
+ if (startfile != null) {\r
+ starttime = startfile.lastModified();\r
+ /*\r
+ * String start = FileUtil.readFileToString(startfile);\r
+ * starttime = Long.parseLong(start.trim());\r
+ */\r
+ }\r
+ return starttime;\r
+ }\r
+\r
+ long getFinishedTime() {\r
+ long ftime = UNDEFINED;\r
+ File finished = files.get(JobStatus.FINISHED.toString());\r
+ if (finished != null) {\r
+ ftime = finished.lastModified();\r
+ /*\r
+ * String start = FileUtil.readFileToString(finished); ftime =\r
+ * Long.parseLong(start.trim());\r
+ */\r
+ // System.out.println("f " + ftime);\r
+ }\r
+ /*\r
+ * } catch (IOException e) { log.log(Level.WARN,\r
+ * "Cannot parse finished time: " + e.getMessage(), e); } catch\r
+ * (NumberFormatException e) { log.log(Level.WARN,\r
+ * "Cannot parse finished time: " + e.getMessage(), e); }\r
+ */\r
+ return ftime;\r
+ }\r
+\r
+ // Derives the web-service name from the job directory name
+ // (text before '#', with the cluster-task prefix stripped).
+ String getWSName() {
+ String name = jobdir.getName().split("#")[0];
+ if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) {
+ assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1;
+ name = name.substring(1);
+ }
+ if (name.startsWith("ClustalW")) {
+ // NOTE(review): substring(name.length() - 1) keeps ONLY the last
+ // character of the name — almost certainly a bug; likely intended
+ // substring(0, name.length() - 1) to strip one trailing character.
+ // Confirm against the actual job directory naming scheme.
+ name = name.trim().substring(name.length() - 1);
+ }
+ return name;
+ }
+\r
+ Services getService() {\r
+ return Services.getService(getWSName() + "WS");\r
+ }\r
+ // Mafft, Muscle, Tcoffee, Clustal task:fasta.in result:fasta.out\r
+ // Probcons task:fasta.in result:alignment.out\r
+ /*\r
+ * TODO replace with Universal names for WS!\r
+ */\r
+ // Returns the size (bytes) of the job's result file, or UNDEFINED.
+ long getResultSize() {
+ String name = getWSName();
+ File f = null;
+ if (name.equalsIgnoreCase("Probcons")) {
+ f = files.get("alignment.out");
+ }
+ // NOTE(review): this unconditionally overwrites the Probcons lookup
+ // above, so "alignment.out" is never actually used — should be
+ // guarded by "if (f == null)".
+ f = files.get("fasta.out");
+ if (f != null) {
+ return f.length();
+ }
+ return UNDEFINED;
+ }
+\r
+ long getInputSize() {\r
+ File input = files.get("fasta.in");\r
+ if (input != null) {\r
+ return input.length();\r
+ }\r
+ return UNDEFINED;\r
+ }\r
+\r
+ StatProcessor.JobStat getJobStat() {\r
+ return new StatProcessor.JobStat(getService(), jobdir.getName(),\r
+ getStartTime(), getFinishedTime(), getInputSize(),\r
+ getResultSize(), isCollected(), isCancelled());\r
+ }\r
+\r
+ @Override\r
+ public int hashCode() {\r
+ final int prime = 31;\r
+ int result = 1;\r
+ result = prime * result\r
+ + ((jobdir == null) ? 0 : jobdir.hashCode());\r
+ return result;\r
+ }\r
+\r
+ @Override\r
+ public boolean equals(Object obj) {\r
+ if (this == obj)\r
+ return true;\r
+ if (obj == null)\r
+ return false;\r
+ if (getClass() != obj.getClass())\r
+ return false;\r
+ JobDirectory other = (JobDirectory) obj;\r
+ if (jobdir == null) {\r
+ if (other.jobdir != null)\r
+ return false;\r
+ } else if (!jobdir.equals(other.jobdir))\r
+ return false;\r
+ return true;\r
+ }\r
+\r
+ }\r
+}\r
--- /dev/null
+package compbio.ws.execstat;\r
+\r
+import java.util.ArrayList;\r
+import java.util.Collections;\r
+import java.util.Comparator;\r
+import java.util.Date;\r
+import java.util.List;\r
+\r
+import compbio.ws.client.Services;\r
+\r
+public class StatProcessor {\r
+\r
+ List<JobStat> stats;\r
+\r
+ StatProcessor(List<JobStat> stats) {\r
+ this.stats = stats;\r
+ }\r
+\r
+ List<JobStat> getAbandonedJobs() {\r
+ List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>();\r
+ for (JobStat js : stats) {\r
+ if (!js.isCollected) {\r
+ abJobs.add(js);\r
+ }\r
+ }\r
+ return abJobs;\r
+ }\r
+\r
+ List<JobStat> getCancelledJobs() {\r
+ List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>();\r
+ for (JobStat js : stats) {\r
+ if (js.isCancelled) {\r
+ abJobs.add(js);\r
+ }\r
+ }\r
+ return abJobs;\r
+ }\r
+\r
+ List<JobStat> sortByRuntime() {\r
+ List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>(stats);\r
+ Collections.sort(abJobs, JobStat.RUNTIME);\r
+ return abJobs;\r
+ }\r
+\r
+ List<JobStat> sortByStartTime() {\r
+ List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>(stats);\r
+ Collections.sort(abJobs, JobStat.STARTTIME);\r
+ return abJobs;\r
+ }\r
+\r
+ List<JobStat> sortByResultSize() {\r
+ List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>(stats);\r
+ Collections.sort(abJobs, JobStat.RESULTSIZE);\r
+ return abJobs;\r
+ }\r
+\r
+ int getJobNumber() {\r
+ return stats.size();\r
+ }\r
+\r
+ public StatProcessor getSingleWSStat(Services webService) {\r
+ List<JobStat> wsStat = new ArrayList<StatProcessor.JobStat>();\r
+ for (JobStat js : stats) {\r
+ if (js.webService == webService) {\r
+ wsStat.add(js);\r
+ }\r
+ }\r
+ return new StatProcessor(wsStat);\r
+ }\r
+\r
+ // Sums the runtimes of all jobs whose runtime is defined.
+ // Units are seconds (JobStat.getRuntime divides millis by 1000).
+ long getTotalRuntime() {
+ long counter = 0;
+ for (JobStat js : stats) {
+ int jobtime = js.getRuntime();
+ if (jobtime != ExecutionStatCollector.UNDEFINED) {
+ counter += jobtime;
+ }
+ }
+ return counter;
+ }
+\r
+ // Jobs with no result file (resultSize == UNDEFINED) are treated as
+ // unsuccessful.
+ List<JobStat> getUnsuccessfulJobs() {
+ List<JobStat> aJobs = new ArrayList<StatProcessor.JobStat>();
+ for (JobStat js : stats) {
+ // NOTE(review): "jobtime" is computed but never used — dead code.
+ int jobtime = js.getRuntime();
+ if (js.resultSize == ExecutionStatCollector.UNDEFINED) {
+ aJobs.add(js);
+ }
+ }
+ return aJobs;
+ }
+\r
+ /** Builds a plain-text summary report over all collected job stats. */
+ public String reportStat() {
+ String report = "Total Jobs: " + getJobNumber() + "\n";
+ report += "Abandoned Jobs: " + getAbandonedJobs().size() + "\n";
+ report += "Cancelled Jobs: " + getCancelledJobs().size() + "\n";
+ report += "Total Runtime (s): " + getTotalRuntime() + "\n";
+ report += "Unsuccessful Jobs: " + getUnsuccessfulJobs().size() + "\n";
+ // NOTE(review): subList(0, 9) yields 9 elements (not the advertised
+ // 10) and throws IndexOutOfBoundsException when there are fewer than
+ // 9 jobs — use subList(0, Math.min(stats.size(), 10)).
+ report += "10 longest jobs: \n\n" + sortByRuntime().subList(0, 9)
+ + "\n";
+ report += "10 biggest jobs: \n\n" + sortByResultSize().subList(0, 9)
+ + "\n";
+ return report;
+ }
+\r
+ static class JobStat {\r
+\r
+ static final Comparator<JobStat> RUNTIME = new Comparator<JobStat>() {\r
+ @Override\r
+ public int compare(JobStat o1, JobStat o2) {\r
+ return new Integer(o2.getRuntime()).compareTo(o1.getRuntime());\r
+ }\r
+ };\r
+\r
+ static final Comparator<JobStat> STARTTIME = new Comparator<JobStat>() {\r
+ @Override\r
+ public int compare(JobStat o1, JobStat o2) {\r
+ return new Long(o1.start).compareTo(o2.start);\r
+ }\r
+ };\r
+\r
+ static final Comparator<JobStat> RESULTSIZE = new Comparator<JobStat>() {\r
+ @Override\r
+ public int compare(JobStat o1, JobStat o2) {\r
+ return new Long(o2.resultSize).compareTo(o1.resultSize);\r
+ }\r
+ };\r
+\r
+ Services webService;\r
+ String jobname;\r
+ long start;\r
+ long finish;\r
+ long inputSize;\r
+ long resultSize;\r
+ boolean isCollected;\r
+ boolean isCancelled;\r
+\r
+ JobStat(Services webService, String jobname, long start, long finish,\r
+ long inputSize, long resultSize, boolean isCollected,\r
+ boolean isCancelled) {\r
+ super();\r
+ this.webService = webService;\r
+ this.jobname = jobname;\r
+ this.start = start;\r
+ this.finish = finish;\r
+ this.inputSize = inputSize;\r
+ this.resultSize = resultSize;\r
+ this.isCollected = isCollected;\r
+ this.isCancelled = isCancelled;\r
+ }\r
+\r
+ @Override\r
+ public int hashCode() {\r
+ final int prime = 31;\r
+ int result = 1;\r
+ result = prime * result + (int) (finish ^ (finish >>> 32));\r
+ result = prime * result + (int) (inputSize ^ (inputSize >>> 32));\r
+ result = prime * result + (isCancelled ? 1231 : 1237);\r
+ result = prime * result + (isCollected ? 1231 : 1237);\r
+ result = prime * result\r
+ + ((jobname == null) ? 0 : jobname.hashCode());\r
+ result = prime * result + (int) (resultSize ^ (resultSize >>> 32));\r
+ result = prime * result + (int) (start ^ (start >>> 32));\r
+ return result;\r
+ }\r
+\r
+ @Override\r
+ public boolean equals(Object obj) {\r
+ if (this == obj)\r
+ return true;\r
+ if (obj == null)\r
+ return false;\r
+ if (getClass() != obj.getClass())\r
+ return false;\r
+ JobStat other = (JobStat) obj;\r
+ if (finish != other.finish)\r
+ return false;\r
+ if (inputSize != other.inputSize)\r
+ return false;\r
+ if (isCancelled != other.isCancelled)\r
+ return false;\r
+ if (isCollected != other.isCollected)\r
+ return false;\r
+ if (jobname == null) {\r
+ if (other.jobname != null)\r
+ return false;\r
+ } else if (!jobname.equals(other.jobname))\r
+ return false;\r
+ if (resultSize != other.resultSize)\r
+ return false;\r
+ if (start != other.start)\r
+ return false;\r
+ return true;\r
+ }\r
+\r
+ public int getRuntime() {\r
+ if (start != ExecutionStatCollector.UNDEFINED\r
+ && finish != ExecutionStatCollector.UNDEFINED) {\r
+ return (int) (finish - start) / 1000;\r
+ }\r
+ return ExecutionStatCollector.UNDEFINED;\r
+ }\r
+\r
+ @Override\r
+ public String toString() {\r
+ return getJobReport();\r
+ }\r
+\r
+ String getJobReport() {\r
+ String report = "WS: " + webService + "\n";\r
+ report += "JOB: " + jobname + "\n";\r
+ if (start != ExecutionStatCollector.UNDEFINED) {\r
+ report += "Started " + new Date(start) + "\n";\r
+ }\r
+ if (finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += "Finished " + new Date(finish) + "\n";\r
+ }\r
+ if (start != ExecutionStatCollector.UNDEFINED\r
+ && finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += "Runtime " + getRuntime() + "\n";\r
+ }\r
+ report += "Input size " + inputSize + "\n";\r
+ report += "Result size " + resultSize + "\n";\r
+ report += "Collected? " + isCollected + "\n";\r
+ return report;\r
+ }\r
+\r
+ /**\r
+ * Header Job Started Finished Runtime Input Result\r
+ */\r
+ String getJobReportTabulated() {\r
+ String report = webService + "\t";\r
+ report += jobname + "\t";\r
+ if (start != ExecutionStatCollector.UNDEFINED) {\r
+ report += ExecutionStatCollector.DF.format(new Date(start))\r
+ + "\t";\r
+ } else {\r
+ report += ExecutionStatCollector.UNDEFINED + "\t";\r
+ }\r
+ if (finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += ExecutionStatCollector.DF.format(new Date(finish))\r
+ + "\t";\r
+ } else {\r
+ report += ExecutionStatCollector.UNDEFINED + "\t";\r
+ }\r
+ if (start != ExecutionStatCollector.UNDEFINED\r
+ && finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += getRuntime() + "\t";\r
+ } else {\r
+ report += ExecutionStatCollector.UNDEFINED + "\t";\r
+ }\r
+ report += inputSize + "\t";\r
+ report += resultSize + "\t";\r
+ report += isCollected + "\t";\r
+ return report;\r
+ }\r
+\r
+ }\r
+\r
+}\r
--- /dev/null
+package compbio.ws.execstat;\r
+\r
+import java.sql.Connection;\r
+import java.sql.DriverManager;\r
+import java.sql.PreparedStatement;\r
+import java.sql.SQLException;\r
+import java.sql.Statement;\r
+import java.sql.Timestamp;\r
+\r
+import compbio.ws.execstat.StatProcessor.JobStat;\r
+\r
+public class StatWriter {\r
+\r
+ /* the default framework is embedded */\r
+ // private final String framework = "embedded";\r
+ private static final String driver = "org.apache.derby.jdbc.EmbeddedDriver";\r
+ private static final String protocol = "jdbc:derby:";\r
+ private static final String statDBName = "ExecutionStatistic";\r
+\r
+ // Opens (creating if necessary) the embedded Derby statistics DB.
+ static Connection getDBConnection() throws SQLException {
+ // TODO
+ System.setProperty("derby.system.home", ".");
+
+ Connection conn = DriverManager.getConnection(protocol + statDBName
+ + ";create=true");
+
+ // NOTE(review): the comment below contradicts the code —
+ // setAutoCommit(true) leaves autocommit ON; pass false if the intent
+ // really is to control transactions manually.
+ // We want to control transactions manually. Autocommit is on by
+ // default in JDBC.
+ conn.setAutoCommit(true);
+ return conn;
+ }
+\r
+ // ServiceName,jobname,start,finish,inputSize,resultSize,isCancelled,isCollected\r
+ static void createStatTable() throws SQLException {\r
+ Connection conn = getDBConnection();\r
+ /*\r
+ * Creating a statement object that we can use for running various SQL\r
+ * statements commands against the database.\r
+ */\r
+ Statement s = conn.createStatement();\r
+ String create = "create table exec_stat("\r
+ + "number INT GENERATED ALWAYS AS IDENTITY,"\r
+ + "service_name VARCHAR(15) NOT NULL, "\r
+ + "job_id VARCHAR(35) NOT NULL PRIMARY KEY, "\r
+ + "start TIMESTAMP," + "finish TIMESTAMP,"\r
+ + "inputsize BIGINT," + "resultsize BIGINT,"\r
+ + "isCancelled SMALLINT NOT NULL,"\r
+ + "isCollected SMALLINT NOT NULL)";\r
+ // We create a table...\r
+ System.out.println(create);\r
+ s.execute(create);\r
+ s.close();\r
+ conn.close();\r
+ }\r
+\r
+ // Inserts one row into exec_stat.
+ // NOTE(review): the "jobstatus" parameter is completely ignored —
+ // every column is bound to a hard-coded sample value below (and
+ // main() even passes null). The bind values should come from the
+ // JobStat fields (webService, jobname, start, finish, inputSize,
+ // resultSize, isCancelled, isCollected).
+ // NOTE(review): pstm/conn leak if executeUpdate throws — use
+ // try/finally.
+ static void insertData(JobStat jobstatus) throws SQLException {
+ Connection conn = getDBConnection();
+ String insert = "insert into exec_stat (service_name, job_id, start, finish, "
+ + "inputsize, resultsize, isCancelled, isCollected) "
+ + "VALUES (?, ?, ?, ?, ?, ?, ?, ? )";
+ PreparedStatement pstm = conn.prepareStatement(insert);
+
+ pstm.setString(1, "webservice");
+ pstm.setString(2, "@Clustal#980273495452357");
+ pstm.setTimestamp(3, new Timestamp(190385934834l));
+ pstm.setTimestamp(4, new Timestamp(190332423423l));
+ pstm.setLong(5, 1232);
+ pstm.setLong(6, 1432422);
+ pstm.setShort(7, (short) 1);
+ pstm.setShort(8, (short) 0);
+ pstm.executeUpdate();
+ pstm.close();
+ conn.close();
+ }
+\r
+ void shutdownDBServer() {
+ // ## DATABASE SHUTDOWN SECTION ##
+ /***
+ * In embedded mode, an application should shut down Derby. Shutdown
+ * throws the XJ015 exception to confirm success.
+ ***/
+ boolean gotSQLExc = false;
+ try {
+ DriverManager.getConnection("jdbc:derby:;shutdown=true");
+ } catch (SQLException se) {
+ // NOTE(review): SQLException.getSQLState() can return null, which
+ // would NPE here — safer as "XJ015".equals(se.getSQLState()).
+ if (se.getSQLState().equals("XJ015")) {
+ gotSQLExc = true;
+ }
+ }
+ if (!gotSQLExc) {
+ System.out.println("Database did not shut down normally");
+ } else {
+ System.out.println("Database shut down normally");
+ }
+ }
+ public static void main(String[] args) throws SQLException {\r
+ // createStatTable();\r
+ insertData(null);\r
+ }\r
+}\r