-package compbio.ws.execstat;\r
+package compbio.stat.collector;\r
\r
import java.io.File;\r
import java.io.FileFilter;\r
import java.util.HashSet;\r
import java.util.List;\r
import java.util.Map;\r
+import java.util.Set;\r
\r
import org.apache.log4j.Logger;\r
\r
import compbio.engine.client.ConfExecutable;\r
+import compbio.engine.client.Executable;\r
import compbio.engine.conf.PropertyHelperManager;\r
import compbio.metadata.JobStatus;\r
+import compbio.runner.msa.ClustalW;\r
import compbio.util.FileUtil;\r
import compbio.util.PropertyHelper;\r
import compbio.ws.client.Services;\r
\r
static PropertyHelper ph = PropertyHelperManager.getPropertyHelper();\r
\r
+ // Statistics gathered from the job directories found at construction time
+ final private List<JobStat> stats;
+
+ /**
+ * Scans workDirectory for job directories (matched by the "directories"
+ * file filter) and collects one JobStat record per directory.
+ *
+ * @param workDirectory
+ *            the root directory holding individual job directories
+ */
+ public ExecutionStatCollector(String workDirectory) {
+ File[] files = FileUtil.getFiles(workDirectory, directories);
+ stats = new ArrayList<JobStat>();
+ for (File file : files) {
+ JobDirectory jd = new JobDirectory(file);
+ stats.add(jd.getJobStat());
+ // System.out.println(jd.getJobStat().getJobReportTabulated());
+ }
+ }
+\r
+ /**
+ * @return a StatProcessor wrapping all statistics collected by this
+ *         instance
+ */
+ public StatProcessor getStats() {
+ return new StatProcessor(stats);
+ }
+\r
+ /**
+ * Persists the collected statistics to the database, first removing jobs
+ * which are already recorded so only new jobs are inserted.
+ * NOTE(review): closes statdb.conn directly, so the StatDB instance
+ * cannot be reused afterwards - confirm this is intended.
+ *
+ * @throws SQLException
+ *             if the database cannot be opened or written to
+ */
+ public void writeStatToDB() throws SQLException {
+ Set<JobStat> rjobs = new HashSet<JobStat>(stats);
+ StatDB statdb = new StatDB();
+ statdb.removeRecordedJobs(rjobs);
+ statdb.insertData(rjobs);
+ statdb.conn.close();
+ }
+\r
static String getClusterJobDir() {\r
String clusterdir = ph.getProperty("cluster.tmp.directory");\r
if (clusterdir != null) {\r
String workDir = PropertyHelperManager.getLocalPath()\r
+ getLocalJobDir().trim();\r
System.out.println(workDir);\r
- File[] files = FileUtil.getFiles("H:/www-jws2/job_dir/local_jobsout",\r
+ File[] files = FileUtil.getFiles("Y:\\fc\\www-jws2\\jaba\\jobsout",\r
directories);\r
- List<StatProcessor.JobStat> stats = new ArrayList<StatProcessor.JobStat>();\r
+ List<JobStat> stats = new ArrayList<JobStat>();\r
for (File file : files) {\r
JobDirectory jd = new JobDirectory(file);\r
stats.add(jd.getJobStat());\r
System.out.println();\r
System.out.println("!!!!!!!!!!!!!!!!!!");\r
System.out.println();\r
- System.out.println(sp.getSingleWSStat(Services.TcoffeeWS).reportStat());\r
\r
- StatDB.insertData(new HashSet<StatProcessor.JobStat>(sp\r
- .getSingleWSStat(Services.TcoffeeWS).stats));\r
+ Set<JobStat> rjobs = new HashSet<JobStat>(sp.stats);\r
+ StatDB statdb = new StatDB();\r
+ statdb.removeRecordedJobs(rjobs);\r
+ statdb.insertData(rjobs);\r
}\r
\r
static FileFilter directories = new FileFilter() {\r
return ftime;\r
}\r
\r
- String getWSName() {
+ /**
+ * Resolves this job directory to the runner (Executable) class which
+ * produced it. Directory names have the form "<C?>RunnerName#jobid",
+ * where the optional one-character prefix marks cluster jobs.
+ *
+ * @return the runner class matching this directory's name
+ * @throws RuntimeException
+ *             if the directory name does not map to a class in the
+ *             runner package
+ */
+ @SuppressWarnings("unchecked")
+ Class<Executable<?>> getWSRunnerName() {
String name = jobdir.getName().split("#")[0];
- if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) {
- assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1;
- name = name.substring(1);
- }
- if (name.startsWith("ClustalW")) {
- name = name.trim().substring(name.length() - 1);
+ try {
+ // Strip the single-character cluster task prefix, if present
+ if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) {
+ assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1;
+ name = name.substring(1);
+ }
+ // Runner classes live in the same package as ClustalW
+ name = ClustalW.class.getPackage().getName() + "." + name;
+ return (Class<Executable<?>>) Class.forName(name);
+ } catch (ClassNotFoundException e) {
+ e.printStackTrace();
+ throw new RuntimeException(
+ "Cannot match the directory name to the executable! Executable name is "
+ + name);
}
- return name;
}
\r
- Services getService() {
- return Services.getService(getWSName() + "WS");
+ /**
+ * @return the web service enum constant corresponding to this job's
+ *         runner class
+ */
+ private Services getService() {
+ return Services.getService(getWSRunnerName());
}
+\r
// Mafft, Muscle, Tcoffee, Clustal task:fasta.in result:fasta.out
// Probcons task:fasta.in result:alignment.out
/*
* TODO replace with Universal names for WS!
*/
+ /**
+ * Determines the size of this job's result file. The result file name
+ * depends on the runner (see the table above).
+ *
+ * @return result file size in bytes, or UNDEFINED if no result exists
+ */
long getResultSize() {
- String name = getWSName();
+ Class<Executable<?>> name = getWSRunnerName();
File f = null;
- if (name.equalsIgnoreCase("Probcons")) {
+ if (name.getSimpleName().equalsIgnoreCase("Probcons")) {
f = files.get("alignment.out");
+ } else if (name.getSimpleName().equalsIgnoreCase("ClustalW")) {
+ f = files.get("output.txt");
+ } else {
+ // All other runners write their result to fasta.out
+ f = files.get("fasta.out");
}
- f = files.get("fasta.out");
if (f != null) {
return f.length();
}
return UNDEFINED;
}
\r
- StatProcessor.JobStat getJobStat() {
- return new StatProcessor.JobStat(getService(), getClusterJobID(),
+ /**
+ * Builds a JobStat snapshot for this job directory. The last two
+ * arguments are (isCancelled, isCollected) - note the order matches
+ * JobStat.newInstance, which differs from the old constructor.
+ */
+ JobStat getJobStat() {
+ return JobStat.newInstance(getService(), getClusterJobID(),
jobdir.getName(), getStartTime(), getFinishedTime(),
- getInputSize(), getResultSize(), isCollected(),
- isCancelled());
+ getInputSize(), getResultSize(), isCancelled(),
+ isCollected());
}
\r
@Override\r
--- /dev/null
+package compbio.stat.collector;\r
+\r
+import java.sql.Timestamp;\r
+import java.text.SimpleDateFormat;\r
+import java.util.Comparator;\r
+import java.util.Date;\r
+\r
+import compbio.engine.client.ConfExecutable;\r
+import compbio.util.Util;\r
+import compbio.ws.client.Services;\r
+\r
+public class JobStat {\r
+\r
+ static final Comparator<JobStat> RUNTIME = new Comparator<JobStat>() {\r
+ @Override\r
+ public int compare(JobStat o1, JobStat o2) {\r
+ return new Integer(o2.getRuntime()).compareTo(o1.getRuntime());\r
+ }\r
+ };\r
+\r
+ static final Comparator<JobStat> STARTTIME = new Comparator<JobStat>() {\r
+ @Override\r
+ public int compare(JobStat o1, JobStat o2) {\r
+ return new Long(o1.start).compareTo(o2.start);\r
+ }\r
+ };\r
+\r
+ static final Comparator<JobStat> RESULTSIZE = new Comparator<JobStat>() {\r
+ @Override\r
+ public int compare(JobStat o1, JobStat o2) {\r
+ return new Long(o2.resultSize).compareTo(o1.resultSize);\r
+ }\r
+ };\r
+\r
+ Services webService;\r
+ String clusterJobId;\r
+ String jobname;\r
+ long start;\r
+ long finish;\r
+ long inputSize;\r
+ long resultSize;\r
+ boolean isCollected;\r
+ boolean isCancelled;\r
+\r
+ private JobStat(Services webService, String clusterJobId, String jobname,\r
+ long start, long finish, long inputSize, long resultSize,\r
+ boolean isCancelled, boolean isCollected) {\r
+ super();\r
+ this.webService = webService;\r
+ this.clusterJobId = clusterJobId;\r
+ this.jobname = jobname;\r
+ this.start = start;\r
+ this.finish = finish;\r
+ this.inputSize = inputSize;\r
+ this.resultSize = resultSize;\r
+ this.isCancelled = isCancelled;\r
+ this.isCollected = isCollected;\r
+ validate();\r
+ }\r
+\r
+ static JobStat newInstance(Services webService, String clusterJobId,\r
+ String jobname, long start, long finish, long inputSize,\r
+ long resultSize, boolean isCancelled, boolean isCollected) {\r
+ return new JobStat(webService, clusterJobId, jobname, start, finish,\r
+ inputSize, resultSize, isCancelled, isCollected);\r
+ }\r
+\r
+ static JobStat newInstance(Services webService, String clusterJobId,\r
+ String jobname, Timestamp start, Timestamp finish, long inputSize,\r
+ long resultSize, boolean isCancelled, boolean isCollected) {\r
+ long startm = ExecutionStatCollector.UNDEFINED;\r
+ long stopm = ExecutionStatCollector.UNDEFINED;\r
+ if (start != null) {\r
+ startm = start.getTime();\r
+ }\r
+ if (finish != null) {\r
+ stopm = finish.getTime();\r
+ }\r
+ return new JobStat(webService, clusterJobId, jobname, startm, stopm,\r
+ inputSize, resultSize, isCancelled, isCollected);\r
+ }\r
+\r
+ void validate() {\r
+ if (webService == null) {\r
+ throw new AssertionError("webService must be defined!:\n " + this);\r
+ }\r
+ if (Util.isEmpty(jobname)) {\r
+ throw new AssertionError("jobname must be defined!:\n" + this);\r
+ }\r
+ }\r
+\r
+ private JobStat(String jobId) {\r
+ assert !Util.isEmpty(jobname);\r
+ this.jobname = jobId;\r
+ }\r
+\r
+ static JobStat newIncompleteStat(String jobname) {\r
+ return new JobStat(jobname);\r
+ }\r
+\r
+ public boolean isClusterJob() {\r
+ return jobname.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX);\r
+ }\r
+\r
+ @Override\r
+ public int hashCode() {\r
+ final int prime = 31;\r
+ int result = 1;\r
+ result = prime * result + ((jobname == null) ? 0 : jobname.hashCode());\r
+ return result;\r
+ }\r
+\r
+ @Override\r
+ public boolean equals(Object obj) {\r
+ if (this == obj)\r
+ return true;\r
+ if (obj == null)\r
+ return false;\r
+ if (getClass() != obj.getClass())\r
+ return false;\r
+ JobStat other = (JobStat) obj;\r
+ if (jobname == null) {\r
+ if (other.jobname != null)\r
+ return false;\r
+ } else if (!jobname.equals(other.jobname))\r
+ return false;\r
+ return true;\r
+ }\r
+\r
+ public int getRuntime() {\r
+ if (start != ExecutionStatCollector.UNDEFINED\r
+ && finish != ExecutionStatCollector.UNDEFINED) {\r
+ return (int) (finish - start) / 1000;\r
+ }\r
+ return ExecutionStatCollector.UNDEFINED;\r
+ }\r
+\r
+ @Override\r
+ public String toString() {\r
+ return getJobReport();\r
+ }\r
+\r
+ String getJobReport() {\r
+ String report = "WS: " + webService + "\n";\r
+ report += "JOB: " + jobname + "\n";\r
+ if (start != ExecutionStatCollector.UNDEFINED) {\r
+ report += "Started " + new Date(start) + "\n";\r
+ }\r
+ if (finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += "Finished " + new Date(finish) + "\n";\r
+ }\r
+ if (start != ExecutionStatCollector.UNDEFINED\r
+ && finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += "Runtime " + getRuntime() + "\n";\r
+ }\r
+ report += "Input size " + inputSize + "\n";\r
+ report += "Result size " + resultSize + "\n";\r
+ report += "ClusterJobID " + clusterJobId + "\n";\r
+ report += "Collected? " + isCollected + "\n";\r
+ report += "Cancelled? " + isCancelled + "\n";\r
+ return report;\r
+ }\r
+\r
+ /**\r
+ * Header Job Started Finished Runtime Input Result\r
+ */\r
+ String getJobReportTabulated() {\r
+ String report = webService + "\t";\r
+ report += jobname + "\t";\r
+ if (start != ExecutionStatCollector.UNDEFINED) {\r
+ report += ExecutionStatCollector.DF.format(new Date(start)) + "\t";\r
+ } else {\r
+ report += ExecutionStatCollector.UNDEFINED + "\t";\r
+ }\r
+ if (finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += ExecutionStatCollector.DF.format(new Date(finish)) + "\t";\r
+ } else {\r
+ report += ExecutionStatCollector.UNDEFINED + "\t";\r
+ }\r
+ if (start != ExecutionStatCollector.UNDEFINED\r
+ && finish != ExecutionStatCollector.UNDEFINED) {\r
+ report += getRuntime() + "\t";\r
+ } else {\r
+ report += ExecutionStatCollector.UNDEFINED + "\t";\r
+ }\r
+ report += inputSize + "\t";\r
+ report += resultSize + "\t";\r
+ report += clusterJobId + "\t";\r
+ report += isCollected + "\t";\r
+ report += isCancelled + "\t";\r
+ return report;\r
+ }\r
+\r
+ public Services getWebService() {\r
+ return webService;\r
+ }\r
+\r
+ public String getClusterJobId() {\r
+ return clusterJobId;\r
+ }\r
+\r
+ public String getJobname() {\r
+ return jobname;\r
+ }\r
+\r
+ public String getEscJobname() {\r
+ String[] parts = jobname.split("#");\r
+ return parts[0] + "%23" + parts[1];\r
+ }\r
+\r
+ public String getStart() {\r
+ if (start != ExecutionStatCollector.UNDEFINED) {\r
+ return SimpleDateFormat.getDateTimeInstance().format(\r
+ new Date(start));\r
+ }\r
+ return "?";\r
+ }\r
+\r
+ public String getFinish() {\r
+ if (finish != ExecutionStatCollector.UNDEFINED) {\r
+ return SimpleDateFormat.getDateTimeInstance().format(\r
+ new Date(finish));\r
+ }\r
+ return "?";\r
+ }\r
+\r
+ public long getInputSize() {\r
+ if (inputSize != ExecutionStatCollector.UNDEFINED) {\r
+ return inputSize / 1000;\r
+ }\r
+ return 0;\r
+ }\r
+\r
+ public long getResultSize() {\r
+ if (resultSize != ExecutionStatCollector.UNDEFINED) {\r
+ return resultSize / 1000;\r
+ }\r
+ return 0;\r
+ }\r
+\r
+ public boolean hasResult() {\r
+ return resultSize != ExecutionStatCollector.UNDEFINED;\r
+ }\r
+\r
+ public boolean hasStarted() {\r
+ return start != ExecutionStatCollector.UNDEFINED;\r
+ }\r
+\r
+ public boolean getIsCollected() {\r
+ return isCollected;\r
+ }\r
+\r
+ public boolean getIsCancelled() {\r
+ return isCancelled;\r
+ }\r
+\r
+ public boolean getIsFinished() {\r
+ return finish != ExecutionStatCollector.UNDEFINED;\r
+ }\r
+\r
+}
\ No newline at end of file
-package compbio.ws.execstat;\r
+package compbio.stat.collector;\r
\r
import java.sql.Connection;\r
import java.sql.DriverManager;\r
import java.sql.Statement;\r
import java.sql.Timestamp;\r
import java.util.ArrayList;\r
-import java.util.Date;\r
import java.util.List;\r
import java.util.Set;\r
\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.engine.conf.PropertyHelperManager;\r
+import compbio.util.Util;\r
import compbio.ws.client.Services;\r
-import compbio.ws.execstat.StatProcessor.JobStat;\r
\r
+/**\r
+ * The database must be stored in the application root directory and called\r
+ * "ExecutionStatistic"\r
+ * \r
+ * @author pvtroshin\r
+ * \r
+ */\r
public class StatDB {\r
\r
- /* the default framework is embedded */\r
- // private final String framework = "embedded";\r
private static final String driver = "org.apache.derby.jdbc.EmbeddedDriver";\r
private static final String protocol = "jdbc:derby:";\r
private static final String statDBName = "ExecutionStatistic";\r
\r
+ private static final Logger log = Logger.getLogger(StatDB.class);\r
+\r
+ Connection conn;\r
private static Connection getDBConnection() throws SQLException {\r
- // TODO\r
- System.setProperty("derby.system.home", ".");\r
+ String dbpath = PropertyHelperManager.getLocalPath();\r
+ log.info("Looking for JABAWS access statistics database at: " + dbpath);\r
+ System.setProperty("derby.system.home", dbpath);\r
Connection conn = DriverManager.getConnection(protocol + statDBName\r
- + ";create=true");\r
+ + ";create=false");\r
\r
- // We want to control transactions manually. Autocommit is on by\r
- // default in JDBC.\r
- // conn.setAutoCommit(false);\r
+ conn.setAutoCommit(true);\r
+ return conn;\r
+ }\r
+\r
+ public StatDB() throws SQLException {\r
+ this.conn = getDBConnection();\r
+ }\r
+\r
+ /**\r
+ * Connect to test database\r
+ * \r
+ * @param ignored\r
+ * @throws SQLException\r
+ */\r
+ StatDB(boolean ignored) throws SQLException {\r
+ this.conn = getTestDBConnection();\r
+ }\r
+\r
+ private static Connection getTestDBConnection() throws SQLException {\r
+ System.setProperty("derby.system.home", "testsrc/testdata");\r
+ Connection conn = DriverManager.getConnection(protocol + statDBName\r
+ + ";create=false");\r
+ conn.setAutoCommit(true);\r
return conn;\r
}\r
\r
* \r
* @throws SQLException\r
*/\r
- private static void createStatTable() throws SQLException {\r
- Connection conn = getDBConnection();\r
+ private void createStatTable() throws SQLException {\r
+\r
/*\r
* Creating a statement object that we can use for running various SQL\r
* statements commands against the database.\r
conn.close();\r
}\r
\r
- static void insertData(Set<JobStat> jobstatus) throws SQLException {\r
+ void insertData(Set<JobStat> jobstatus) throws SQLException {\r
System.out.println("Inserting " + jobstatus.size());\r
- Connection conn = getDBConnection();\r
+\r
conn.setAutoCommit(false);\r
String insert = "insert into exec_stat (service_name, cluster_job_id, job_id, start, finish, "\r
+ "inputsize, resultsize, isCancelled, isCollected, isClusterJob) "\r
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";\r
PreparedStatement pstm = conn.prepareStatement(insert);\r
for (JobStat js : jobstatus) {\r
+ // Has to be present
pstm.setString(1, js.webService.toString());\r
- pstm.setString(2, js.clusterJobId);\r
+\r
+ if (!Util.isEmpty(js.clusterJobId)) {\r
+ pstm.setString(2, js.clusterJobId);\r
+ } else {\r
+ pstm.setString(2, null);\r
+ }\r
+ // Has to be present
pstm.setString(3, js.jobname);\r
- pstm.setTimestamp(4, new Timestamp(js.start));\r
- pstm.setTimestamp(5, new Timestamp(js.finish));\r
+\r
+ if (js.start != ExecutionStatCollector.UNDEFINED) {\r
+ pstm.setTimestamp(4, new Timestamp(js.start));\r
+ } else {\r
+ pstm.setTimestamp(4, null);\r
+ }\r
+ if (js.finish != ExecutionStatCollector.UNDEFINED) {\r
+ pstm.setTimestamp(5, new Timestamp(js.finish));\r
+ } else {\r
+ pstm.setTimestamp(5, null);\r
+ }\r
+ // -1 if UNDEFINED\r
pstm.setLong(6, js.inputSize);\r
+ // -1 if UNDEFINED\r
pstm.setLong(7, js.resultSize);\r
+\r
pstm.setBoolean(8, js.isCancelled);\r
pstm.setBoolean(9, js.isCollected);\r
pstm.setBoolean(10, js.isClusterJob());\r
}\r
conn.commit();\r
pstm.close();\r
- conn.close();\r
}\r
\r
- static List<JobStat> readData(Timestamp from, Timestamp to,\r
+ public List<JobStat> readData(Timestamp from, Timestamp to,\r
Services wservice, Boolean clusterOnly) throws SQLException {\r
- Connection conn = getDBConnection();\r
+\r
String query = "select service_name, cluster_job_id, job_id, start, finish, inputsize, "\r
+ "resultsize, isCancelled, isCollected from exec_stat where start BETWEEN ? and ? ";\r
\r
pstm.setString(3, wservice.toString());\r
}\r
pstm.execute();\r
- List<JobStat> stats = new ArrayList<StatProcessor.JobStat>();\r
+ List<JobStat> stats = new ArrayList<JobStat>();\r
ResultSet rs = pstm.getResultSet();\r
while (rs.next()) {\r
- stats.add(new JobStat(Services.getService(rs.getString(1)), rs\r
- .getString(2), rs.getString(3), rs.getTimestamp(4)\r
- .getTime(), rs.getTimestamp(5).getTime(), rs.getLong(6), rs\r
- .getLong(7), rs.getBoolean(8), rs.getBoolean(9)));\r
+ stats.add(JobStat.newInstance(Services.getService(rs.getString(1)),\r
+ rs.getString(2), rs.getString(3), rs.getTimestamp(4),\r
+ rs.getTimestamp(5), rs.getLong(6), rs.getLong(7),\r
+ rs.getBoolean(8), rs.getBoolean(9)));\r
}\r
rs.close();\r
pstm.close();\r
- conn.close();\r
+\r
return stats;\r
}\r
+ public void removeRecordedJobs(Set<JobStat> fsJobs) throws SQLException {\r
\r
- static void removeRecordedJobs(Set<String> fsJobs) throws SQLException {\r
- Connection conn = getDBConnection();\r
String query = "select job_id from exec_stat";\r
\r
Statement st = conn.createStatement();\r
\r
while (result.next()) {\r
String recordedJob = result.getString(1);\r
- if (fsJobs.contains(recordedJob)) {\r
- fsJobs.remove(recordedJob);\r
+ JobStat recStat = JobStat.newIncompleteStat(recordedJob);\r
+ if (fsJobs.contains(recStat)) {\r
+ fsJobs.remove(recStat);\r
}\r
}\r
result.close();\r
- conn.close();\r
}\r
\r
- void shutdownDBServer() {\r
+ public void shutdownDBServer() {\r
// ## DATABASE SHUTDOWN SECTION ##\r
/***\r
* In embedded mode, an application should shut down Derby. Shutdown\r
* throws the XJ015 exception to confirm success.\r
***/\r
+ try {\r
+ if (conn != null) {\r
+ conn.close();\r
+ }\r
+ } catch (SQLException e) {\r
+ System.err.println("Database commit failed with "\r
+ + e.getLocalizedMessage());\r
+ }\r
boolean gotSQLExc = false;\r
try {\r
DriverManager.getConnection("jdbc:derby:;shutdown=true");\r
}\r
}\r
if (!gotSQLExc) {\r
- System.out.println("Database did not shut down normally");\r
+ System.err.println("Database did not shut down normally");\r
} else {\r
System.out.println("Database shut down normally");\r
}\r
}\r
public static void main(String[] args) throws SQLException {\r
- // createStatTable();\r
+ // new StatDB().createStatTable();\r
// insertData(null);\r
-\r
- Date from = new Date();\r
- from.setMonth(1);\r
- System.out.println(new StatProcessor(readData(\r
- new Timestamp(from.getTime()),\r
- new Timestamp(new Date().getTime()), null, null)).reportStat());\r
-\r
+ /*\r
+ * StatDB statdb = new StatDB(); Date from = new Date();\r
+ * from.setMonth(1); System.out.println(new\r
+ * StatProcessor(statdb.readData( new Timestamp(from.getTime()), new\r
+ * Timestamp(new Date().getTime()), null, null)).reportStat());\r
+ */\r
}\r
}\r
--- /dev/null
+package compbio.stat.collector;\r
+\r
+import java.sql.SQLException;\r
+import java.sql.Timestamp;\r
+import java.util.Calendar;\r
+import java.util.Date;\r
+import java.util.GregorianCalendar;\r
+import java.util.Iterator;\r
+import java.util.Map;\r
+import java.util.TreeMap;\r
+\r
+import compbio.ws.client.Services;\r
+\r
+/**
+ * Aggregates job statistics per web service for the last 12 months.
+ * NOTE(review): getStats() currently builds the maps but neither returns
+ * nor stores them - looks like work in progress, confirm intended use.
+ */
+public class StatManager {
+
+ /**
+ * Iterates month-by-month from 12 months before the supplied date up to
+ * (and including) that date. remove() is not supported.
+ */
+ static class DateRoller implements Iterator<Date> {
+ final Date initDate;
+ final Calendar calendar;
+
+ public DateRoller(Date date) {
+ this.initDate = date;
+ calendar = GregorianCalendar.getInstance();
+ calendar.setTime(date);
+ // Start the iteration one year back
+ calendar.add(Calendar.MONTH, -12);
+ }
+
+ Date getCurrentDate() {
+ return initDate;
+ }
+
+ @Override
+ public boolean hasNext() {
+ // Stops once the rolling calendar reaches the initial date;
+ // relies on exact millisecond equality with initDate
+ return !calendar.getTime().equals(initDate);
+ }
+
+ @Override
+ public Date next() {
+ calendar.add(Calendar.MONTH, 1);
+ return calendar.getTime();
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+
+ }
+
+ /**
+ * Reads total, cluster-only and local-only statistics for every service
+ * over the last year. clusterOnly flag: null - all, true - cluster,
+ * false - local.
+ *
+ * @throws SQLException
+ *             if the statistics database cannot be read
+ */
+ void getStats() throws SQLException {
+ Calendar startTime = Calendar.getInstance();
+ startTime.roll(Calendar.YEAR, false);
+ Timestamp startDate = new Timestamp(startTime.getTimeInMillis());
+ Timestamp stopDate = new Timestamp(new Date().getTime());
+ StatDB statdb = null;
+
+ statdb = new StatDB();
+
+ // Total
+ Map<Services, StatProcessor> stats = new TreeMap<Services, StatProcessor>();
+ for (Services service : Services.values()) {
+ stats.put(
+ service,
+ new StatProcessor(statdb.readData(startDate, stopDate,
+ service, null)));
+ }
+
+ // Cluster
+ Map<Services, StatProcessor> statsCluster = new TreeMap<Services, StatProcessor>();
+ for (Services service : Services.values()) {
+ statsCluster.put(
+ service,
+ new StatProcessor(statdb.readData(startDate, stopDate,
+ service, true)));
+ }
+ // Local
+ Map<Services, StatProcessor> statsLocal = new TreeMap<Services, StatProcessor>();
+ for (Services service : Services.values()) {
+ statsLocal.put(
+ service,
+ new StatProcessor(statdb.readData(startDate, stopDate,
+ service, false)));
+ }
+
+ }
+}
--- /dev/null
+package compbio.stat.collector;\r
+\r
+import java.util.ArrayList;\r
+import java.util.Collections;\r
+import java.util.List;\r
+\r
+import compbio.ws.client.Services;\r
+\r
+/**
+ * Read-only analysis helpers over a list of JobStat records: filtering by
+ * execution location and job outcome, sorting, and a plain-text summary
+ * report.
+ */
+public class StatProcessor {
+
+ List<JobStat> stats;
+
+ public StatProcessor(List<JobStat> stats) {
+ this.stats = stats;
+ }
+
+ public List<JobStat> getClusterJobs() {
+ return getJobSubset(true);
+ }
+
+ public List<JobStat> getLocalJobs() {
+ return getJobSubset(false);
+ }
+
+ /**
+ * @param cluster
+ *            true selects cluster jobs, false selects local jobs
+ */
+ private List<JobStat> getJobSubset(boolean cluster) {
+ List<JobStat> subset = new ArrayList<JobStat>();
+ for (JobStat js : stats) {
+ if (js.isClusterJob() == cluster) {
+ subset.add(js);
+ }
+ }
+ return subset;
+ }
+
+ /*
+ * TODO List<JobStat> getNewStat() throws SQLException { Set<String> jobids
+ * = new HashSet<String>(); for(JobStat js: stats) { jobids.add(js.jobname);
+ * } StatDB.removeRecordedJobs(jobids); List<String> newjobs = new
+ * HashSet<String>(); for(String jobid: jobids) { if(newjobs.co)
+ * jobids.add(js.jobname); } }
+ */
+
+ /**
+ * Not collected. Excludes all cancelled jobs, and jobs with no results as
+ * these are reported separately.
+ */
+ public List<JobStat> getAbandonedJobs() {
+ List<JobStat> abJobs = new ArrayList<JobStat>();
+ for (JobStat js : stats) {
+ if (!js.isCollected && !js.isCancelled && js.hasResult()) {
+ abJobs.add(js);
+ }
+ }
+ return abJobs;
+ }
+
+ /**
+ * Started and finished but did not produce a result.
+ */
+ public List<JobStat> getFailedJobs() {
+ List<JobStat> failedJobs = new ArrayList<JobStat>();
+ for (JobStat js : stats) {
+ if (js.hasStarted() && js.getIsFinished() && !js.hasResult()) {
+ failedJobs.add(js);
+ }
+ }
+ return failedJobs;
+ }
+
+ public List<JobStat> getCancelledJobs() {
+ List<JobStat> cancelled = new ArrayList<JobStat>();
+ for (JobStat js : stats) {
+ if (js.isCancelled) {
+ cancelled.add(js);
+ }
+ }
+ return cancelled;
+ }
+
+ /** @return a copy of the stats, longest-running first */
+ public List<JobStat> sortByRuntime() {
+ List<JobStat> sorted = new ArrayList<JobStat>(stats);
+ Collections.sort(sorted, JobStat.RUNTIME);
+ return sorted;
+ }
+
+ /** @return a copy of the stats, earliest-started first */
+ public List<JobStat> sortByStartTime() {
+ List<JobStat> sorted = new ArrayList<JobStat>(stats);
+ Collections.sort(sorted, JobStat.STARTTIME);
+ return sorted;
+ }
+
+ /** @return a copy of the stats, biggest result first */
+ public List<JobStat> sortByResultSize() {
+ List<JobStat> sorted = new ArrayList<JobStat>(stats);
+ Collections.sort(sorted, JobStat.RESULTSIZE);
+ return sorted;
+ }
+
+ public int getJobNumber() {
+ return stats.size();
+ }
+
+ public List<JobStat> getJobs() {
+ return stats;
+ }
+
+ /** @return a new StatProcessor over this web service's jobs only */
+ public StatProcessor getSingleWSStat(Services webService) {
+ List<JobStat> wsStat = new ArrayList<JobStat>();
+ for (JobStat js : stats) {
+ if (js.webService == webService) {
+ wsStat.add(js);
+ }
+ }
+ return new StatProcessor(wsStat);
+ }
+
+ /**
+ * @return the sum of all defined job runtimes in seconds; jobs with an
+ *         UNDEFINED runtime are skipped
+ */
+ public long getTotalRuntime() {
+ long counter = 0;
+ for (JobStat js : stats) {
+ int jobtime = js.getRuntime();
+ if (jobtime != ExecutionStatCollector.UNDEFINED) {
+ counter += jobtime;
+ }
+ }
+ return counter;
+ }
+
+ /** @return jobs which never produced a result */
+ public List<JobStat> getIncompleteJobs() {
+ List<JobStat> aJobs = new ArrayList<JobStat>();
+ for (JobStat js : stats) {
+ // (removed an unused local that called js.getRuntime())
+ if (!js.hasResult()) {
+ aJobs.add(js);
+ }
+ }
+ return aJobs;
+ }
+
+ /** @return a plain-text summary of these job statistics */
+ public String reportStat() {
+ String report = "Total Jobs: " + getJobNumber() + "\n";
+ report += "Abandoned Jobs: " + getAbandonedJobs().size() + "\n";
+ report += "Cancelled Jobs: " + getCancelledJobs().size() + "\n";
+ report += "Total Runtime (s): " + getTotalRuntime() + "\n";
+ report += "Unsuccessful Jobs: " + getIncompleteJobs().size() + "\n";
+ // subList's upper bound is exclusive, so 0..10 yields the promised
+ // ten elements (was 0..9 - only nine)
+ List<JobStat> byRuntime = sortByRuntime();
+ if (byRuntime.size() > 10) {
+ report += "10 longest jobs: \n\n" + byRuntime.subList(0, 10)
+ + "\n";
+ } else {
+ report += "longest jobs: \n\n" + byRuntime + "\n";
+ }
+ List<JobStat> bySize = sortByResultSize();
+ if (bySize.size() > 10) {
+ report += "10 biggest jobs: \n\n"
+ + bySize.subList(0, 10) + "\n";
+ } else {
+ report += "biggest jobs: \n\n" + bySize + "\n";
+ }
+ return report;
+ }
+
+}
--- /dev/null
+package compbio.stat.servlet;\r
+\r
+import java.io.IOException;\r
+import java.sql.SQLException;\r
+import java.sql.Timestamp;\r
+import java.util.Calendar;\r
+import java.util.Date;\r
+import java.util.Map;\r
+import java.util.TreeMap;\r
+\r
+import javax.servlet.RequestDispatcher;\r
+import javax.servlet.ServletException;\r
+import javax.servlet.http.HttpServlet;\r
+import javax.servlet.http.HttpServletRequest;\r
+import javax.servlet.http.HttpServletResponse;\r
+\r
+import compbio.stat.collector.StatDB;\r
+import compbio.stat.collector.StatProcessor;\r
+import compbio.ws.client.Services;\r
+\r
+/**
+ * Prepares yearly usage statistics - total, cluster-only and local-only,
+ * per web service - and forwards them to statpages/Statistics.jsp for
+ * rendering.
+ */
+public class DisplayStat extends HttpServlet {
+
+ @Override
+ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ // TODO the reporting period is currently fixed to the last year
+ Calendar startTime = Calendar.getInstance();
+ startTime.roll(Calendar.YEAR, false);
+ Timestamp startDate = new Timestamp(startTime.getTimeInMillis());
+ Timestamp stopDate = new Timestamp(new Date().getTime());
+ try {
+ StatDB statdb = new StatDB();
+
+ // clusterOnly flag: null - all, true - cluster, false - local
+ Map<Services, StatProcessor> stats = collectStats(statdb,
+ startDate, stopDate, null);
+ Map<Services, StatProcessor> statsCluster = collectStats(statdb,
+ startDate, stopDate, true);
+ Map<Services, StatProcessor> statsLocal = collectStats(statdb,
+ startDate, stopDate, false);
+
+ req.setAttribute("stat", stats);
+ req.setAttribute("statTotal", Totals.sumStats(stats));
+
+ req.setAttribute("statCluster", statsCluster);
+ req.setAttribute("statLocal", statsLocal);
+ req.setAttribute("startDate", startDate.getTime());
+ req.setAttribute("stopDate", stopDate.getTime());
+
+ RequestDispatcher dispatcher = req
+ .getRequestDispatcher("statpages/Statistics.jsp");
+ dispatcher.forward(req, resp);
+
+ } catch (SQLException e) {
+ // Fail loudly instead of silently forwarding nothing; consistent
+ // with the Joblist servlet's error handling
+ e.printStackTrace();
+ throw new ServletException("SQLException : "
+ + e.getLocalizedMessage());
+ }
+
+ }
+
+ /**
+ * Reads per-service statistics for the given period.
+ *
+ * @param clusterOnly
+ *            null for all jobs, true for cluster jobs only, false for
+ *            local jobs only
+ */
+ private static Map<Services, StatProcessor> collectStats(StatDB statdb,
+ Timestamp startDate, Timestamp stopDate, Boolean clusterOnly)
+ throws SQLException {
+ Map<Services, StatProcessor> stats = new TreeMap<Services, StatProcessor>();
+ for (Services service : Services.values()) {
+ stats.put(
+ service,
+ new StatProcessor(statdb.readData(startDate, stopDate,
+ service, clusterOnly)));
+ }
+ return stats;
+ }
+
+}
--- /dev/null
+package compbio.stat.servlet;\r
+\r
+import java.io.IOException;\r
+import java.sql.SQLException;\r
+import java.sql.Timestamp;\r
+\r
+import javax.servlet.RequestDispatcher;\r
+import javax.servlet.ServletException;\r
+import javax.servlet.http.HttpServlet;\r
+import javax.servlet.http.HttpServletRequest;\r
+import javax.servlet.http.HttpServletResponse;\r
+import javax.servlet.http.HttpSession;\r
+\r
+import compbio.engine.conf.PropertyHelperManager;\r
+import compbio.stat.collector.StatDB;\r
+import compbio.stat.collector.StatProcessor;\r
+import compbio.util.PropertyHelper;\r
+import compbio.util.Util;\r
+import compbio.ws.client.Services;\r
+\r
+public class Joblist extends HttpServlet {\r
+\r
+ static final String JT_FAILED = "failed";\r
+ static final String JT_ABANDONED = "abandoned";\r
+ static final String JT_CANCELLED = "cancelled";\r
+ static final String JT_ALL = "all";\r
+ static final String JT_INCOMPLETE = "incomplete";\r
+ /**\r
+ * Input:\r
+ * \r
+ * ws=${ws.key}\r
+ * \r
+ * where=everywhere cluster local\r
+ * \r
+ * type=cancelled all incomplete\r
+ * \r
+ * from=${startDate}\r
+ * \r
+ * to=${stopDate}\r
+ * \r
+ */\r
+ @Override\r
+ protected void doGet(HttpServletRequest req, HttpServletResponse resp)\r
+ throws ServletException, IOException {\r
+ /*\r
+ * Values for this servlet are not user supplied, so do not bother with\r
+ * nice error messages just throw the exception is something is wrong!\r
+ */\r
+ String wsname = req.getParameter("ws");\r
+ Services wservice = Services.getService(wsname);\r
+ if (wservice == null) {\r
+ throw new ServletException(\r
+ "Webservice name 'ws' is not specified or is incorrect. Given value:"\r
+ + wsname);\r
+ }\r
+ String executor = req.getParameter("where");\r
+ if (Util.isEmpty(executor)) {\r
+ throw new ServletException("'Where' is not specified!");\r
+ }\r
+ if (!(executor.equalsIgnoreCase("everywhere")\r
+ || executor.equalsIgnoreCase("local") || executor\r
+ .equalsIgnoreCase("cluster"))) {\r
+ throw new ServletException("Invalid 'where' value '" + executor\r
+ + "' can be one of 'everywhere', 'local', 'cluster'!");\r
+ }\r
+ Boolean where = null;\r
+ if (executor.equalsIgnoreCase("local")) {\r
+ where = false;\r
+ } else if (executor.equalsIgnoreCase("cluster")) {\r
+ where = true;\r
+ }\r
+\r
+ String jobtype = req.getParameter("type");\r
+ if (Util.isEmpty(executor)) {\r
+ throw new ServletException("'type' is not specified!");\r
+ }\r
+ if (!(jobtype.equalsIgnoreCase(JT_CANCELLED)\r
+ || jobtype.equalsIgnoreCase(JT_ALL)\r
+ || jobtype.equalsIgnoreCase(JT_INCOMPLETE)\r
+ || jobtype.equalsIgnoreCase(JT_ABANDONED) || jobtype\r
+ .equalsIgnoreCase(JT_FAILED))) {\r
+ throw new ServletException("Invalid 'jobtype' value '" + jobtype\r
+ + "' can be one of 'cancelled', 'all', 'incomplete', "\r
+ + "'failed', 'abandoned'!");\r
+ }\r
+ String fromDate = req.getParameter("from");\r
+ if (Util.isEmpty(fromDate)) {\r
+ throw new ServletException("'fromDate' is not specified!");\r
+ }\r
+ String toDate = req.getParameter("to");\r
+ if (Util.isEmpty(toDate)) {\r
+ throw new ServletException("'toDate' is not specified!");\r
+ }\r
+\r
+ PropertyHelper helper = PropertyHelperManager.getPropertyHelper();\r
+ String clusterTempDir = helper.getProperty("cluster.tmp.directory")\r
+ .trim();\r
+ String localTempDir = helper.getProperty("local.tmp.directory").trim();\r
+ // TODO include the time slice\r
+ Timestamp startDate = new Timestamp(Long.parseLong(fromDate));\r
+ Timestamp stopDate = new Timestamp(Long.parseLong(toDate));\r
+ StatDB statdb = null;\r
+ try {\r
+ statdb = new StatDB();\r
+ StatProcessor stat = new StatProcessor(statdb.readData(startDate,\r
+ stopDate, wservice, where));\r
+\r
+ HttpSession session = req.getSession();\r
+ if (jobtype.equalsIgnoreCase(JT_CANCELLED)) {\r
+ session.setAttribute("stat",\r
+ new StatProcessor(stat.getCancelledJobs()));\r
+ } else if (jobtype.equalsIgnoreCase(JT_INCOMPLETE)) {\r
+ session.setAttribute("stat",\r
+ new StatProcessor(stat.getIncompleteJobs()));\r
+ } else if (jobtype.equalsIgnoreCase(JT_ALL)) {\r
+ session.setAttribute("stat", stat);\r
+ } else if (jobtype.equalsIgnoreCase(JT_FAILED)) {\r
+ session.setAttribute("stat",\r
+ new StatProcessor(stat.getFailedJobs()));\r
+ } else if (jobtype.equalsIgnoreCase(JT_ABANDONED)) {\r
+ session.setAttribute("stat",\r
+ new StatProcessor(stat.getAbandonedJobs()));\r
+ } else {\r
+ throw new AssertionError("Unrecognised job type: " + jobtype);\r
+ }\r
+ session.setAttribute("clusterTemp", clusterTempDir);\r
+ session.setAttribute("localTemp", localTempDir);\r
+ req.setAttribute("startDate", startDate.getTime());\r
+ req.setAttribute("stopDate", stopDate.getTime());\r
+\r
+ RequestDispatcher dispatcher = req\r
+ .getRequestDispatcher("statpages/Joblist.jsp");\r
+ dispatcher.forward(req, resp);\r
+\r
+		} catch (SQLException e) {\r
+			// Pass the cause on so the full stack trace reaches the container\r
+			// log; printStackTrace to stderr is dropped as redundant.\r
+			throw new ServletException("SQLException : "\r
+					+ e.getLocalizedMessage(), e);\r
+		}\r
+\r
+ }\r
+}\r
--- /dev/null
+package compbio.stat.servlet;\r
+\r
+import javax.servlet.ServletContextEvent;\r
+import javax.servlet.ServletContextListener;\r
+\r
+/**\r
+ * Web-application lifecycle listener intended to start and stop background\r
+ * statistics collection with the servlet context. Both callbacks are still\r
+ * auto-generated stubs - no collection is wired up yet.\r
+ */\r
+public class StatisticCollector implements ServletContextListener {\r
+\r
+	@Override\r
+	public void contextDestroyed(ServletContextEvent arg0) {\r
+		// TODO Auto-generated method stub\r
+\r
+	}\r
+\r
+	@Override\r
+	public void contextInitialized(ServletContextEvent arg0) {\r
+		// TODO Auto-generated method stub\r
+\r
+	}\r
+\r
+}\r
--- /dev/null
+package compbio.stat.servlet;\r
+\r
+import java.util.Map;\r
+\r
+import compbio.stat.collector.StatProcessor;\r
+import compbio.ws.client.Services;\r
+\r
+/**\r
+ * Plain accumulator for job counts summed over all services. Instances are\r
+ * produced by {@link #sumStats(Map)} and read from JSPs via the getters.\r
+ */\r
+public class Totals {\r
+	int total;\r
+	int incomplete;\r
+	int abandoned;\r
+	int cancelled;\r
+	int failed;\r
+\r
+	public int getTotal() {\r
+		return total;\r
+	}\r
+\r
+	public int getIncomplete() {\r
+		return incomplete;\r
+	}\r
+\r
+	public int getAbandoned() {\r
+		return abandoned;\r
+	}\r
+\r
+	public int getCancelled() {\r
+		return cancelled;\r
+	}\r
+\r
+	public int getFailed() {\r
+		return failed;\r
+	}\r
+\r
+	/**\r
+	 * Sums the per-service statistics into a single Totals object.\r
+	 *\r
+	 * @param stat\r
+	 *            per-service statistics, must not be null\r
+	 * @return the aggregated counts\r
+	 */\r
+	static Totals sumStats(Map<Services, StatProcessor> stat) {\r
+		Totals total = new Totals();\r
+		// Only the values are needed - iterate them directly instead of\r
+		// walking entrySet() and ignoring the keys.\r
+		for (StatProcessor sp : stat.values()) {\r
+			total.total += sp.getJobNumber();\r
+			total.incomplete += sp.getIncompleteJobs().size();\r
+			total.abandoned += sp.getAbandonedJobs().size();\r
+			total.cancelled += sp.getCancelledJobs().size();\r
+			total.failed += sp.getFailedJobs().size();\r
+		}\r
+		return total;\r
+	}\r
+}
\ No newline at end of file
--- /dev/null
+package compbio.stat.servlet;\r
+\r
+import java.io.IOException;\r
+import java.sql.SQLException;\r
+import java.sql.Timestamp;\r
+import java.util.Calendar;\r
+import java.util.Date;\r
+import java.util.Map;\r
+import java.util.TreeMap;\r
+\r
+import javax.servlet.RequestDispatcher;\r
+import javax.servlet.ServletException;\r
+import javax.servlet.http.HttpServlet;\r
+import javax.servlet.http.HttpServletRequest;\r
+import javax.servlet.http.HttpServletResponse;\r
+\r
+import compbio.stat.collector.StatDB;\r
+import compbio.stat.collector.StatProcessor;\r
+import compbio.ws.client.Services;\r
+\r
+/**\r
+ * Prepares usage statistics for the last year for every service and forwards\r
+ * them to statpages/Statistics.jsp. Three views are built: all jobs,\r
+ * cluster-only jobs and local-only jobs.\r
+ */\r
+public class YearStat extends HttpServlet {\r
+\r
+	private static final long serialVersionUID = 1L;\r
+\r
+	@Override\r
+	protected void doGet(HttpServletRequest req, HttpServletResponse resp)\r
+			throws ServletException, IOException {\r
+		// One year back from now. add() is clearer than roll() and behaves\r
+		// identically for YEAR (there is no larger field to preserve).\r
+		Calendar startTime = Calendar.getInstance();\r
+		startTime.add(Calendar.YEAR, -1);\r
+		Timestamp startDate = new Timestamp(startTime.getTimeInMillis());\r
+		Timestamp stopDate = new Timestamp(new Date().getTime());\r
+		try {\r
+			StatDB statdb = new StatDB();\r
+			// executor: null - any, true - cluster only, false - local only\r
+			Map<Services, StatProcessor> stats = readStats(statdb, startDate,\r
+					stopDate, null);\r
+			Map<Services, StatProcessor> statsCluster = readStats(statdb,\r
+					startDate, stopDate, true);\r
+			Map<Services, StatProcessor> statsLocal = readStats(statdb,\r
+					startDate, stopDate, false);\r
+\r
+			req.setAttribute("stat", stats);\r
+			req.setAttribute("statTotal", Totals.sumStats(stats));\r
+			req.setAttribute("statCluster", statsCluster);\r
+			req.setAttribute("statLocal", statsLocal);\r
+			req.setAttribute("startDate", startDate.getTime());\r
+			req.setAttribute("stopDate", stopDate.getTime());\r
+\r
+			RequestDispatcher dispatcher = req\r
+					.getRequestDispatcher("statpages/Statistics.jsp");\r
+			dispatcher.forward(req, resp);\r
+\r
+		} catch (SQLException e) {\r
+			// Bug fix: the exception was previously swallowed after a\r
+			// printStackTrace, leaving the client with an empty page. Rethrow\r
+			// with the cause preserved so the container can log and report it.\r
+			throw new ServletException(\r
+					"Could not read usage statistics from the database: "\r
+							+ e.getLocalizedMessage(), e);\r
+		}\r
+	}\r
+\r
+	/**\r
+	 * Reads per-service job statistics for the given period. Extracted from\r
+	 * three copy-pasted loops in doGet.\r
+	 *\r
+	 * @param executor\r
+	 *            null - any executor, true - cluster only, false - local only\r
+	 */\r
+	private static Map<Services, StatProcessor> readStats(StatDB statdb,\r
+			Timestamp startDate, Timestamp stopDate, Boolean executor)\r
+			throws SQLException {\r
+		Map<Services, StatProcessor> stats = new TreeMap<Services, StatProcessor>();\r
+		for (Services service : Services.values()) {\r
+			stats.put(service, new StatProcessor(statdb.readData(startDate,\r
+					stopDate, service, executor)));\r
+		}\r
+		return stats;\r
+	}\r
+\r
+}\r
import compbio.data.msa.JABAService;\r
import compbio.data.msa.MsaWS;\r
import compbio.data.msa.SequenceAnnotation;\r
+import compbio.engine.client.Executable;\r
\r
/**\r
* List of web services currently supported by JABAWS version 2\r
\r
public static Services getService(String servName) {\r
servName = servName.trim().toLowerCase();\r
- if (servName.equalsIgnoreCase(MafftWS.toString())) {\r
- return MafftWS;\r
+ for (Services service : Services.values()) {\r
+ if (service.toString().equalsIgnoreCase(servName)) {\r
+ return service;\r
+ }\r
}\r
- if (servName.equalsIgnoreCase(ClustalWS.toString())) {\r
- return ClustalWS;\r
- }\r
- if (servName.equalsIgnoreCase(TcoffeeWS.toString())) {\r
- return TcoffeeWS;\r
- }\r
- if (servName.equalsIgnoreCase(MuscleWS.toString())) {\r
- return MuscleWS;\r
- }\r
- if (servName.equalsIgnoreCase(ProbconsWS.toString())) {\r
- return ProbconsWS;\r
- }\r
- if (servName.equalsIgnoreCase(AAConWS.toString())) {\r
- return AAConWS;\r
- }\r
- if (servName.equalsIgnoreCase(JronnWS.toString())) {\r
- return JronnWS;\r
- }\r
- if (servName.equalsIgnoreCase(DisemblWS.toString())) {\r
- return DisemblWS;\r
- }\r
- if (servName.equalsIgnoreCase(GlobPlotWS.toString())) {\r
- return GlobPlotWS;\r
+ return null;\r
+ }\r
+\r
+	/**\r
+	 * Finds the web service that wraps the given executable runner class by\r
+	 * matching the runner's simple name against the service name, e.g.\r
+	 * ClustalW -&gt; ClustalWS.\r
+	 *\r
+	 * @return the matching service or null if none matches\r
+	 */\r
+	public static Services getService(Class<? extends Executable<?>> runnerClassName) {\r
+		// Widened from Class<Executable<?>>: Class is invariant, so the old\r
+		// signature rejected concrete runner classes such as ClustalW.class.\r
+		assert runnerClassName != null;\r
+		String sname = runnerClassName.getSimpleName().toLowerCase();\r
+		for (Services service : Services.values()) {\r
+			// NOTE(review): substring match is ambiguous if one runner name\r
+			// is a prefix of another - confirm runner names stay distinct.\r
+			if (service.toString().toLowerCase().contains(sname)) {\r
+				return service;\r
+			}\r
+		}\r
 		return null;\r
 	}\r
+++ /dev/null
-package compbio.ws.execstat;\r
-\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Comparator;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import compbio.engine.client.ConfExecutable;\r
-import compbio.ws.client.Services;\r
-\r
-public class StatProcessor {\r
-\r
- List<JobStat> stats;\r
-\r
- StatProcessor(List<JobStat> stats) {\r
- this.stats = stats;\r
- }\r
-\r
- /*\r
- * TODO List<JobStat> getNewStat() throws SQLException { Set<String> jobids\r
- * = new HashSet<String>(); for(JobStat js: stats) { jobids.add(js.jobname);\r
- * } StatDB.removeRecordedJobs(jobids); List<String> newjobs = new\r
- * HashSet<String>(); for(String jobid: jobids) { if(newjobs.co)\r
- * jobids.add(js.jobname); } }\r
- */\r
-\r
- List<JobStat> getAbandonedJobs() {\r
- List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>();\r
- for (JobStat js : stats) {\r
- if (!js.isCollected) {\r
- abJobs.add(js);\r
- }\r
- }\r
- return abJobs;\r
- }\r
-\r
- List<JobStat> getCancelledJobs() {\r
- List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>();\r
- for (JobStat js : stats) {\r
- if (js.isCancelled) {\r
- abJobs.add(js);\r
- }\r
- }\r
- return abJobs;\r
- }\r
-\r
- List<JobStat> sortByRuntime() {\r
- List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>(stats);\r
- Collections.sort(abJobs, JobStat.RUNTIME);\r
- return abJobs;\r
- }\r
-\r
- List<JobStat> sortByStartTime() {\r
- List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>(stats);\r
- Collections.sort(abJobs, JobStat.STARTTIME);\r
- return abJobs;\r
- }\r
-\r
- List<JobStat> sortByResultSize() {\r
- List<JobStat> abJobs = new ArrayList<StatProcessor.JobStat>(stats);\r
- Collections.sort(abJobs, JobStat.RESULTSIZE);\r
- return abJobs;\r
- }\r
-\r
- int getJobNumber() {\r
- return stats.size();\r
- }\r
-\r
- public StatProcessor getSingleWSStat(Services webService) {\r
- List<JobStat> wsStat = new ArrayList<StatProcessor.JobStat>();\r
- for (JobStat js : stats) {\r
- if (js.webService == webService) {\r
- wsStat.add(js);\r
- }\r
- }\r
- return new StatProcessor(wsStat);\r
- }\r
-\r
- long getTotalRuntime() {\r
- long counter = 0;\r
- for (JobStat js : stats) {\r
- int jobtime = js.getRuntime();\r
- if (jobtime != ExecutionStatCollector.UNDEFINED) {\r
- counter += jobtime;\r
- }\r
- }\r
- return counter;\r
- }\r
-\r
- List<JobStat> getUnsuccessfulJobs() {\r
- List<JobStat> aJobs = new ArrayList<StatProcessor.JobStat>();\r
- for (JobStat js : stats) {\r
- int jobtime = js.getRuntime();\r
- if (js.resultSize == ExecutionStatCollector.UNDEFINED) {\r
- aJobs.add(js);\r
- }\r
- }\r
- return aJobs;\r
- }\r
-\r
- public String reportStat() {\r
- String report = "Total Jobs: " + getJobNumber() + "\n";\r
- report += "Abandoned Jobs: " + getAbandonedJobs().size() + "\n";\r
- report += "Cancelled Jobs: " + getCancelledJobs().size() + "\n";\r
- report += "Total Runtime (s): " + getTotalRuntime() + "\n";\r
- report += "Unsuccessful Jobs: " + getUnsuccessfulJobs().size() + "\n";\r
- if (sortByRuntime().size() > 10) {\r
- report += "10 longest jobs: \n\n" + sortByRuntime().subList(0, 9)\r
- + "\n";\r
- } else {\r
- report += "longest jobs: \n\n" + sortByRuntime() + "\n";\r
- }\r
- if (sortByResultSize().size() > 10)\r
- report += "10 biggest jobs: \n\n"\r
- + sortByResultSize().subList(0, 9) + "\n";\r
- else {\r
- report += "biggest jobs: \n\n" + sortByResultSize() + "\n";\r
- }\r
- return report;\r
- }\r
-\r
- static class JobStat {\r
-\r
- static final Comparator<JobStat> RUNTIME = new Comparator<JobStat>() {\r
- @Override\r
- public int compare(JobStat o1, JobStat o2) {\r
- return new Integer(o2.getRuntime()).compareTo(o1.getRuntime());\r
- }\r
- };\r
-\r
- static final Comparator<JobStat> STARTTIME = new Comparator<JobStat>() {\r
- @Override\r
- public int compare(JobStat o1, JobStat o2) {\r
- return new Long(o1.start).compareTo(o2.start);\r
- }\r
- };\r
-\r
- static final Comparator<JobStat> RESULTSIZE = new Comparator<JobStat>() {\r
- @Override\r
- public int compare(JobStat o1, JobStat o2) {\r
- return new Long(o2.resultSize).compareTo(o1.resultSize);\r
- }\r
- };\r
-\r
- Services webService;\r
- String clusterJobId;\r
- String jobname;\r
- long start;\r
- long finish;\r
- long inputSize;\r
- long resultSize;\r
- boolean isCollected;\r
- boolean isCancelled;\r
-\r
- JobStat(Services webService, String clusterJobId, String jobname,\r
- long start, long finish, long inputSize, long resultSize,\r
- boolean isCollected, boolean isCancelled) {\r
- super();\r
- this.webService = webService;\r
- this.clusterJobId = clusterJobId;\r
- this.jobname = jobname;\r
- this.start = start;\r
- this.finish = finish;\r
- this.inputSize = inputSize;\r
- this.resultSize = resultSize;\r
- this.isCollected = isCollected;\r
- this.isCancelled = isCancelled;\r
- }\r
-\r
- public boolean isClusterJob() {\r
- return jobname.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX);\r
- }\r
-\r
- @Override\r
- public int hashCode() {\r
- final int prime = 31;\r
- int result = 1;\r
- result = prime * result + (int) (finish ^ (finish >>> 32));\r
- result = prime * result + (int) (inputSize ^ (inputSize >>> 32));\r
- result = prime * result + (isCancelled ? 1231 : 1237);\r
- result = prime * result + (isCollected ? 1231 : 1237);\r
- result = prime * result\r
- + ((jobname == null) ? 0 : jobname.hashCode());\r
- result = prime * result + (int) (resultSize ^ (resultSize >>> 32));\r
- result = prime * result + (int) (start ^ (start >>> 32));\r
- return result;\r
- }\r
-\r
- @Override\r
- public boolean equals(Object obj) {\r
- if (this == obj)\r
- return true;\r
- if (obj == null)\r
- return false;\r
- if (getClass() != obj.getClass())\r
- return false;\r
- JobStat other = (JobStat) obj;\r
- if (finish != other.finish)\r
- return false;\r
- if (inputSize != other.inputSize)\r
- return false;\r
- if (isCancelled != other.isCancelled)\r
- return false;\r
- if (isCollected != other.isCollected)\r
- return false;\r
- if (jobname == null) {\r
- if (other.jobname != null)\r
- return false;\r
- } else if (!jobname.equals(other.jobname))\r
- return false;\r
- if (resultSize != other.resultSize)\r
- return false;\r
- if (start != other.start)\r
- return false;\r
- return true;\r
- }\r
-\r
- public int getRuntime() {\r
- if (start != ExecutionStatCollector.UNDEFINED\r
- && finish != ExecutionStatCollector.UNDEFINED) {\r
- return (int) (finish - start) / 1000;\r
- }\r
- return ExecutionStatCollector.UNDEFINED;\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- return getJobReport();\r
- }\r
-\r
- String getJobReport() {\r
- String report = "WS: " + webService + "\n";\r
- report += "JOB: " + jobname + "\n";\r
- if (start != ExecutionStatCollector.UNDEFINED) {\r
- report += "Started " + new Date(start) + "\n";\r
- }\r
- if (finish != ExecutionStatCollector.UNDEFINED) {\r
- report += "Finished " + new Date(finish) + "\n";\r
- }\r
- if (start != ExecutionStatCollector.UNDEFINED\r
- && finish != ExecutionStatCollector.UNDEFINED) {\r
- report += "Runtime " + getRuntime() + "\n";\r
- }\r
- report += "Input size " + inputSize + "\n";\r
- report += "Result size " + resultSize + "\n";\r
- report += "ClusterJobID " + clusterJobId + "\n";\r
- report += "Collected? " + isCollected + "\n";\r
- report += "Cancelled? " + isCancelled + "\n";\r
- return report;\r
- }\r
-\r
- /**\r
- * Header Job Started Finished Runtime Input Result\r
- */\r
- String getJobReportTabulated() {\r
- String report = webService + "\t";\r
- report += jobname + "\t";\r
- if (start != ExecutionStatCollector.UNDEFINED) {\r
- report += ExecutionStatCollector.DF.format(new Date(start))\r
- + "\t";\r
- } else {\r
- report += ExecutionStatCollector.UNDEFINED + "\t";\r
- }\r
- if (finish != ExecutionStatCollector.UNDEFINED) {\r
- report += ExecutionStatCollector.DF.format(new Date(finish))\r
- + "\t";\r
- } else {\r
- report += ExecutionStatCollector.UNDEFINED + "\t";\r
- }\r
- if (start != ExecutionStatCollector.UNDEFINED\r
- && finish != ExecutionStatCollector.UNDEFINED) {\r
- report += getRuntime() + "\t";\r
- } else {\r
- report += ExecutionStatCollector.UNDEFINED + "\t";\r
- }\r
- report += inputSize + "\t";\r
- report += resultSize + "\t";\r
- report += clusterJobId + "\t";\r
- report += isCollected + "\t";\r
- report += isCancelled + "\t";\r
- return report;\r
- }\r
-\r
- }\r
-\r
-}\r