From 451b60b868fc003d2b02a6f453461f365eb83f62 Mon Sep 17 00:00:00 2001 From: pvtroshin Date: Mon, 23 May 2011 17:16:06 +0000 Subject: [PATCH] Adding JABA web services usage statistics web application. Stat database is to follow git-svn-id: link to svn.lifesci.dundee.ac.uk/svn/barton/ptroshin/JABA2@4153 e3abac25-378b-4346-85de-24260fe3988d --- .../collector}/ExecutionStatCollector.java | 84 ++++-- webservices/compbio/stat/collector/JobStat.java | 261 ++++++++++++++++++ .../{ws/execstat => stat/collector}/StatDB.java | 139 +++++++--- .../compbio/stat/collector/StatManager.java | 85 ++++++ .../compbio/stat/collector/StatProcessor.java | 168 ++++++++++++ webservices/compbio/stat/servlet/DisplayStat.java | 77 ++++++ webservices/compbio/stat/servlet/Joblist.java | 142 ++++++++++ .../compbio/stat/servlet/StatisticCollector.java | 20 ++ webservices/compbio/stat/servlet/Totals.java | 46 ++++ webservices/compbio/stat/servlet/YearStat.java | 77 ++++++ webservices/compbio/ws/client/Services.java | 40 +-- webservices/compbio/ws/execstat/StatProcessor.java | 287 -------------------- 12 files changed, 1049 insertions(+), 377 deletions(-) rename webservices/compbio/{ws/execstat => stat/collector}/ExecutionStatCollector.java (70%) create mode 100644 webservices/compbio/stat/collector/JobStat.java rename webservices/compbio/{ws/execstat => stat/collector}/StatDB.java (56%) create mode 100644 webservices/compbio/stat/collector/StatManager.java create mode 100644 webservices/compbio/stat/collector/StatProcessor.java create mode 100644 webservices/compbio/stat/servlet/DisplayStat.java create mode 100644 webservices/compbio/stat/servlet/Joblist.java create mode 100644 webservices/compbio/stat/servlet/StatisticCollector.java create mode 100644 webservices/compbio/stat/servlet/Totals.java create mode 100644 webservices/compbio/stat/servlet/YearStat.java delete mode 100644 webservices/compbio/ws/execstat/StatProcessor.java diff --git a/webservices/compbio/ws/execstat/ExecutionStatCollector.java b/webservices/compbio/stat/collector/ExecutionStatCollector.java similarity index 70% rename from webservices/compbio/ws/execstat/ExecutionStatCollector.java rename to webservices/compbio/stat/collector/ExecutionStatCollector.java index 945076e..cef65f9 100644 --- a/webservices/compbio/ws/execstat/ExecutionStatCollector.java +++ b/webservices/compbio/stat/collector/ExecutionStatCollector.java @@ -1,4 +1,4 @@ -package compbio.ws.execstat; +package compbio.stat.collector; import java.io.File; import java.io.FileFilter; @@ -11,12 +11,15 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import org.apache.log4j.Logger; import compbio.engine.client.ConfExecutable; +import compbio.engine.client.Executable; import compbio.engine.conf.PropertyHelperManager; import compbio.metadata.JobStatus; +import compbio.runner.msa.ClustalW; import compbio.util.FileUtil; import compbio.util.PropertyHelper; import compbio.ws.client.Services; @@ -60,6 +63,30 @@ public class ExecutionStatCollector { static PropertyHelper ph = PropertyHelperManager.getPropertyHelper(); + final private List stats; + + public ExecutionStatCollector(String workDirectory) { + File[] files = FileUtil.getFiles(workDirectory, directories); + stats = new ArrayList(); + for (File file : files) { + JobDirectory jd = new JobDirectory(file); + stats.add(jd.getJobStat()); + // System.out.println(jd.getJobStat().getJobReportTabulated()); + } + } + + public StatProcessor getStats() { + return new 
StatProcessor(stats); + } + + public void writeStatToDB() throws SQLException { + Set rjobs = new HashSet(stats); + StatDB statdb = new StatDB(); + statdb.removeRecordedJobs(rjobs); + statdb.insertData(rjobs); + statdb.conn.close(); + } + static String getClusterJobDir() { String clusterdir = ph.getProperty("cluster.tmp.directory"); if (clusterdir != null) { @@ -97,9 +124,9 @@ public class ExecutionStatCollector { String workDir = PropertyHelperManager.getLocalPath() + getLocalJobDir().trim(); System.out.println(workDir); - File[] files = FileUtil.getFiles("H:/www-jws2/job_dir/local_jobsout", + File[] files = FileUtil.getFiles("Y:\\fc\\www-jws2\\jaba\\jobsout", directories); - List stats = new ArrayList(); + List stats = new ArrayList(); for (File file : files) { JobDirectory jd = new JobDirectory(file); stats.add(jd.getJobStat()); @@ -110,10 +137,11 @@ public class ExecutionStatCollector { System.out.println(); System.out.println("!!!!!!!!!!!!!!!!!!"); System.out.println(); - System.out.println(sp.getSingleWSStat(Services.TcoffeeWS).reportStat()); - StatDB.insertData(new HashSet(sp - .getSingleWSStat(Services.TcoffeeWS).stats)); + Set rjobs = new HashSet(sp.stats); + StatDB statdb = new StatDB(); + statdb.removeRecordedJobs(rjobs); + statdb.insertData(rjobs); } static FileFilter directories = new FileFilter() { @@ -197,33 +225,43 @@ public class ExecutionStatCollector { return ftime; } - String getWSName() { + @SuppressWarnings("unchecked") + Class> getWSRunnerName() { String name = jobdir.getName().split("#")[0]; - if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) { - assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1; - name = name.substring(1); - } - if (name.startsWith("ClustalW")) { - name = name.trim().substring(name.length() - 1); + try { + if (name.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX)) { + assert ConfExecutable.CLUSTER_TASK_ID_PREFIX.length() == 1; + name = name.substring(1); + } + name = ClustalW.class.getPackage().getName() + "." + name; + return (Class>) Class.forName(name); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + throw new RuntimeException( + "Cannot match the directory name to the executable! Executable name is " + + name); } - return name; } - Services getService() { - return Services.getService(getWSName() + "WS"); + private Services getService() { + return Services.getService(getWSRunnerName()); } + // Mafft, Muscle, Tcoffee, Clustal task:fasta.in result:fasta.out // Probcons task:fasta.in result:alignment.out /* * TODO replace with Universal names for WS! 
*/ long getResultSize() { - String name = getWSName(); + Class> name = getWSRunnerName(); File f = null; - if (name.equalsIgnoreCase("Probcons")) { + if (name.getSimpleName().equalsIgnoreCase("Probcons")) { f = files.get("alignment.out"); + } else if (name.getSimpleName().equalsIgnoreCase("ClustalW")) { + f = files.get("output.txt"); + } else { + f = files.get("fasta.out"); } - f = files.get("fasta.out"); if (f != null) { return f.length(); } @@ -238,11 +276,11 @@ public class ExecutionStatCollector { return UNDEFINED; } - StatProcessor.JobStat getJobStat() { - return new StatProcessor.JobStat(getService(), getClusterJobID(), + JobStat getJobStat() { + return JobStat.newInstance(getService(), getClusterJobID(), jobdir.getName(), getStartTime(), getFinishedTime(), - getInputSize(), getResultSize(), isCollected(), - isCancelled()); + getInputSize(), getResultSize(), isCancelled(), + isCollected()); } @Override diff --git a/webservices/compbio/stat/collector/JobStat.java b/webservices/compbio/stat/collector/JobStat.java new file mode 100644 index 0000000..67767dc --- /dev/null +++ b/webservices/compbio/stat/collector/JobStat.java @@ -0,0 +1,261 @@ +package compbio.stat.collector; + +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.Comparator; +import java.util.Date; + +import compbio.engine.client.ConfExecutable; +import compbio.util.Util; +import compbio.ws.client.Services; + +public class JobStat { + + static final Comparator RUNTIME = new Comparator() { + @Override + public int compare(JobStat o1, JobStat o2) { + return new Integer(o2.getRuntime()).compareTo(o1.getRuntime()); + } + }; + + static final Comparator STARTTIME = new Comparator() { + @Override + public int compare(JobStat o1, JobStat o2) { + return new Long(o1.start).compareTo(o2.start); + } + }; + + static final Comparator RESULTSIZE = new Comparator() { + @Override + public int compare(JobStat o1, JobStat o2) { + return new Long(o2.resultSize).compareTo(o1.resultSize); + } + }; + + Services webService; + String clusterJobId; + String jobname; + long start; + long finish; + long inputSize; + long resultSize; + boolean isCollected; + boolean isCancelled; + + private JobStat(Services webService, String clusterJobId, String jobname, + long start, long finish, long inputSize, long resultSize, + boolean isCancelled, boolean isCollected) { + super(); + this.webService = webService; + this.clusterJobId = clusterJobId; + this.jobname = jobname; + this.start = start; + this.finish = finish; + this.inputSize = inputSize; + this.resultSize = resultSize; + this.isCancelled = isCancelled; + this.isCollected = isCollected; + validate(); + } + + static JobStat newInstance(Services webService, String clusterJobId, + String jobname, long start, long finish, long inputSize, + long resultSize, boolean isCancelled, boolean isCollected) { + return new JobStat(webService, clusterJobId, jobname, start, finish, + inputSize, resultSize, isCancelled, isCollected); + } + + static JobStat newInstance(Services webService, String clusterJobId, + String jobname, Timestamp start, Timestamp finish, long inputSize, + long resultSize, boolean isCancelled, boolean isCollected) { + long startm = ExecutionStatCollector.UNDEFINED; + long stopm = ExecutionStatCollector.UNDEFINED; + if (start != null) { + startm = start.getTime(); + } + if (finish != null) { + stopm = finish.getTime(); + } + return new JobStat(webService, clusterJobId, jobname, startm, stopm, + inputSize, resultSize, isCancelled, isCollected); + } + + void validate() { 
+ if (webService == null) { + throw new AssertionError("webService must be defined!:\n " + this); + } + if (Util.isEmpty(jobname)) { + throw new AssertionError("jobname must be defined!:\n" + this); + } + } + + private JobStat(String jobId) { + assert !Util.isEmpty(jobname); + this.jobname = jobId; + } + + static JobStat newIncompleteStat(String jobname) { + return new JobStat(jobname); + } + + public boolean isClusterJob() { + return jobname.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((jobname == null) ? 0 : jobname.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + JobStat other = (JobStat) obj; + if (jobname == null) { + if (other.jobname != null) + return false; + } else if (!jobname.equals(other.jobname)) + return false; + return true; + } + + public int getRuntime() { + if (start != ExecutionStatCollector.UNDEFINED + && finish != ExecutionStatCollector.UNDEFINED) { + return (int) (finish - start) / 1000; + } + return ExecutionStatCollector.UNDEFINED; + } + + @Override + public String toString() { + return getJobReport(); + } + + String getJobReport() { + String report = "WS: " + webService + "\n"; + report += "JOB: " + jobname + "\n"; + if (start != ExecutionStatCollector.UNDEFINED) { + report += "Started " + new Date(start) + "\n"; + } + if (finish != ExecutionStatCollector.UNDEFINED) { + report += "Finished " + new Date(finish) + "\n"; + } + if (start != ExecutionStatCollector.UNDEFINED + && finish != ExecutionStatCollector.UNDEFINED) { + report += "Runtime " + getRuntime() + "\n"; + } + report += "Input size " + inputSize + "\n"; + report += "Result size " + resultSize + "\n"; + report += "ClusterJobID " + clusterJobId + "\n"; + report += "Collected? " + isCollected + "\n"; + report += "Cancelled? 
" + isCancelled + "\n"; + return report; + } + + /** + * Header Job Started Finished Runtime Input Result + */ + String getJobReportTabulated() { + String report = webService + "\t"; + report += jobname + "\t"; + if (start != ExecutionStatCollector.UNDEFINED) { + report += ExecutionStatCollector.DF.format(new Date(start)) + "\t"; + } else { + report += ExecutionStatCollector.UNDEFINED + "\t"; + } + if (finish != ExecutionStatCollector.UNDEFINED) { + report += ExecutionStatCollector.DF.format(new Date(finish)) + "\t"; + } else { + report += ExecutionStatCollector.UNDEFINED + "\t"; + } + if (start != ExecutionStatCollector.UNDEFINED + && finish != ExecutionStatCollector.UNDEFINED) { + report += getRuntime() + "\t"; + } else { + report += ExecutionStatCollector.UNDEFINED + "\t"; + } + report += inputSize + "\t"; + report += resultSize + "\t"; + report += clusterJobId + "\t"; + report += isCollected + "\t"; + report += isCancelled + "\t"; + return report; + } + + public Services getWebService() { + return webService; + } + + public String getClusterJobId() { + return clusterJobId; + } + + public String getJobname() { + return jobname; + } + + public String getEscJobname() { + String[] parts = jobname.split("#"); + return parts[0] + "%23" + parts[1]; + } + + public String getStart() { + if (start != ExecutionStatCollector.UNDEFINED) { + return SimpleDateFormat.getDateTimeInstance().format( + new Date(start)); + } + return "?"; + } + + public String getFinish() { + if (finish != ExecutionStatCollector.UNDEFINED) { + return SimpleDateFormat.getDateTimeInstance().format( + new Date(finish)); + } + return "?"; + } + + public long getInputSize() { + if (inputSize != ExecutionStatCollector.UNDEFINED) { + return inputSize / 1000; + } + return 0; + } + + public long getResultSize() { + if (resultSize != ExecutionStatCollector.UNDEFINED) { + return resultSize / 1000; + } + return 0; + } + + public boolean hasResult() { + return resultSize != ExecutionStatCollector.UNDEFINED; + } + + public boolean hasStarted() { + return start != ExecutionStatCollector.UNDEFINED; + } + + public boolean getIsCollected() { + return isCollected; + } + + public boolean getIsCancelled() { + return isCancelled; + } + + public boolean getIsFinished() { + return finish != ExecutionStatCollector.UNDEFINED; + } + +} \ No newline at end of file diff --git a/webservices/compbio/ws/execstat/StatDB.java b/webservices/compbio/stat/collector/StatDB.java similarity index 56% rename from webservices/compbio/ws/execstat/StatDB.java rename to webservices/compbio/stat/collector/StatDB.java index 2c66ea0..dd7e2b7 100644 --- a/webservices/compbio/ws/execstat/StatDB.java +++ b/webservices/compbio/stat/collector/StatDB.java @@ -1,4 +1,4 @@ -package compbio.ws.execstat; +package compbio.stat.collector; import java.sql.Connection; import java.sql.DriverManager; @@ -8,30 +8,61 @@ import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.util.ArrayList; -import java.util.Date; import java.util.List; import java.util.Set; +import org.apache.log4j.Logger; + +import compbio.engine.conf.PropertyHelperManager; +import compbio.util.Util; import compbio.ws.client.Services; -import compbio.ws.execstat.StatProcessor.JobStat; +/** + * The database must be stored in the application root directory and called + * "ExecutionStatistic" + * + * @author pvtroshin + * + */ public class StatDB { - /* the default framework is embedded */ - // private final String framework = "embedded"; private static final String driver = 
"org.apache.derby.jdbc.EmbeddedDriver"; private static final String protocol = "jdbc:derby:"; private static final String statDBName = "ExecutionStatistic"; + private static final Logger log = Logger.getLogger(StatDB.class); + + Connection conn; private static Connection getDBConnection() throws SQLException { - // TODO - System.setProperty("derby.system.home", "."); + String dbpath = PropertyHelperManager.getLocalPath(); + log.info("Looking for JABAWS access statistics database at: " + dbpath); + System.setProperty("derby.system.home", dbpath); Connection conn = DriverManager.getConnection(protocol + statDBName - + ";create=true"); + + ";create=false"); - // We want to control transactions manually. Autocommit is on by - // default in JDBC. - // conn.setAutoCommit(false); + conn.setAutoCommit(true); + return conn; + } + + public StatDB() throws SQLException { + this.conn = getDBConnection(); + } + + /** + * Connect to test database + * + * @param ignored + * @throws SQLException + */ + StatDB(boolean ignored) throws SQLException { + this.conn = getTestDBConnection(); + } + + private static Connection getTestDBConnection() throws SQLException { + System.setProperty("derby.system.home", "testsrc/testdata"); + Connection conn = DriverManager.getConnection(protocol + statDBName + + ";create=false"); + conn.setAutoCommit(true); return conn; } @@ -43,8 +74,8 @@ public class StatDB { * * @throws SQLException */ - private static void createStatTable() throws SQLException { - Connection conn = getDBConnection(); + private void createStatTable() throws SQLException { + /* * Creating a statement object that we can use for running various SQL * statements commands against the database. @@ -67,22 +98,41 @@ public class StatDB { conn.close(); } - static void insertData(Set jobstatus) throws SQLException { + void insertData(Set jobstatus) throws SQLException { System.out.println("Inserting " + jobstatus.size()); - Connection conn = getDBConnection(); + conn.setAutoCommit(false); String insert = "insert into exec_stat (service_name, cluster_job_id, job_id, start, finish, " + "inputsize, resultsize, isCancelled, isCollected, isClusterJob) " + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; PreparedStatement pstm = conn.prepareStatement(insert); for (JobStat js : jobstatus) { + // Has to present pstm.setString(1, js.webService.toString()); - pstm.setString(2, js.clusterJobId); + + if (!Util.isEmpty(js.clusterJobId)) { + pstm.setString(2, js.clusterJobId); + } else { + pstm.setString(2, null); + } + // Has to present pstm.setString(3, js.jobname); - pstm.setTimestamp(4, new Timestamp(js.start)); - pstm.setTimestamp(5, new Timestamp(js.finish)); + + if (js.start != ExecutionStatCollector.UNDEFINED) { + pstm.setTimestamp(4, new Timestamp(js.start)); + } else { + pstm.setTimestamp(4, null); + } + if (js.finish != ExecutionStatCollector.UNDEFINED) { + pstm.setTimestamp(5, new Timestamp(js.finish)); + } else { + pstm.setTimestamp(5, null); + } + // -1 if UNDEFINED pstm.setLong(6, js.inputSize); + // -1 if UNDEFINED pstm.setLong(7, js.resultSize); + pstm.setBoolean(8, js.isCancelled); pstm.setBoolean(9, js.isCollected); pstm.setBoolean(10, js.isClusterJob()); @@ -90,12 +140,11 @@ public class StatDB { } conn.commit(); pstm.close(); - conn.close(); } - static List readData(Timestamp from, Timestamp to, + public List readData(Timestamp from, Timestamp to, Services wservice, Boolean clusterOnly) throws SQLException { - Connection conn = getDBConnection(); + String query = "select service_name, cluster_job_id, job_id, 
start, finish, inputsize, " + "resultsize, isCancelled, isCollected from exec_stat where start BETWEEN ? and ? "; @@ -118,22 +167,21 @@ public class StatDB { pstm.setString(3, wservice.toString()); } pstm.execute(); - List stats = new ArrayList(); + List stats = new ArrayList(); ResultSet rs = pstm.getResultSet(); while (rs.next()) { - stats.add(new JobStat(Services.getService(rs.getString(1)), rs - .getString(2), rs.getString(3), rs.getTimestamp(4) - .getTime(), rs.getTimestamp(5).getTime(), rs.getLong(6), rs - .getLong(7), rs.getBoolean(8), rs.getBoolean(9))); + stats.add(JobStat.newInstance(Services.getService(rs.getString(1)), + rs.getString(2), rs.getString(3), rs.getTimestamp(4), + rs.getTimestamp(5), rs.getLong(6), rs.getLong(7), + rs.getBoolean(8), rs.getBoolean(9))); } rs.close(); pstm.close(); - conn.close(); + return stats; } + public void removeRecordedJobs(Set fsJobs) throws SQLException { - static void removeRecordedJobs(Set fsJobs) throws SQLException { - Connection conn = getDBConnection(); String query = "select job_id from exec_stat"; Statement st = conn.createStatement(); @@ -141,20 +189,28 @@ public class StatDB { while (result.next()) { String recordedJob = result.getString(1); - if (fsJobs.contains(recordedJob)) { - fsJobs.remove(recordedJob); + JobStat recStat = JobStat.newIncompleteStat(recordedJob); + if (fsJobs.contains(recStat)) { + fsJobs.remove(recStat); } } result.close(); - conn.close(); } - void shutdownDBServer() { + public void shutdownDBServer() { // ## DATABASE SHUTDOWN SECTION ## /*** * In embedded mode, an application should shut down Derby. Shutdown * throws the XJ015 exception to confirm success. ***/ + try { + if (conn != null) { + conn.close(); + } + } catch (SQLException e) { + System.err.println("Database commit failed with " + + e.getLocalizedMessage()); + } boolean gotSQLExc = false; try { DriverManager.getConnection("jdbc:derby:;shutdown=true"); @@ -164,20 +220,19 @@ public class StatDB { } } if (!gotSQLExc) { - System.out.println("Database did not shut down normally"); + System.err.println("Database did not shut down normally"); } else { System.out.println("Database shut down normally"); } } public static void main(String[] args) throws SQLException { - // createStatTable(); + // new StatDB().createStatTable(); // insertData(null); - - Date from = new Date(); - from.setMonth(1); - System.out.println(new StatProcessor(readData( - new Timestamp(from.getTime()), - new Timestamp(new Date().getTime()), null, null)).reportStat()); - + /* + * StatDB statdb = new StatDB(); Date from = new Date(); + * from.setMonth(1); System.out.println(new + * StatProcessor(statdb.readData( new Timestamp(from.getTime()), new + * Timestamp(new Date().getTime()), null, null)).reportStat()); + */ } } diff --git a/webservices/compbio/stat/collector/StatManager.java b/webservices/compbio/stat/collector/StatManager.java new file mode 100644 index 0000000..ede4d0c --- /dev/null +++ b/webservices/compbio/stat/collector/StatManager.java @@ -0,0 +1,85 @@ +package compbio.stat.collector; + +import java.sql.SQLException; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.Iterator; +import java.util.Map; +import java.util.TreeMap; + +import compbio.ws.client.Services; + +public class StatManager { + + static class DateRoller implements Iterator { + final Date initDate; + final Calendar calendar; + + public DateRoller(Date date) { + this.initDate = date; + calendar = 
GregorianCalendar.getInstance(); + calendar.setTime(date); + calendar.add(Calendar.MONTH, -12); + } + + Date getCurrentDate() { + return initDate; + } + + @Override + public boolean hasNext() { + return !calendar.getTime().equals(initDate); + } + + @Override + public Date next() { + calendar.add(Calendar.MONTH, 1); + return calendar.getTime(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + } + + void getStats() throws SQLException { + Calendar startTime = Calendar.getInstance(); + startTime.roll(Calendar.YEAR, false); + Timestamp startDate = new Timestamp(startTime.getTimeInMillis()); + Timestamp stopDate = new Timestamp(new Date().getTime()); + StatDB statdb = null; + + statdb = new StatDB(); + + // Total + Map stats = new TreeMap(); + for (Services service : Services.values()) { + stats.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, null))); + } + + // Cluster + Map statsCluster = new TreeMap(); + for (Services service : Services.values()) { + statsCluster.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, true))); + } + // Local + Map statsLocal = new TreeMap(); + for (Services service : Services.values()) { + statsLocal.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, false))); + } + + } +} diff --git a/webservices/compbio/stat/collector/StatProcessor.java b/webservices/compbio/stat/collector/StatProcessor.java new file mode 100644 index 0000000..8b197f8 --- /dev/null +++ b/webservices/compbio/stat/collector/StatProcessor.java @@ -0,0 +1,168 @@ +package compbio.stat.collector; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import compbio.ws.client.Services; + +public class StatProcessor { + + List stats; + + public StatProcessor(List stats) { + this.stats = stats; + } + + public List getClusterJobs() { + return getJobSubset(true); + } + + public List getLocalJobs() { + return getJobSubset(false); + } + + private List getJobSubset(boolean cluster) { + List clusterjobs = new ArrayList(); + for (JobStat js : stats) { + if (cluster) { + if (js.isClusterJob()) { + clusterjobs.add(js); + } + } else { + if (!js.isClusterJob()) { + clusterjobs.add(js); + } + } + } + return clusterjobs; + + } + + /* + * TODO List getNewStat() throws SQLException { Set jobids + * = new HashSet(); for(JobStat js: stats) { jobids.add(js.jobname); + * } StatDB.removeRecordedJobs(jobids); List newjobs = new + * HashSet(); for(String jobid: jobids) { if(newjobs.co) + * jobids.add(js.jobname); } } + */ + + /** + * Not collected. Excludes all cancelled jobs, and jobs with no results as + * these are reported separately. 
+ */ + public List getAbandonedJobs() { + List abJobs = new ArrayList(); + for (JobStat js : stats) { + if (!js.isCollected && !js.isCancelled && js.hasResult()) { + abJobs.add(js); + } + } + return abJobs; + } + + /** + * Started & finished but did not produce result + * + * @return + */ + public List getFailedJobs() { + List failedJobs = new ArrayList(); + for (JobStat js : stats) { + if (js.hasStarted() && js.getIsFinished() && !js.hasResult()) { + failedJobs.add(js); + } + } + return failedJobs; + } + + public List getCancelledJobs() { + List abJobs = new ArrayList(); + for (JobStat js : stats) { + if (js.isCancelled) { + abJobs.add(js); + } + } + return abJobs; + } + + public List sortByRuntime() { + List abJobs = new ArrayList(stats); + Collections.sort(abJobs, JobStat.RUNTIME); + return abJobs; + } + + public List sortByStartTime() { + List abJobs = new ArrayList(stats); + Collections.sort(abJobs, JobStat.STARTTIME); + return abJobs; + } + + public List sortByResultSize() { + List abJobs = new ArrayList(stats); + Collections.sort(abJobs, JobStat.RESULTSIZE); + return abJobs; + } + + public int getJobNumber() { + return stats.size(); + } + + public List getJobs() { + return stats; + } + + public StatProcessor getSingleWSStat(Services webService) { + List wsStat = new ArrayList(); + for (JobStat js : stats) { + if (js.webService == webService) { + wsStat.add(js); + } + } + return new StatProcessor(wsStat); + } + + public long getTotalRuntime() { + long counter = 0; + for (JobStat js : stats) { + int jobtime = js.getRuntime(); + if (jobtime != ExecutionStatCollector.UNDEFINED) { + counter += jobtime; + } + } + return counter; + } + + public List getIncompleteJobs() { + List aJobs = new ArrayList(); + for (JobStat js : stats) { + int jobtime = js.getRuntime(); + if (!js.hasResult()) { + aJobs.add(js); + } + } + return aJobs; + } + + public String reportStat() { + String report = "Total Jobs: " + getJobNumber() + "\n"; + report += "Abandoned Jobs: " + getAbandonedJobs().size() + "\n"; + report += "Cancelled Jobs: " + getCancelledJobs().size() + "\n"; + report += "Total Runtime (s): " + getTotalRuntime() + "\n"; + report += "Unsuccessful Jobs: " + getIncompleteJobs().size() + "\n"; + if (sortByRuntime().size() > 10) { + report += "10 longest jobs: \n\n" + sortByRuntime().subList(0, 9) + + "\n"; + } else { + report += "longest jobs: \n\n" + sortByRuntime() + "\n"; + } + if (sortByResultSize().size() > 10) + report += "10 biggest jobs: \n\n" + + sortByResultSize().subList(0, 9) + "\n"; + else { + report += "biggest jobs: \n\n" + sortByResultSize() + "\n"; + } + return report; + } + +} diff --git a/webservices/compbio/stat/servlet/DisplayStat.java b/webservices/compbio/stat/servlet/DisplayStat.java new file mode 100644 index 0000000..9f88069 --- /dev/null +++ b/webservices/compbio/stat/servlet/DisplayStat.java @@ -0,0 +1,77 @@ +package compbio.stat.servlet; + +import java.io.IOException; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.Date; +import java.util.Map; +import java.util.TreeMap; + +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import compbio.stat.collector.StatDB; +import compbio.stat.collector.StatProcessor; +import compbio.ws.client.Services; + +public class DisplayStat extends HttpServlet { + + @Override + protected void doGet(HttpServletRequest req, 
HttpServletResponse resp) + throws ServletException, IOException { + // TODO + Calendar startTime = Calendar.getInstance(); + startTime.roll(Calendar.YEAR, false); + Timestamp startDate = new Timestamp(startTime.getTimeInMillis()); + Timestamp stopDate = new Timestamp(new Date().getTime()); + StatDB statdb = null; + try { + statdb = new StatDB(); + + Map stats = new TreeMap(); + for (Services service : Services.values()) { + stats.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, null))); + } + + Map statsCluster = new TreeMap(); + for (Services service : Services.values()) { + statsCluster.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, true))); + } + + Map statsLocal = new TreeMap(); + for (Services service : Services.values()) { + statsLocal.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, false))); + } + req.setAttribute("stat", stats); + req.setAttribute("statTotal", Totals.sumStats(stats)); + + req.setAttribute("statCluster", statsCluster); + req.setAttribute("statLocal", statsLocal); + req.setAttribute("startDate", startDate.getTime()); + req.setAttribute("stopDate", stopDate.getTime()); + + RequestDispatcher dispatcher = req + .getRequestDispatcher("statpages/Statistics.jsp"); + dispatcher.forward(req, resp); + + } catch (SQLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + +} diff --git a/webservices/compbio/stat/servlet/Joblist.java b/webservices/compbio/stat/servlet/Joblist.java new file mode 100644 index 0000000..7328e28 --- /dev/null +++ b/webservices/compbio/stat/servlet/Joblist.java @@ -0,0 +1,142 @@ +package compbio.stat.servlet; + +import java.io.IOException; +import java.sql.SQLException; +import java.sql.Timestamp; + +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; + +import compbio.engine.conf.PropertyHelperManager; +import compbio.stat.collector.StatDB; +import compbio.stat.collector.StatProcessor; +import compbio.util.PropertyHelper; +import compbio.util.Util; +import compbio.ws.client.Services; + +public class Joblist extends HttpServlet { + + static final String JT_FAILED = "failed"; + static final String JT_ABANDONED = "abandoned"; + static final String JT_CANCELLED = "cancelled"; + static final String JT_ALL = "all"; + static final String JT_INCOMPLETE = "incomplete"; + /** + * Input: + * + * ws=${ws.key} + * + * where=everywhere cluster local + * + * type=cancelled all incomplete + * + * from=${startDate} + * + * to=${stopDate} + * + */ + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) + throws ServletException, IOException { + /* + * Values for this servlet are not user supplied, so do not bother with + * nice error messages just throw the exception is something is wrong! + */ + String wsname = req.getParameter("ws"); + Services wservice = Services.getService(wsname); + if (wservice == null) { + throw new ServletException( + "Webservice name 'ws' is not specified or is incorrect. 
Given value:" + + wsname); + } + String executor = req.getParameter("where"); + if (Util.isEmpty(executor)) { + throw new ServletException("'Where' is not specified!"); + } + if (!(executor.equalsIgnoreCase("everywhere") + || executor.equalsIgnoreCase("local") || executor + .equalsIgnoreCase("cluster"))) { + throw new ServletException("Invalid 'where' value '" + executor + + "' can be one of 'everywhere', 'local', 'cluster'!"); + } + Boolean where = null; + if (executor.equalsIgnoreCase("local")) { + where = false; + } else if (executor.equalsIgnoreCase("cluster")) { + where = true; + } + + String jobtype = req.getParameter("type"); + if (Util.isEmpty(executor)) { + throw new ServletException("'type' is not specified!"); + } + if (!(jobtype.equalsIgnoreCase(JT_CANCELLED) + || jobtype.equalsIgnoreCase(JT_ALL) + || jobtype.equalsIgnoreCase(JT_INCOMPLETE) + || jobtype.equalsIgnoreCase(JT_ABANDONED) || jobtype + .equalsIgnoreCase(JT_FAILED))) { + throw new ServletException("Invalid 'jobtype' value '" + jobtype + + "' can be one of 'cancelled', 'all', 'incomplete', " + + "'failed', 'abandoned'!"); + } + String fromDate = req.getParameter("from"); + if (Util.isEmpty(fromDate)) { + throw new ServletException("'fromDate' is not specified!"); + } + String toDate = req.getParameter("to"); + if (Util.isEmpty(toDate)) { + throw new ServletException("'toDate' is not specified!"); + } + + PropertyHelper helper = PropertyHelperManager.getPropertyHelper(); + String clusterTempDir = helper.getProperty("cluster.tmp.directory") + .trim(); + String localTempDir = helper.getProperty("local.tmp.directory").trim(); + // TODO include the time slice + Timestamp startDate = new Timestamp(Long.parseLong(fromDate)); + Timestamp stopDate = new Timestamp(Long.parseLong(toDate)); + StatDB statdb = null; + try { + statdb = new StatDB(); + StatProcessor stat = new StatProcessor(statdb.readData(startDate, + stopDate, wservice, where)); + + HttpSession session = req.getSession(); + if (jobtype.equalsIgnoreCase(JT_CANCELLED)) { + session.setAttribute("stat", + new StatProcessor(stat.getCancelledJobs())); + } else if (jobtype.equalsIgnoreCase(JT_INCOMPLETE)) { + session.setAttribute("stat", + new StatProcessor(stat.getIncompleteJobs())); + } else if (jobtype.equalsIgnoreCase(JT_ALL)) { + session.setAttribute("stat", stat); + } else if (jobtype.equalsIgnoreCase(JT_FAILED)) { + session.setAttribute("stat", + new StatProcessor(stat.getFailedJobs())); + } else if (jobtype.equalsIgnoreCase(JT_ABANDONED)) { + session.setAttribute("stat", + new StatProcessor(stat.getAbandonedJobs())); + } else { + throw new AssertionError("Unrecognised job type: " + jobtype); + } + session.setAttribute("clusterTemp", clusterTempDir); + session.setAttribute("localTemp", localTempDir); + req.setAttribute("startDate", startDate.getTime()); + req.setAttribute("stopDate", stopDate.getTime()); + + RequestDispatcher dispatcher = req + .getRequestDispatcher("statpages/Joblist.jsp"); + dispatcher.forward(req, resp); + + } catch (SQLException e) { + e.printStackTrace(); + throw new ServletException("SQLException : " + + e.getLocalizedMessage()); + } + + } +} diff --git a/webservices/compbio/stat/servlet/StatisticCollector.java b/webservices/compbio/stat/servlet/StatisticCollector.java new file mode 100644 index 0000000..3ababa3 --- /dev/null +++ b/webservices/compbio/stat/servlet/StatisticCollector.java @@ -0,0 +1,20 @@ +package compbio.stat.servlet; + +import javax.servlet.ServletContextEvent; +import javax.servlet.ServletContextListener; + +public class 
StatisticCollector implements ServletContextListener { + + @Override + public void contextDestroyed(ServletContextEvent arg0) { + // TODO Auto-generated method stub + + } + + @Override + public void contextInitialized(ServletContextEvent arg0) { + // TODO Auto-generated method stub + + } + +} diff --git a/webservices/compbio/stat/servlet/Totals.java b/webservices/compbio/stat/servlet/Totals.java new file mode 100644 index 0000000..8894eb4 --- /dev/null +++ b/webservices/compbio/stat/servlet/Totals.java @@ -0,0 +1,46 @@ +package compbio.stat.servlet; + +import java.util.Map; + +import compbio.stat.collector.StatProcessor; +import compbio.ws.client.Services; + +public class Totals { + int total; + int incomplete; + int abandoned; + int cancelled; + int failed; + + public int getTotal() { + return total; + } + + public int getIncomplete() { + return incomplete; + } + + public int getAbandoned() { + return abandoned; + } + + public int getCancelled() { + return cancelled; + } + + public int getFailed() { + return failed; + } + + static Totals sumStats(Map stat) { + Totals total = new Totals(); + for (Map.Entry serv : stat.entrySet()) { + total.total += serv.getValue().getJobNumber(); + total.incomplete += serv.getValue().getIncompleteJobs().size(); + total.abandoned += serv.getValue().getAbandonedJobs().size(); + total.cancelled += serv.getValue().getCancelledJobs().size(); + total.failed += serv.getValue().getFailedJobs().size(); + } + return total; + } +} \ No newline at end of file diff --git a/webservices/compbio/stat/servlet/YearStat.java b/webservices/compbio/stat/servlet/YearStat.java new file mode 100644 index 0000000..bf975ec --- /dev/null +++ b/webservices/compbio/stat/servlet/YearStat.java @@ -0,0 +1,77 @@ +package compbio.stat.servlet; + +import java.io.IOException; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.Date; +import java.util.Map; +import java.util.TreeMap; + +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import compbio.stat.collector.StatDB; +import compbio.stat.collector.StatProcessor; +import compbio.ws.client.Services; + +public class YearStat extends HttpServlet { + + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) + throws ServletException, IOException { + // TODO + Calendar startTime = Calendar.getInstance(); + startTime.roll(Calendar.YEAR, false); + Timestamp startDate = new Timestamp(startTime.getTimeInMillis()); + Timestamp stopDate = new Timestamp(new Date().getTime()); + StatDB statdb = null; + try { + statdb = new StatDB(); + + Map stats = new TreeMap(); + for (Services service : Services.values()) { + stats.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, null))); + } + + Map statsCluster = new TreeMap(); + for (Services service : Services.values()) { + statsCluster.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, true))); + } + + Map statsLocal = new TreeMap(); + for (Services service : Services.values()) { + statsLocal.put( + service, + new StatProcessor(statdb.readData(startDate, stopDate, + service, false))); + } + req.setAttribute("stat", stats); + req.setAttribute("statTotal", Totals.sumStats(stats)); + + req.setAttribute("statCluster", statsCluster); + req.setAttribute("statLocal", statsLocal); + 
req.setAttribute("startDate", startDate.getTime()); + req.setAttribute("stopDate", stopDate.getTime()); + + RequestDispatcher dispatcher = req + .getRequestDispatcher("statpages/Statistics.jsp"); + dispatcher.forward(req, resp); + + } catch (SQLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + +} diff --git a/webservices/compbio/ws/client/Services.java b/webservices/compbio/ws/client/Services.java index a0c1f56..7f06da7 100644 --- a/webservices/compbio/ws/client/Services.java +++ b/webservices/compbio/ws/client/Services.java @@ -26,6 +26,7 @@ import javax.xml.ws.Service; import compbio.data.msa.JABAService; import compbio.data.msa.MsaWS; import compbio.data.msa.SequenceAnnotation; +import compbio.engine.client.Executable; /** * List of web services currently supported by JABAWS version 2 @@ -36,32 +37,21 @@ public enum Services { public static Services getService(String servName) { servName = servName.trim().toLowerCase(); - if (servName.equalsIgnoreCase(MafftWS.toString())) { - return MafftWS; + for (Services service : Services.values()) { + if (service.toString().equalsIgnoreCase(servName)) { + return service; + } } - if (servName.equalsIgnoreCase(ClustalWS.toString())) { - return ClustalWS; - } - if (servName.equalsIgnoreCase(TcoffeeWS.toString())) { - return TcoffeeWS; - } - if (servName.equalsIgnoreCase(MuscleWS.toString())) { - return MuscleWS; - } - if (servName.equalsIgnoreCase(ProbconsWS.toString())) { - return ProbconsWS; - } - if (servName.equalsIgnoreCase(AAConWS.toString())) { - return AAConWS; - } - if (servName.equalsIgnoreCase(JronnWS.toString())) { - return JronnWS; - } - if (servName.equalsIgnoreCase(DisemblWS.toString())) { - return DisemblWS; - } - if (servName.equalsIgnoreCase(GlobPlotWS.toString())) { - return GlobPlotWS; + return null; + } + + public static Services getService(Class> runnerClassName) { + assert runnerClassName != null; + String sname = runnerClassName.getSimpleName().toLowerCase(); + for (Services service : Services.values()) { + if (service.toString().toLowerCase().contains(sname)) { + return service; + } } return null; } diff --git a/webservices/compbio/ws/execstat/StatProcessor.java b/webservices/compbio/ws/execstat/StatProcessor.java deleted file mode 100644 index 16b3234..0000000 --- a/webservices/compbio/ws/execstat/StatProcessor.java +++ /dev/null @@ -1,287 +0,0 @@ -package compbio.ws.execstat; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.List; - -import compbio.engine.client.ConfExecutable; -import compbio.ws.client.Services; - -public class StatProcessor { - - List stats; - - StatProcessor(List stats) { - this.stats = stats; - } - - /* - * TODO List getNewStat() throws SQLException { Set jobids - * = new HashSet(); for(JobStat js: stats) { jobids.add(js.jobname); - * } StatDB.removeRecordedJobs(jobids); List newjobs = new - * HashSet(); for(String jobid: jobids) { if(newjobs.co) - * jobids.add(js.jobname); } } - */ - - List getAbandonedJobs() { - List abJobs = new ArrayList(); - for (JobStat js : stats) { - if (!js.isCollected) { - abJobs.add(js); - } - } - return abJobs; - } - - List getCancelledJobs() { - List abJobs = new ArrayList(); - for (JobStat js : stats) { - if (js.isCancelled) { - abJobs.add(js); - } - } - return abJobs; - } - - List sortByRuntime() { - List abJobs = new ArrayList(stats); - Collections.sort(abJobs, JobStat.RUNTIME); - return abJobs; - } - - List sortByStartTime() { - List abJobs = new 
ArrayList(stats); - Collections.sort(abJobs, JobStat.STARTTIME); - return abJobs; - } - - List sortByResultSize() { - List abJobs = new ArrayList(stats); - Collections.sort(abJobs, JobStat.RESULTSIZE); - return abJobs; - } - - int getJobNumber() { - return stats.size(); - } - - public StatProcessor getSingleWSStat(Services webService) { - List wsStat = new ArrayList(); - for (JobStat js : stats) { - if (js.webService == webService) { - wsStat.add(js); - } - } - return new StatProcessor(wsStat); - } - - long getTotalRuntime() { - long counter = 0; - for (JobStat js : stats) { - int jobtime = js.getRuntime(); - if (jobtime != ExecutionStatCollector.UNDEFINED) { - counter += jobtime; - } - } - return counter; - } - - List getUnsuccessfulJobs() { - List aJobs = new ArrayList(); - for (JobStat js : stats) { - int jobtime = js.getRuntime(); - if (js.resultSize == ExecutionStatCollector.UNDEFINED) { - aJobs.add(js); - } - } - return aJobs; - } - - public String reportStat() { - String report = "Total Jobs: " + getJobNumber() + "\n"; - report += "Abandoned Jobs: " + getAbandonedJobs().size() + "\n"; - report += "Cancelled Jobs: " + getCancelledJobs().size() + "\n"; - report += "Total Runtime (s): " + getTotalRuntime() + "\n"; - report += "Unsuccessful Jobs: " + getUnsuccessfulJobs().size() + "\n"; - if (sortByRuntime().size() > 10) { - report += "10 longest jobs: \n\n" + sortByRuntime().subList(0, 9) - + "\n"; - } else { - report += "longest jobs: \n\n" + sortByRuntime() + "\n"; - } - if (sortByResultSize().size() > 10) - report += "10 biggest jobs: \n\n" - + sortByResultSize().subList(0, 9) + "\n"; - else { - report += "biggest jobs: \n\n" + sortByResultSize() + "\n"; - } - return report; - } - - static class JobStat { - - static final Comparator RUNTIME = new Comparator() { - @Override - public int compare(JobStat o1, JobStat o2) { - return new Integer(o2.getRuntime()).compareTo(o1.getRuntime()); - } - }; - - static final Comparator STARTTIME = new Comparator() { - @Override - public int compare(JobStat o1, JobStat o2) { - return new Long(o1.start).compareTo(o2.start); - } - }; - - static final Comparator RESULTSIZE = new Comparator() { - @Override - public int compare(JobStat o1, JobStat o2) { - return new Long(o2.resultSize).compareTo(o1.resultSize); - } - }; - - Services webService; - String clusterJobId; - String jobname; - long start; - long finish; - long inputSize; - long resultSize; - boolean isCollected; - boolean isCancelled; - - JobStat(Services webService, String clusterJobId, String jobname, - long start, long finish, long inputSize, long resultSize, - boolean isCollected, boolean isCancelled) { - super(); - this.webService = webService; - this.clusterJobId = clusterJobId; - this.jobname = jobname; - this.start = start; - this.finish = finish; - this.inputSize = inputSize; - this.resultSize = resultSize; - this.isCollected = isCollected; - this.isCancelled = isCancelled; - } - - public boolean isClusterJob() { - return jobname.startsWith(ConfExecutable.CLUSTER_TASK_ID_PREFIX); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (int) (finish ^ (finish >>> 32)); - result = prime * result + (int) (inputSize ^ (inputSize >>> 32)); - result = prime * result + (isCancelled ? 1231 : 1237); - result = prime * result + (isCollected ? 1231 : 1237); - result = prime * result - + ((jobname == null) ? 
0 : jobname.hashCode()); - result = prime * result + (int) (resultSize ^ (resultSize >>> 32)); - result = prime * result + (int) (start ^ (start >>> 32)); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - JobStat other = (JobStat) obj; - if (finish != other.finish) - return false; - if (inputSize != other.inputSize) - return false; - if (isCancelled != other.isCancelled) - return false; - if (isCollected != other.isCollected) - return false; - if (jobname == null) { - if (other.jobname != null) - return false; - } else if (!jobname.equals(other.jobname)) - return false; - if (resultSize != other.resultSize) - return false; - if (start != other.start) - return false; - return true; - } - - public int getRuntime() { - if (start != ExecutionStatCollector.UNDEFINED - && finish != ExecutionStatCollector.UNDEFINED) { - return (int) (finish - start) / 1000; - } - return ExecutionStatCollector.UNDEFINED; - } - - @Override - public String toString() { - return getJobReport(); - } - - String getJobReport() { - String report = "WS: " + webService + "\n"; - report += "JOB: " + jobname + "\n"; - if (start != ExecutionStatCollector.UNDEFINED) { - report += "Started " + new Date(start) + "\n"; - } - if (finish != ExecutionStatCollector.UNDEFINED) { - report += "Finished " + new Date(finish) + "\n"; - } - if (start != ExecutionStatCollector.UNDEFINED - && finish != ExecutionStatCollector.UNDEFINED) { - report += "Runtime " + getRuntime() + "\n"; - } - report += "Input size " + inputSize + "\n"; - report += "Result size " + resultSize + "\n"; - report += "ClusterJobID " + clusterJobId + "\n"; - report += "Collected? " + isCollected + "\n"; - report += "Cancelled? " + isCancelled + "\n"; - return report; - } - - /** - * Header Job Started Finished Runtime Input Result - */ - String getJobReportTabulated() { - String report = webService + "\t"; - report += jobname + "\t"; - if (start != ExecutionStatCollector.UNDEFINED) { - report += ExecutionStatCollector.DF.format(new Date(start)) - + "\t"; - } else { - report += ExecutionStatCollector.UNDEFINED + "\t"; - } - if (finish != ExecutionStatCollector.UNDEFINED) { - report += ExecutionStatCollector.DF.format(new Date(finish)) - + "\t"; - } else { - report += ExecutionStatCollector.UNDEFINED + "\t"; - } - if (start != ExecutionStatCollector.UNDEFINED - && finish != ExecutionStatCollector.UNDEFINED) { - report += getRuntime() + "\t"; - } else { - report += ExecutionStatCollector.UNDEFINED + "\t"; - } - report += inputSize + "\t"; - report += resultSize + "\t"; - report += clusterJobId + "\t"; - report += isCollected + "\t"; - report += isCancelled + "\t"; - return report; - } - - } - -} -- 1.7.10.2
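
Note (not part of the patch): a minimal usage sketch of the statistics collector API introduced above, assuming the Derby database "ExecutionStatistic" already exists under the application root and that "jobsout" stands in for the real local.tmp.directory / cluster.tmp.directory paths read from the property file. The class and method names (ExecutionStatCollector, StatProcessor, StatDB) come from the diff; the wrapper class, its name, and the directory path are hypothetical.

    import java.sql.SQLException;

    import compbio.stat.collector.ExecutionStatCollector;
    import compbio.stat.collector.StatProcessor;

    public class CollectStatsExample {
        public static void main(String[] args) throws SQLException {
            // Scan a job output directory (placeholder path) and build per-job statistics.
            ExecutionStatCollector collector = new ExecutionStatCollector("jobsout");

            // Summarise what was found on the file system.
            StatProcessor stats = collector.getStats();
            System.out.println(stats.reportStat());

            // Drop jobs already recorded in the database, then insert the remainder.
            // Relies on the pre-created "ExecutionStatistic" Derby database.
            collector.writeStatToDB();
        }
    }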