<classpathentry kind="src" path="webservices"/>\r
<classpathentry excluding="testdata/" kind="src" path="testsrc"/>\r
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>\r
- <classpathentry kind="lib" path="WEB-INF/lib/log4j-1.2.15.jar"/>\r
<classpathentry kind="lib" path="testsrc/lib/testng-5.10-jdk15.jar"/>\r
<classpathentry kind="lib" path="lib/servlet-api.jar"/>\r
<classpathentry kind="lib" path="WEB-INF/lib/drmaa.jar"/>\r
<classpathentry kind="lib" path="WEB-INF/lib/compbio-annotations-1.0.jar"/>\r
<classpathentry kind="lib" path="WEB-INF/lib/derby.jar"/>\r
<classpathentry kind="lib" path="WEB-INF/lib/compbio-util-1.4.jar"/>\r
+ <classpathentry kind="lib" path="WEB-INF/lib/log4j-1.2.15.jar"/>\r
<classpathentry kind="output" path="WEB-INF/classes"/>\r
</classpath>\r
TODO: \r
\r
+Cluster stats: \r
+ -Remove hyperlinks from tasks whose workdirs have been removed 
+ -graph generation \r
+ -user documentation\r
+ -use the same name for the output of different executables
+\r
add to help text: To disable a web service, remove its entry from the WEB-INF/sun-jaxws.xml descriptor (see the example entry below)
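
For reference, a sketch of what an endpoint entry in sun-jaxws.xml looks like (the endpoint name and implementation class below are illustrative, not copied from the actual descriptor); deleting such an entry disables the corresponding service:

<endpoints xmlns="http://java.sun.com/xml/ns/jax-ws/ri/runtime" version="2.0">
    <!-- Remove an endpoint element to disable that web service -->
    <endpoint name="ClustalWS"
              implementation="compbio.ws.server.ClustalWS"
              url-pattern="/ClustalWS"/>
</endpoints>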
\r
add to help: VirtualBox 4.0.4 works fine with JABAWS; update the links 
import org.apache.log4j.Logger;\r
\r
import compbio.engine.client.Executable;\r
+import compbio.engine.client.PathValidator;\r
import compbio.metadata.JobStatus;\r
import compbio.util.FileUtil;\r
import compbio.ws.client.Services;\r
\r
static SimpleDateFormat DF = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
\r
+ final private File workDirectory;\r
final private List<JobStat> stats;\r
/**\r
* Consider a job that has been running for longer than timeOutInHours to be completed, whatever its outcome
public ExecutionStatCollector(String workDirectory, int timeOutInHours) {\r
log.info("Starting stat collector for directory: " + workDirectory);\r
log.info("Maximum allowed runtime(h): " + timeOutInHours);\r
- File[] files = FileUtil.getFiles(workDirectory, directories);\r
+ if (!PathValidator.isValidDirectory(workDirectory)) {\r
+ throw new IllegalArgumentException("workDirectory '"
+ + workDirectory + "' does not exist or is not a directory!");
+ }\r
+ this.workDirectory = new File(workDirectory);\r
stats = new ArrayList<JobStat>();\r
- assert timeOutInHours > 0;\r
- this.timeOutInHours = timeOutInHours;\r
- for (File file : files) {\r
- JobDirectory jd = new JobDirectory(file);\r
- JobStat jstat = jd.getJobStat();\r
- // Do not record stats on the job that has not completed yet\r
- if (hasCompleted(jd)) {\r
- stats.add(jstat);\r
- } else {\r
- log.debug("Skipping the job: " + jstat);\r
- log.debug("As it has not completed yet");\r
- }\r
- // System.out.println(jd.getJobStat().getJobReportTabulated());\r
+ if (timeOutInHours <= 0) {\r
+ throw new IllegalArgumentException(\r
+ "Timeout value must be greater than 0! Given value: "\r
+ + timeOutInHours);\r
}\r
+ this.timeOutInHours = timeOutInHours;\r
}\r
\r
// Treat a job as completed once its directory has not been modified
// for longer than timeOutInHours
boolean hasCompleted(JobDirectory jd) {
return ((System.currentTimeMillis() - jd.jobdir.lastModified()) / (1000 * 60 * 60)) > timeOutInHours;
}
\r
- public StatProcessor getStats() {\r
+ StatProcessor getStats() {\r
return new StatProcessor(stats);\r
}\r
\r
- public void writeStatToDB() throws SQLException {\r
+ void writeStatToDB() throws SQLException {\r
Set<JobStat> rjobs = new HashSet<JobStat>(stats);\r
StatDB statdb = new StatDB();\r
log.debug("Removing records that has already been recorded");\r
*/\r
\r
/**\r
- * \r
- * @param args\r
- * @throws IOException\r
- * @throws SQLException\r
+ * Not in use\r
*/\r
public static void main(String[] args) throws IOException, SQLException {\r
\r
File jobdir;\r
Map<String, File> files = new HashMap<String, File>();\r
\r
- public JobDirectory(File directory) {\r
+ JobDirectory(File directory) {\r
this.jobdir = directory;\r
for (File f : jobdir.listFiles()) {\r
files.put(f.getName(), f);\r
}\r
}\r
\r
- public boolean hasStatus(JobStatus status) {\r
+ boolean hasStatus(JobStatus status) {\r
return files.containsKey(status.toString());\r
}\r
\r
clustjobId = FileUtil.readFileToString(jobid);\r
}\r
} catch (IOException ioe) {\r
- ioe.printStackTrace();\r
- // TODO LOG\r
+ log.error("Could not read the content of the JOBID file "
+ + jobid, ioe);
}\r
return clustjobId.trim();\r
}\r
return false;\r
return true;\r
}\r
+ }\r
\r
+ private void collectStatistics() {\r
+ File[] files = workDirectory.listFiles(directories);\r
+ for (File file : files) {\r
+ JobDirectory jd = new JobDirectory(file);\r
+ JobStat jstat = jd.getJobStat();\r
+ // Do not record stats for jobs that have not completed yet
+ if (hasCompleted(jd)) {\r
+ stats.add(jstat);\r
+ } else {\r
+ log.debug("Skipping the job: " + jstat);\r
+ log.debug("As it has not completed yet");\r
+ }\r
+ // System.out.println(jd.getJobStat().getJobReportTabulated());\r
+ }\r
}\r
\r
@Override\r
public void run() {\r
log.info("Started updating statistics at " + new Date());\r
\r
+ collectStatistics();\r
+\r
StatProcessor local_stats = getStats();\r
log.info("Found " + local_stats.getJobNumber() + " jobs!");\r
try {\r
}\r
log.info("Finished updating statistics at " + new Date());\r
}\r
-\r
}\r
+ "isCollected SMALLINT NOT NULL, "\r
+ "isClusterJob SMALLINT NOT NULL)";\r
// We create a table...\r
- System.out.println(create);\r
+ log.debug(create);\r
s.execute(create);\r
s.close();\r
conn.close();\r
pstm.executeUpdate();\r
}\r
conn.commit();\r
+ // restore auto-commit now that the batch has been committed
+ conn.setAutoCommit(true);
pstm.close();\r
}\r
\r
conn.close();\r
}\r
} catch (SQLException e) {\r
- System.err.println("Database commit failed with "\r
- + e.getLocalizedMessage());\r
+ log.warn("Database commit failed with " + e.getLocalizedMessage());\r
}\r
boolean gotSQLExc = false;\r
try {\r
}\r
}\r
if (!gotSQLExc) {\r
- System.err.println("Database did not shut down normally");\r
+ log.warn("Database did not shut down normally");\r
} else {\r
- System.out.println("Database shut down normally");\r
+ log.info("Database shut down normally");\r
}\r
}\r
public static void main(String[] args) throws SQLException {\r
import javax.servlet.http.HttpServletRequest;\r
import javax.servlet.http.HttpServletResponse;\r
\r
+import org.apache.log4j.Logger;\r
+\r
import compbio.stat.servlet.util.StatCollection;\r
import compbio.stat.servlet.util.Totals;\r
\r
public class DisplayStat extends HttpServlet {\r
\r
+ private final static Logger log = Logger.getLogger(DisplayStat.class);\r
+\r
@Override\r
protected void doGet(HttpServletRequest req, HttpServletResponse resp)\r
throws ServletException, IOException {\r
String datetime = req.getParameter("datetime");\r
- System.out.println("? " + datetime);\r
+\r
Date fromDate = new Date(Long.parseLong(datetime));\r
Calendar toCal = GregorianCalendar.getInstance();\r
toCal.setTime(fromDate);\r
StatCollection stats = StatCollection.newStatCollecton(fromDate,\r
toCal.getTime());\r
\r
- System.out.println("stats: " + stats);\r
+ log.trace("Stats: " + stats);\r
req.setAttribute("stat", stats);\r
req.setAttribute("statTotal", Totals.sumStats(stats.getAllStat()));\r
req.setAttribute("statTotalCluster",\r
\r
req.setAttribute("startDate", fromDate);\r
req.setAttribute("stopDate", toCal.getTime());\r
- System.out.println(fromDate + " " + toCal.getTime());\r
+ log.trace("from " + fromDate + " to " + toCal.getTime());\r
RequestDispatcher dispatcher = req\r
.getRequestDispatcher("statpages/Statistics.jsp");\r
dispatcher.forward(req, resp);\r
\r
} catch (SQLException e) {\r
- e.printStackTrace();\r
+ log.error(e.getMessage(), e);\r
throw new ServletException(e);\r
}\r
\r
.trim();\r
clusterTempDir = new File(clusterTempDir).getName();\r
String localTempDir = helper.getProperty("local.tmp.directory").trim();\r
- // TODO include the time slice\r
+\r
Timestamp startDate = new Timestamp(Long.parseLong(fromDate));\r
Timestamp stopDate = new Timestamp(Long.parseLong(toDate));\r
StatDB statdb = null;\r
\r
log.info("Initializing statistics collector");\r
executor = Executors.newScheduledThreadPool(2);\r
- // FIXME Nullpointer if jobsout is not available?\r
+\r
if (collectClusterStats()) {\r
- // TODO remove work out of the constructor Tomcat takes ages to\r
- // start!\r
+\r
ExecutionStatCollector clusterCollector = new ExecutionStatCollector(\r
clusterWorkDir, clusterMaxRuntime);\r
clustercf = executor.scheduleAtFixedRate(clusterCollector, 60,\r
} else {\r
log.info("Local statistics collector is disabled or not configured! ");\r
}\r
-\r
}\r
\r
static String getClusterJobDir() {\r