<classpathentry kind="lib" path="WEB-INF/lib/jackson-core-asl-1.9.13.jar"/>
<classpathentry kind="lib" path="WEB-INF/lib/jackson-mapper-asl-1.9.13.jar"/>
<classpathentry kind="lib" path="WEB-INF/lib/displaytag-1.2.jar"/>
+ <classpathentry kind="lib" path="WEB-INF/lib/jabaws-core-2.1.0.jar" sourcepath="WEB-INF/lib/jabaws-core-src-2.1.0.jar"/>
<classpathentry kind="lib" path="WEB-INF/lib/jstl-1.2.jar"/>
<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/Apache Tomcat v7.0">
<attributes>
<wb-resource deploy-path="/META-INF" source-path="/META-INF"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/datadb"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/server"/>
+ <wb-resource deploy-path="/WEB-INF/classes" source-path="/engine"/>
+ <wb-resource deploy-path="/WEB-INF/classes" source-path="/log"/>
<property name="java-output-path" value="WEB-INF/classes"/>
<property name="context-root" value="proteocache"/>
</wb-module>
--- /dev/null
+#################################################################################
+# Cassandra host or IP
+# test server is 10.0.115.190
+cassandra.host=localhost
+
+#################################################################################
+# Jpred sources
+cassandra.jpred.web=true
+cassandra.jpred.local=false
\ No newline at end of file
import java.util.ArrayList;
import java.util.Map;
+import org.apache.log4j.Logger;
+
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Host;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.BoundStatement;
+
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
+import compbio.util.Util;
public class CassandraNativeConnector {
private static Cluster cluster;
private static Session session;
+ private static final PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
+ private static Logger log = Logger.getLogger(CassandraNativeConnector.class);
+
+ public static String CASSANDRA_HOSTNAME = "localhost";
+ public static boolean READ_WEB_JPRED = false;
+ public static boolean READ_LOCALFILE_JPRED = false;
+
+ private static boolean initBooleanValue(String key) {
+ assert key != null;
+ String status = ph.getProperty(key);
+ log.debug("Loading property: " + key + " with value: " + status);
+ if (Util.isEmpty(status)) {
+ return false;
+ }
+ return new Boolean(status.trim()).booleanValue();
+ }
+
/*
- * connect to the cluster and look weather the dababase has any data inside
+ * connect to the cluster and look whether all tables exist
*/
public void Connect() {
- // local cassandra cluster
- cluster = Cluster.builder().addContactPoint("localhost").build();
- // distributed cassandra cluster
- /* cluster = Cluster.builder().addContactPoint("10.0.115.190").build(); */
+
+ String cassandrahostname = ph.getProperty("cassandra.host");
+ if (null != cassandrahostname) {
+ CASSANDRA_HOSTNAME = cassandrahostname;
+ }
+ READ_WEB_JPRED = initBooleanValue("cassandra.jpred.web");
+ READ_LOCALFILE_JPRED = initBooleanValue("cassandra.jpred.local");
+
+ cluster = Cluster.builder().addContactPoint(CASSANDRA_HOSTNAME).build();
+
Metadata metadata = cluster.getMetadata();
System.out.printf("Connected to cluster: %s\n", metadata.getClusterName());
for (Host host : metadata.getAllHosts()) {
System.out.printf("Datatacenter: %s; Host: %s; Rack: %s\n", host.getDatacenter(), host.getAddress(), host.getRack());
}
-
session = cluster.connect();
+ CreateTables();
+ System.out.println("Cassandra connected");
+ }
+
+ private void CreateTables() {
session.execute("CREATE KEYSPACE IF NOT EXISTS ProteinKeyspace WITH replication = {'class':'SimpleStrategy', 'replication_factor':3};");
- session.execute("CREATE COLUMNFAMILY IF NOT EXISTS ProteinKeyspace.ProteinRow (Protein ascii, JobID ascii, Predictions map<ascii,ascii>, PRIMARY KEY(JobID));");
- session.execute("CREATE COLUMNFAMILY IF NOT EXISTS ProteinKeyspace.ProteinLog "
- + "(JobID ascii, DataBegin ascii, DataEnd ascii, ip ascii, FinalStatus ascii, ExecutionStatus ascii, Protein ascii, PRIMARY KEY(JobID));");
- session.execute("CREATE COLUMNFAMILY IF NOT EXISTS ProteinKeyspace.ProteinData (jobtime bigint, JobID ascii, Protein ascii, PRIMARY KEY(JobID));");
+ session.execute("USE ProteinKeyspace");
- session.execute("CREATE INDEX IF NOT EXISTS ProteinSeq ON ProteinKeyspace.ProteinRow (protein);");
- session.execute("CREATE INDEX IF NOT EXISTS JobDateStamp ON ProteinKeyspace.ProteinData (jobtime);");
+ session.execute("CREATE COLUMNFAMILY IF NOT EXISTS ProteinRow "
+ + "(Protein ascii, JobID ascii, Predictions map<ascii,ascii>, PRIMARY KEY(JobID));");
+ session.execute("CREATE COLUMNFAMILY IF NOT EXISTS ProteinLog "
+ + "(JobID ascii, DataBegin ascii, DataEnd ascii, ip ascii, FinalStatus ascii, "
+ + "ExecutionStatus ascii, Protein ascii, PRIMARY KEY(JobID));");
+ session.execute("CREATE COLUMNFAMILY IF NOT EXISTS ProteinData "
+ + "(jobtime bigint, JobID ascii, Protein ascii, PRIMARY KEY(JobID));");
+ session.execute("CREATE COLUMNFAMILY IF NOT EXISTS JpredArchive "
+ + "(JobID ascii, Protein varchar, IP ascii, StartTime bigint, ExecTime int, alignment map<ascii,ascii>, "
+ + "predictions map<ascii,ascii>, archive blob, LOG varchar, PRIMARY KEY(JobID));");
- System.out.println("Cassandra connected");
+ session.execute("CREATE INDEX IF NOT EXISTS ProteinSeq ON ProteinRow (protein);");
+ session.execute("CREATE INDEX IF NOT EXISTS JobDateStamp ON ProteinData (jobtime);");
}
/*
* parsing data source and filling the database
*/
public void Parsing() throws IOException {
- if (true) {
+ if (READ_WEB_JPRED) {
// if (source.equals("http")) {
// get data from real Jpred production server
System.out.println("Parsing web data source......");
String datasrc = "http://www.compbio.dundee.ac.uk/www-jpred/results/usage-new/alljobs.dat";
String prefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
JpredParserHTTP parser = new JpredParserHTTP(prefix);
- parser.Parsing(datasrc, 4);
+ parser.Parsing(datasrc, 5);
}
- if (false) {
+ if (READ_LOCALFILE_JPRED) {
// if (source.equals("file")) {
// get irtifical data generated for the DB stress tests
System.out.println("Parsing local file data source......");
System.out.println("Cassandra has been shut down");
}
+ public boolean JobisNotInsterted(String jobid) {
+ ResultSet results1 = session.execute("SELECT * FROM ProteinLog WHERE JobID = '" + jobid + "';");
+ if (results1.isExhausted()) {
+ return true;
+ }
+ return false;
+ }
+
+ public boolean JobisNotArchived(String jobid) {
+ ResultSet results1 = session.execute("SELECT * FROM JpredArchive WHERE JobID = '" + jobid + "';");
+ if (results1.isExhausted()) {
+ return true;
+ }
+ return false;
+ }
+
/*
- * inserting data into the db
+ * inserting data into the tables for queries
*/
- public void InsertData(long jobtime, String startdate, String enddate, String ip, String jobid, String statusEx, String statusFinal,
- String protein, List<FastaSequence> predictions) {
- String check1 = "SELECT * FROM ProteinKeyspace.ProteinLog WHERE JobID = '" + jobid + "';";
- ResultSet results1 = session.execute(check1);
- if (results1.isExhausted()) {
- String com1 = "INSERT INTO ProteinKeyspace.ProteinLog "
- + "(JobID, IP, DataBegin, DataEnd, FinalStatus, ExecutionStatus, Protein)" + " VALUES ('" + jobid + "','" + ip + "','"
- + startdate + "','" + enddate + "','" + statusFinal + "','" + statusEx + "','" + protein + "');";
- session.execute(com1);
- String com2 = "INSERT INTO ProteinKeyspace.ProteinData " + "(jobtime, JobID, Protein)" + " VALUES (" + jobtime + ",'" + jobid
+ public int FormQueryTables(long jobtime, String startdate, String enddate, String ip, String jobid, String statusEx,
+ String statusFinal, String protein, List<FastaSequence> predictions) {
+ if (JobisNotInsterted(jobid)) {
+ String com1 = "INSERT INTO ProteinLog " + "(JobID, IP, DataBegin, DataEnd, FinalStatus, ExecutionStatus, Protein)"
+ + " VALUES ('" + jobid + "','" + ip + "','" + startdate + "','" + enddate + "','" + statusFinal + "','" + statusEx
+ "','" + protein + "');";
+ session.execute(com1);
+
+ String com2 = "INSERT INTO ProteinData " + "(jobtime, JobID, Protein)" + " VALUES (" + jobtime + ",'" + jobid + "','" + protein
+ + "');";
session.execute(com2);
+
String allpredictions = "";
for (FastaSequence pred : predictions) {
String predictionname = pred.getId();
if (null != allpredictions) {
final_prediction = allpredictions.substring(0, allpredictions.length() - 1);
}
- String check2 = "SELECT * FROM ProteinKeyspace.ProteinRow WHERE JobID = '" + jobid + "' ALLOW FILTERING;";
+
+ String check2 = "SELECT * FROM ProteinRow WHERE JobID = '" + jobid + "';";
ResultSet results2 = session.execute(check2);
if (results2.isExhausted()) {
- String com3 = "INSERT INTO ProteinKeyspace.ProteinRow " + "(Protein, JobID, Predictions)" + " VALUES ('"
- + protein + "','" + jobid + "',{" + final_prediction + "});";
+ String com3 = "INSERT INTO ProteinRow " + "(Protein, JobID, Predictions)" + " VALUES ('" + protein + "','" + jobid + "',{"
+ + final_prediction + "});";
session.execute(com3);
}
- String check3 = "SELECT * FROM ProteinKeyspace.ProteinRow WHERE JobID = '" + jobid + "';";
+ return 1;
}
+ return 0;
+ }
+
+ /*
+ * insert data from a real Jpred job: timing+IP, Execution Status, Final
+ * status, protein sequence, predictions, alignment, LOG and tar.gz files
+ */
+ public int ArchiveData(long starttime, long exectime, String ip, String jobid, String statusEx, String statusFinal, String protein,
+ List<FastaSequence> predictions, List<FastaSequence> seqs, String LogFile, String archivepath) {
+ if (JobisNotArchived(jobid)) {
+ String log = LogFile.replaceAll("'", "");
+ session.execute("INSERT INTO JpredArchive (JobID, Protein, IP, StartTime, ExecTime,LOG) VALUES ('" + jobid + "','" + protein
+ + "','" + ip + "'," + starttime + "," + exectime + ",'" + log + "');");
+ if (false) {
+ PreparedStatement statement = session.prepare("INSERT INTO JpredArchive (JobID, archive) VALUES (?,?);");
+ BoundStatement boundStatement = new BoundStatement(statement);
+ session.execute(boundStatement.bind(jobid, archivepath));
+ }
+
+ for (FastaSequence p : predictions) {
+ session.execute("UPDATE JpredArchive SET predictions = predictions + {'" + p.getId() + "':'"
+ + p.getSequence().replaceAll("\n", "") + "'} WHERE JobID = '" + jobid + "';");
+ }
+
+ for (FastaSequence s : seqs) {
+ session.execute("UPDATE JpredArchive SET alignment = alignment + {'" + s.getId() + "':'"
+ + s.getSequence().replaceAll("\n", "") + "'} WHERE JobID = '" + jobid + "';");
+ }
+ return 1;
+ }
+ return 0;
}
/*
*/
public List<Pair<String, String>> ReadProteinDataTable() {
final long startTime = System.currentTimeMillis();
- String com = "SELECT DataBegin,DataEnd FROM ProteinKeyspace.ProteinLog;";
+ String com = "SELECT DataBegin,DataEnd FROM ProteinLog;";
System.out.println("Command: " + com);
ResultSet results = session.execute(com);
final long queryTime = System.currentTimeMillis();
List<Row> rows = results.all();
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
List<Pair<String, String>> res = new ArrayList<Pair<String, String>>();
int c = 0;
for (Row r : rows) {
- Pair<String, String> pair = new Pair<String, String>(r.getString("DataBegin"),r.getString("DataEnd"));
+ Pair<String, String> pair = new Pair<String, String>(r.getString("DataBegin"), r.getString("DataEnd"));
res.add(pair);
++c;
}
final long endTime = System.currentTimeMillis();
- System.out.println (c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
return res;
}
-
+
/*
* getting data from the db ProteinData
*/
public Integer ReadDateTable(long queryDate) {
final long startTime = System.currentTimeMillis();
- String com = "SELECT jobtime, JobID FROM ProteinKeyspace.ProteinData WHERE jobtime = " + queryDate + ";";
+ String com = "SELECT jobtime, JobID FROM ProteinData WHERE jobtime = " + queryDate + ";";
System.out.println("Command: " + com);
ResultSet results = session.execute(com);
- if (results.isExhausted())
- return null;
final long queryTime = System.currentTimeMillis();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ if (results.isExhausted())
+ return 0;
List<Row> rows = results.all();
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
+ final long endTime = System.currentTimeMillis();
+ System.out.println("Processing time is " + (endTime - queryTime) + " msec");
return rows.size();
}
*/
public List<StructureProteinPrediction> ReadWholeSequence(String queryProtein) {
final long startTime = System.currentTimeMillis();
- String com = "SELECT JobID, Predictions FROM ProteinKeyspace.ProteinRow WHERE Protein = '" + queryProtein + "';";
+ String com = "SELECT JobID, Predictions FROM ProteinRow WHERE Protein = '" + queryProtein + "';";
System.out.println("Command: " + com);
ResultSet results = session.execute(com);
if (results.isExhausted())
return null;
final long queryTime = System.currentTimeMillis();
List<Row> rows = results.all();
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
- System.out.println (" rows analysed, " + rows.size());
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println(" rows analysed, " + rows.size());
List<StructureProteinPrediction> res = new ArrayList<StructureProteinPrediction>();
int c = 0;
for (Row r : rows) {
- StructureProteinPrediction structure = new StructureProteinPrediction(queryProtein, r.getString("JobID"), r.getMap("Predictions", String.class, String.class));
+ StructureProteinPrediction structure = new StructureProteinPrediction(queryProtein, r.getString("JobID"), r.getMap(
+ "Predictions", String.class, String.class));
res.add(structure);
++c;
}
final long endTime = System.currentTimeMillis();
- System.out.println (c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
return res;
}
-
+
/*
* getting part of protein sequence from the db ProteinRow
*/
- public List<StructureProteinPrediction> ReadPartOfSequence(String queryProtein) {
+ public List<StructureProteinPrediction> ReadPartOfSequence(String queryProtein) {
final long startTime = System.currentTimeMillis();
- String com = "SELECT * FROM ProteinKeyspace.ProteinRow;";
+ String com = "SELECT * FROM ProteinRow;";
System.out.println("Command: " + com);
ResultSet results = session.execute(com);
if (results.isExhausted())
return null;
final long queryTime = System.currentTimeMillis();
List<Row> rows = results.all();
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
- System.out.println (" rows analysed, " + rows.size());
- List<StructureProteinPrediction> res = new ArrayList<StructureProteinPrediction>();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println(" rows analysed, " + rows.size());
+ List<StructureProteinPrediction> res = new ArrayList<StructureProteinPrediction>();
int c = 0;
for (Row r : rows) {
String prot = r.getString("Protein");
if (prot.matches("(.*)" + queryProtein + "(.*)")) {
- // System.out.println(prot);
- StructureProteinPrediction structure = new StructureProteinPrediction(prot, r.getString("JobID"), r.getMap("Predictions", String.class, String.class));
+ StructureProteinPrediction structure = new StructureProteinPrediction(prot, r.getString("JobID"), r.getMap("Predictions",
+ String.class, String.class));
res.add(structure);
++c;
}
}
final long endTime = System.currentTimeMillis();
- System.out.println (c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
return res;
}
-
+
/*
* getting protein sequences by counter
*/
- public Map<String, Integer> ReadProteinDataByCounter() {
+ public Map<String, Integer> ReadProteinDataByCounter() {
final long startTime = System.currentTimeMillis();
- String com = "SELECT Protein FROM ProteinKeyspace.ProteinRow;";
+ String com = "SELECT Protein FROM ProteinRow;";
System.out.println("Command: " + com);
ResultSet results = session.execute(com);
if (results.isExhausted())
return null;
final long queryTime = System.currentTimeMillis();
List<Row> rows = results.all();
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
- System.out.println (" rows analysed, " + rows.size());
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println(" rows analysed, " + rows.size());
Map<String, Integer> res = new HashMap<String, Integer>();
int c = 0;
for (Row r : rows) {
String protein = r.getString("Protein");
- if (res.containsKey(protein))
+ if (res.containsKey(protein))
res.put(protein, res.get(protein) + 1);
else
res.put(protein, 1);
}
final long endTime = System.currentTimeMillis();
- System.out.println (c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
return res;
}
-
-
+
/*
* getting protein sequences by counter
*/
if (results1.isExhausted())
return null;
Row row1 = results1.one();
- StructureJobLog res = new StructureJobLog(row.getString("Protein"), row.getString("JobID"), row.getString("DataBegin"), row.getString("DataEnd"), row.getString("ip"), row1.getMap("Predictions", String.class, String.class));
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
+ StructureJobLog res = new StructureJobLog(row.getString("Protein"), row.getString("JobID"), row.getString("DataBegin"),
+ row.getString("DataEnd"), row.getString("ip"), row1.getMap("Predictions", String.class, String.class));
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
final long endTime = System.currentTimeMillis();
- System.out.println (" rows analysed, execution time is " + (endTime - startTime) + " msec");
+ System.out.println(" rows analysed, execution time is " + (endTime - startTime) + " msec");
return res;
}
-
+
/*
* getting earlest date of jobs from the db
*/
public long getEarliestDateInDB() {
final long startTime = System.currentTimeMillis();
- String com = "SELECT jobtime,JobID FROM ProteinKeyspace.ProteinData;";
+ String com = "SELECT jobtime,JobID FROM ProteinData;";
System.out.println("Command: " + com);
ResultSet results = session.execute(com);
final long queryTime = System.currentTimeMillis();
- System.out.println ("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
Calendar cal = Calendar.getInstance();
long res = cal.getTimeInMillis();
++c;
}
final long endTime = System.currentTimeMillis();
- System.out.println (c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
return res;
}
-
+
}
package compbio.cassandra;
import java.io.BufferedReader;
+import java.io.DataInputStream;
+import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
public class JpredParserHTTP implements JpredParser {
private CassandraNativeConnector cc = new CassandraNativeConnector();
private String dirprefix;
+ private List<FastaSequence> alignment;
+ private List<FastaSequence> predictions;
+ private String jnetpred;
JpredParserHTTP() {
dirprefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
}
}
- private int ParsingForDate(String input, String date) {
+ private String parsePredictions(final InputStream stream, String jobid) throws FileNotFoundException {
+ final FastaReader fr = new FastaReader(stream);
+ String query = "";
+ alignment = new ArrayList<FastaSequence>();
+ predictions = new ArrayList<FastaSequence>();
+ while (fr.hasNext()) {
+ final FastaSequence fs = fr.next();
+ String seqid = fs.getId();
+ String seq = fs.getSequence().replaceAll("\n", "");
+ if (seqid.equals("QUERY") || seqid.equals(jobid)) {
+ query = seq;
+ alignment.add(fs);
+ } else if (seqid.equals("jnetpred") || seqid.equals("Lupas_21") || seqid.equals("Lupas_14") || seqid.equals("Lupas_28")
+ || seqid.equals("JNETSOL25") || seqid.equals("JNETSOL5") || seqid.equals("JNETSOL0") || seqid.equals("JNETCONF")
+ || seqid.equals("JNETHMM") || seqid.equals("JNETPSSM")) {
+ predictions.add(fs);
+ if (seqid.equals("jnetpred"))
+ jnetpred = seq;
+ } else {
+ alignment.add(fs);
+ }
+ }
+ return query;
+ }
+
+ private String parseLogFile(final InputStream stream) throws IOException {
+ String out = "";
+ BufferedReader buffer = new BufferedReader(new InputStreamReader(stream));
+ String line;
+ while (null != (line = buffer.readLine())) {
+ out += line;
+ }
+ return out;
+ }
+
+ private List<Byte> parseArchiveFile(final InputStream stream) throws IOException {
+ DataInputStream data_in = new DataInputStream(stream);
+ List<Byte> out = new ArrayList<Byte>();
+ while (true) {
+ try {
+ out.add(data_in.readByte());
+ } catch (EOFException eof) {
+ break;
+ }
+ }
+ return out;
+ }
+
+ private void ParsingForDate(String input, String date) {
int totalcount = 0;
int countNoData = 0;
int countUnclearFASTAid = 0;
int countinserted = 0;
int counAlignments = 0;
int countStrange = 0;
- int njobs = 0;
System.out.println("Inserting jobs for " + date);
try {
String line;
while ((line = alljobs.readLine()) != null) {
- if (line.matches(date + "(.*)jp_[^\\s]+")) {
+ if (line.matches(date + ":(.*)jp_[^\\s]+")) {
String[] table = line.split("\\s+");
// Format of a record:
// starttime endtime ip email jobid (directory)
- // 013/10/25:21:55:7 2013/10/25:21:59:13 201.239.98.172 unknown_email jp_J9HBCBT
+ // 013/10/25:21:55:7 2013/10/25:21:59:13 201.239.98.172
+ // unknown_email jp_J9HBCBT
String id = table[table.length - 1];
totalcount++;
- String datalink = dirprefix + "/" + id + "/" + id + ".concise.fasta";
- URL urltable = new URL(datalink);
- HttpURLConnection httpConnection = (HttpURLConnection) urltable.openConnection();
- int responsecode = httpConnection.getResponseCode();
- if (199 < responsecode && responsecode < 300) {
- try {
- final FastaReader fr = new FastaReader(urltable.openStream());
- final List<FastaSequence> seqs = new ArrayList<FastaSequence>();
- String newprotein = "";
- while (fr.hasNext()) {
- final FastaSequence fs = fr.next();
- if (fs.getId().equals("QUERY") || fs.getId().equals(id))
- newprotein = fs.getSequence().replaceAll("\n", "");
- else if (fs.getId().equals("jnetpred") || fs.getId().equals("JNETPRED")) {
- seqs.add(fs);
- }
- }
- if (newprotein.equals("")) {
- countUnclearFASTAid++;
- } else {
- SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd");
- String dateInString1 = table[0].substring(0, table[0].indexOf(":"));
- long dateWork1 = 0;
- try {
- Date dat1 = formatter.parse(dateInString1);
- dateWork1 = dat1.getTime();
- } catch (ParseException e) {
- e.printStackTrace();
+ if (cc.JobisNotInsterted(id)) {
+ URL dataurl = new URL(dirprefix + "/" + id + "/" + id + ".concise.fasta");
+ URL archiveurl = new URL(dirprefix + "/" + id + "/" + id + ".tar.gz");
+ URL logurl = new URL(dirprefix + "/" + id + "/LOG");
+ HttpURLConnection httpConnection1 = (HttpURLConnection) dataurl.openConnection();
+ HttpURLConnection httpConnection2 = (HttpURLConnection) logurl.openConnection();
+ HttpURLConnection httpConnection3 = (HttpURLConnection) archiveurl.openConnection();
+ int response1 = httpConnection1.getResponseCode();
+ int response2 = httpConnection2.getResponseCode();
+ if (199 < response1 && response1 < 300) {
+ try {
+ String protein = parsePredictions(dataurl.openStream(), id);
+ if (protein.equals("")) {
+ countUnclearFASTAid++;
+ } else {
+ SimpleDateFormat dateformatter = new SimpleDateFormat("yyyy/MM/dd");
+ SimpleDateFormat timeformatter = new SimpleDateFormat("yyyy/MM/dd:H:m:s");
+ String startdatestring = table[0].substring(0, table[0].indexOf(":"));
+ try {
+ Date startdate = dateformatter.parse(startdatestring);
+ Date starttime = timeformatter.parse(table[0]);
+ Date endtime = timeformatter.parse(table[1]);
+ String ip = table[2];
+ String execstatus = "OK";
+ String finalstatus = "OK";
+ countinsertions += cc.FormQueryTables(startdate.getTime(), table[0], table[1], ip, id, execstatus,
+ finalstatus, protein, predictions);
+
+ long exectime = (endtime.getTime() - starttime.getTime()) / 1000;
+ String log = "";
+ if (199 < response2 && response2 < 300) {
+ log = parseLogFile(logurl.openStream());
+ }
+ cc.ArchiveData(startdate.getTime(), exectime, ip, id, execstatus, finalstatus, protein,
+ predictions, alignment, log, archiveurl.toString());
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
}
- cc.InsertData(dateWork1, table[0], table[1], table[2], id, "OK", "OK", newprotein, seqs);
- ++countinsertions;
- ++njobs;
- // flush every 50 insertions
- // if (0 == countinsertions % 50) {
- // cc.flushData();
- // njobs -= 50;
- // }
+ } catch (IOException e) {
+ e.printStackTrace();
}
- } catch (IOException e) {
- e.printStackTrace();
+ } else {
+ countNoData++;
}
+ httpConnection1.disconnect();
+ httpConnection2.disconnect();
+ httpConnection3.disconnect();
} else {
- countNoData++;
+ ++countinserted;
}
} else {
if (line.matches(date + "(.*)Sequence0/(.*)")) {
} catch (IOException e) {
e.printStackTrace();
}
- return njobs;
+ ;
}
}
System.out.println("Execution Time = " + execTime + " ms");
}
- private int ParsingForDate(List<String> input, String date) {
+ private void ParsingForDate(List<String> input, String date) {
int totalcount = 0;
int countNoData = 0;
int countUnclearFASTAid = 0;
int countinserted = 0;
int counAlignments = 0;
int countStrange = 0;
- int njobs = 0;
System.out.println("Inserting jobs for " + date);
for (String in : input) {
- if (in.matches(date + "(.*)jp_[^\\s]+")) {
+ if (in.matches(date + ":(.*)jp_[^\\s]+")) {
String[] table = in.split("\\s+");
String starttime = table[0];
String finishtime = table[1];
} catch (ParseException e) {
e.printStackTrace();
}
- cc.InsertData(insertdate, starttime, finishtime, ip, id, "OK", "OK", newprotein, seqs);
- ++countinsertions;
- ++njobs;
+ countinsertions += cc.FormQueryTables(insertdate, starttime, finishtime, ip, id, "OK", "OK", newprotein, seqs);
}
fr.close();
} catch (IOException e) {
System.out.println(" " + countUnclearFASTAid + " jobs with unclear FASTA protein id in *.concise.fasta");
System.out.println(" " + countinsertions + " new job insertions\n");
}
- return njobs;
}
}
--- /dev/null
+/* Copyright (c) 2009 Peter Troshin\r
+ * \r
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 1.0 \r
+ * \r
+ * This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ * Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ * License for more details.\r
+ * \r
+ * A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
+\r
+package compbio.engine;\r
+\r
+import java.io.File;\r
+import java.io.IOException;\r
+import java.net.URISyntaxException;\r
+import java.net.URL;\r
+\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.util.PropertyHelper;\r
+import compbio.util.Util;\r
+\r
+public final class ProteoCachePropertyHelperManager {\r
+\r
+ private static Logger log = Logger.getLogger(ProteoCachePropertyHelperManager.class);\r
+ private static PropertyHelper ph = null;\r
+\r
+ /**\r
+ * Ways to fix path problem: \r
+ * 1) find a path to WEB-INF directory based on the path to a known class. \r
+ * Then prepend this absolute path to the rest of paths \r
+ * pros: no input from user \r
+ * cons: relocation of the source may cause problems \r
+ * \r
+ * 2) Require users to add configuration directories to the class\r
+ * path and then load entries from it. \r
+ * pros: \r
+ * cons: Many paths needs to be added. Put significant burden on the user. \r
+ * Hard to tell web appl server to add these entries to its class path. \r
+ * \r
+ * 3) Ask for project source directory explicitly in the configuration. \r
+ * pros:\r
+ * cons: similar to 1, but this initial configuration file must reside in \r
+ * well known location! Why ask users what can be found automatically?\r
+ * \r
+ * 4) Have everything in the location already in class path for tomcat. \r
+ * pros:\r
+ * cons: only classes and lib/*.jar are added, Eclipse will remove non \r
+ * classses from classes dir.\r
+ * \r
+ * Try 1 - succeed.\r
+ * \r
+ * @return an instance\r
+ */\r
+ public static PropertyHelper getPropertyHelper() {\r
+ if (ph == null) {\r
+ try {\r
+ File MainPropertyFile = getResourceFromClasspath("conf/Proteocache.properties");\r
+ ph = new PropertyHelper(MainPropertyFile);\r
+ } catch (IOException e) {\r
+ log.warn("Cannot read property files! Reason: " + e.getLocalizedMessage(), e.getCause());\r
+ }\r
+ }\r
+ return ph;\r
+ }\r
+\r
+ static File getResourceFromClasspath(String resourceName) {\r
+ assert !Util.isEmpty(resourceName);\r
+ String locPath = getLocalPath();\r
+ File prop = new File(locPath + resourceName);\r
+ if (!prop.exists()) {\r
+ log.warn("Could not find a resource " + resourceName + " in the classpath!");\r
+ }\r
+ return prop;\r
+ }\r
+\r
+ /**\r
+ * Method return the absolute path to the project root directory. It assumes\r
+ * the following structure of the project:\r
+ * project-root: \r
+ * conf/settings\r
+ * binaries \r
+ * WEB-INF/classes/compbio/engine/conf/PropertyHelperManager.class\r
+ * If the structure changes it must be reflected in this method\r
+ * \r
+ * @return the local path\r
+ * @throws RuntimeException\r
+ * if cannot determine the local path\r
+ */\r
+ public static String getLocalPath() {\r
+ String clname = ProteoCachePropertyHelperManager.class.getSimpleName();\r
+ URL url = ProteoCachePropertyHelperManager.class.getResource(clname + ".class");\r
+ File f = null;\r
+ try {\r
+ f = new File(url.toURI());\r
+ // Iterate up the hierarchy to find a root project directory\r
+ for (int i = 0; i < 5; i++) {\r
+ f = f.getParentFile();\r
+ }\r
+ } catch (URISyntaxException e) {\r
+ String mes = "Could not find resources path! Problems locating PropertyHelperManager class! ";\r
+ log.error(mes + e.getLocalizedMessage(), e.getCause());\r
+ throw new RuntimeException(mes + e.getLocalizedMessage(), e.getCause());\r
+ } catch (IllegalArgumentException e) {\r
+ // Classes are in the jar file, using different method to determine\r
+ // the path new File(INCORRECT URL) throws it\r
+ String mes = "It looks like classes are in the jar file. " \r
+ + "Attempting a different method to determinine the path to the resources";\r
+ log.debug(mes + e.getLocalizedMessage(), e.getCause());\r
+ try {\r
+ f = new File(ProteoCachePropertyHelperManager.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath());\r
+\r
+ // Iterate up the hierarchy to find a root project directory\r
+ // This time there is not need to walk up all class packages\r
+ // WEB_APPL_NAME\WEB-INF\lib\JAR-FILE-NAME\r
+ // jws2-1.0\WEB-INF\lib\full-jws2-1.0.jar\r
+ for (int i = 0; i < 3; i++) {\r
+ f = f.getParentFile();\r
+ }\r
+ } catch (URISyntaxException e1) {\r
+ log.error("Could not find resources path! " + e1.getLocalizedMessage(), e1.getCause());\r
+ throw new RuntimeException("Could not find resources path! ", e1.getCause());\r
+ }\r
+ }\r
+ log.debug("Project directory is: " + f.getAbsolutePath());\r
+ return f.getAbsolutePath() + File.separator;\r
+ }\r
+\r
+ public static int getIntProperty(String propValue) {\r
+ if (!Util.isEmpty(propValue)) {\r
+ return Integer.parseInt(propValue.trim());\r
+ }\r
+ return -1;\r
+ }\r
+\r
+ public static boolean getBooleanProperty(String propValue) {\r
+ if (!Util.isEmpty(propValue)) {\r
+ propValue = propValue.trim();\r
+ return Boolean.parseBoolean(propValue);\r
+ }\r
+ return false;\r
+ }}\r
-\r
-## CHANGE THIS (The root directory where to store all the log files) \r
-#logDir = .\r
-\r
-## Uncomment to enable JWS2 activity logging to standard out (to the console if available)\r
-## for possible log levels please refer to Log4j documentation http://logging.apache.org/log4j/1.2/manual.html \r
+## Uncomment to enable ProteoCache activity logging\r
## Valid log levels are:\r
## TRACE - log everything from below including very detailed messages (useful for debugging only)\r
## DEBUG - log everything from below including some minor events (useful for debugging only)\r
#log4j.appender.rootfile.layout.ConversionPattern=%t %d %m%n\r
\r
## Uncomment to enable ProteoCache activity logging to the file
+#logDir = .\r
#log4j.logger.compbio=TRACE, ACTIVITY\r
#log4j.appender.ACTIVITY=org.apache.log4j.RollingFileAppender\r
################log4j.appender.ACTIVITY.File=${logDir}/activity.log\r
#log4j.appender.C.layout.ConversionPattern=%m%n\r
\r
##################################################################################################################################\r
-# log4j.properties from runner/\r
-\r
#log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
#log4j.appender.stdout.Target=System.out\r
#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
\r
log4j.rootLogger=ERROR, R\r
log4j.appender.R=org.apache.log4j.FileAppender\r
-log4j.appender.R.File=${catalina.base}/logs/JABAWSErrorFile.log\r
+log4j.appender.R.File=${catalina.base}/logs/errors.log\r
log4j.appender.R.layout=org.apache.log4j.PatternLayout\r
log4j.appender.R.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
\r
# %d{ABSOLUTE} %5p %c{1}:%L -\r
-log4j.logger.compbio=WARN, B\r
+log4j.logger.compbio=DEBUG, B
log4j.appender.B=org.apache.log4j.FileAppender\r
-log4j.appender.B.File=${catalina.base}/logs/engine.log\r
+log4j.appender.B.File=${catalina.base}/logs/debugging.log\r
log4j.appender.B.layout=org.apache.log4j.PatternLayout\r
log4j.appender.B.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
\r
#log4j.appender.C.layout=org.apache.log4j.PatternLayout\r
#log4j.appender.C.layout.ConversionPattern=%m%n\r
# %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
-\r
-#log4j.logger.RunnerLogger=ERROR, D\r
-#log4j.appender.D=org.apache.log4j.ConsoleAppender \r
-#log4j.appender.D.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.D.layout.ConversionPattern=%r [%t] %p %c %x - %m%n\r
+++ /dev/null
-\r
-# change this \r
-logDir =/homes/www-protcache/logs\r
-\r
-log4j.rootLogger=ERROR, stdout\r
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-log4j.appender.stdout.Target=System.out\r
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.stdout.layout.ConversionPattern=%m%n\r
-\r
-log4j.logger.compbio=TRACE, ACTIVITY\r
-log4j.appender.ACTIVITY=org.apache.log4j.RollingFileAppender\r
-log4j.appender.ACTIVITY.File=${logDir}/activity.log\r
-log4j.appender.ACTIVITY.MaxFileSize=10MB\r
-log4j.appender.ACTIVITY.MaxBackupIndex=10000\r
-log4j.appender.ACTIVITY.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.ACTIVITY.layout.ConversionPattern=%d{MM-dd@HH:mm:ss} %-5p %3x - %m%n\r
-\r
-log4j.logger.ClustalWS-stats=INFO, STAT\r
-log4j.logger.MuscleWS-stats=INFO, STAT\r
-log4j.logger.TcoffeeWS-stats=INFO, STAT\r
-log4j.logger.MafftWS-stats=INFO, STAT\r
-log4j.logger.ProbconsWS-stats=INFO, STAT\r
-log4j.appender.STAT=org.apache.log4j.RollingFileAppender\r
-log4j.appender.STAT.File=${logDir}/wsaccess.log\r
-log4j.appender.STAT.MaxFileSize=10MB\r
-log4j.appender.STAT.MaxBackupIndex=10000\r
-log4j.appender.STAT.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.STAT.layout.ConversionPattern=%t %d %m%n\r
-\r
-\r
-# Uncomment for separate local engine execution log\r
-#log4j.logger.compbio.engine.local.LocalExecutorService=INFO, C\r
-#log4j.appender.C=org.apache.log4j.FileAppender\r
-#log4j.appender.C.File=LocalExecutorService.log\r
-#log4j.appender.C.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.C.layout.ConversionPattern=%m%n\r
-\r
-#Parameter combinator logger (testing only)\r
-#log4j.logger.RunnerLogger=ERROR, RUNNER\r
-#log4j.appender.RUNNER=org.apache.log4j.FileAppender\r
-#log4j.appender.RUNNER.File=RunnerParam.log \r
-#log4j.appender.RUNNER.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.RUNNER.layout.ConversionPattern=%r [%t] %p %c %x - %m%n\r
-\r
-\r
-#Not used - collection of patterns\r
-# %d{ABSOLUTE} %5p %c{1}:%L -\r
-#log4j.appender.C.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n\r
-# %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
-#log4j.appender.ClustalWS-stats.layout.ConversionPattern=%p %d %t %C{4} %m%n\r
+++ /dev/null
-\r
-## CHANGE THIS (The root directory where to store all the log files) \r
-#logDir = .\r
-\r
-## Uncomment to enable JWS2 activity logging to standard out (to the console if available)\r
-## for possible log levels please refer to Log4j documentation http://logging.apache.org/log4j/1.2/manual.html \r
-## Valid log levels are:\r
-## TRACE - log everything from below including very detailed messages (useful for debugging only)\r
-## DEBUG - log everything from below including some minor events (useful for debugging only)\r
-## INFO - log everything from below including some information messages\r
-## WARN - log error and warnings\r
-## ERROR - log errors and fatal events only \r
-## FATAL - log fatal events only\r
-\r
-#log4j.rootLogger=ERROR, stdout\r
-#log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-#log4j.appender.stdout.Target=System.out\r
-#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.stdout.layout.ConversionPattern=%m%n\r
-\r
-## Uncomment to enable JWS2 activity logging to the file\r
-#log4j.logger.compbio=ERROR, ACTIVITY\r
-#log4j.appender.ACTIVITY=org.apache.log4j.RollingFileAppender\r
-#log4j.appender.ACTIVITY.File=${logDir}/activity.log\r
-#log4j.appender.ACTIVITY.MaxFileSize=10MB\r
-#log4j.appender.ACTIVITY.MaxBackupIndex=10000\r
-#log4j.appender.ACTIVITY.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.ACTIVITY.layout.ConversionPattern=%d{MM-dd@HH:mm:ss} %-5p %3x - %m%n\r
-\r
-## Uncomment for web access logging. Please do not change the log level!\r
-#log4j.logger.ClustalWS-stats=INFO, STAT\r
-#log4j.logger.MuscleWS-stats=INFO, STAT\r
-#log4j.logger.TcoffeeWS-stats=INFO, STAT\r
-#log4j.logger.MafftWS-stats=INFO, STAT\r
-#log4j.logger.ProbconsWS-stats=INFO, STAT\r
-#log4j.appender.STAT=org.apache.log4j.RollingFileAppender\r
-#log4j.appender.STAT.File=${logDir}/wsaccess.log\r
-#log4j.appender.STAT.MaxFileSize=10MB\r
-#log4j.appender.STAT.MaxBackupIndex=10000\r
-#log4j.appender.STAT.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.STAT.layout.ConversionPattern=%t %d %m%n\r
-\r
-## Uncomment for separate local engine execution log (debugging only)\r
-#log4j.logger.compbio.engine.local.LocalExecutorService=INFO, C\r
-#log4j.appender.C=org.apache.log4j.FileAppender\r
-#log4j.appender.C.File=LocalExecutorService.log\r
-#log4j.appender.C.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.C.layout.ConversionPattern=%m%n\r
-\r
-## Parameter combinator logger (testing only)\r
-#log4j.logger.RunnerLogger=ERROR, RUNNER\r
-#log4j.appender.RUNNER=org.apache.log4j.FileAppender\r
-#log4j.appender.RUNNER.File=RunnerParam.log \r
-#log4j.appender.RUNNER.layout=org.apache.log4j.PatternLayout\r
-#log4j.appender.RUNNER.layout.ConversionPattern=%r [%t] %p %c %x - %m%n\r
-\r
-\r
-## NOT IN USE - collection of patterns choose the one that suits you\r
-# %d{ABSOLUTE} %5p %c{1}:%L -\r
-#log4j.appender.C.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n\r
-# %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
-#log4j.appender.ClustalWS-stats.layout.ConversionPattern=%p %d %t %C{4} %m%n\r
+++ /dev/null
-\r
-## CHANGE THIS (The root directory where to store all the log files) \r
-#logDir = .\r
-\r
-## Uncomment to enable JWS2 activity logging to standard out (to the console if available)\r
-## for possible log levels please refer to Log4j documentation http://logging.apache.org/log4j/1.2/manual.html \r
-## Valid log levels are:\r
-## TRACE - log everything from below including very detailed messages (useful for debugging only)\r
-## DEBUG - log everything from below including some minor events (useful for debugging only)\r
-## INFO - log everything from below including some information messages\r
-## WARN - log error and warnings\r
-## ERROR - log errors and fatal events only \r
-## FATAL - log fatal events only\r
-\r
-##################################################################################################################################\r
-log4j.rootLogger=ERROR, R\r
-log4j.appender.R=org.apache.log4j.FileAppender\r
-log4j.appender.R.File=logs/StatDBcleaningErrorFile.log\r
-log4j.appender.R.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.R.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
-\r
-# %d{ABSOLUTE} %5p %c{1}:%L -\r
-log4j.logger.compbio=WARN, B\r
-log4j.appender.B=org.apache.log4j.FileAppender\r
-log4j.appender.B.File=logs/StatDBcleaningWarningFile.log\r
-log4j.appender.B.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.B.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
import javax.servlet.annotation.WebListener;
import compbio.cassandra.CassandraNativeConnector;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
/**
* Application Lifecycle Listener implementation class ContextListener
public class ContextListener implements ServletContextListener {
private ScheduledExecutorService webjob_scheduler;
CassandraNativeConnector db = new CassandraNativeConnector();
+ static PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
/**
* @see ServletContextListener#contextInitialized(ServletContextEvent)
--- /dev/null
<%@page import="java.util.ArrayList"%>
<%@ page trimDirectiveWhitespaces="true"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt"%>
<%@ taglib uri="http://displaytag.sf.net" prefix="dt"%>
<%-- Execution-time report. ${result} is a list: every element except the
     last carries one per-date row (date + timeRez duration buckets); the
     LAST element carries the per-bucket totals (timeTotalExec). --%>
<c:choose>
	<c:when test="${flag == 'AllDate'}">
		<h3>Time execution for the whole period</h3>
	</c:when>
	<c:otherwise>
		<h3>Time execution for the interval: ${data1} - ${data2}</h3>
	</c:otherwise>
</c:choose>
<h3>Time execution: ${timeExecution} ms</h3>

<%-- Sum the totals row (last element) to obtain the overall job count. --%>
<c:set var="sum" value="0" />
<c:forEach items="${result}" var="res" varStatus="loop">
	<c:if test="${loop.last}">
		<c:forEach items="${res.timeTotalExec}" var="total">
			<c:set var="sum" value="${sum + total}" />
		</c:forEach>
	</c:if>
</c:forEach>

<h3>Total number of jobs: ${sum}</h3>

<table border="1" style="border-collapse: collapse; white-space: nowrap">
	<thead>
		<tr>
			<th style="text-align: center; width: 150px">Date</th>
			<th style="text-align: center; width: 150px">less than 30 s</th>
			<th style="text-align: center; width: 150px">30 s - 60 s</th>
			<th style="text-align: center; width: 150px">1 min - 2 min</th>
			<th style="text-align: center; width: 150px">2 min - 10 min</th>
			<th style="text-align: center; width: 150px">more than 10 min</th>
		</tr>
	</thead>
	<tbody>
		<%-- Totals row first (the last element of ${result}). --%>
		<c:forEach items="${result}" var="res" varStatus="loop">
			<c:if test="${loop.last}">
				<tr style="font-weight: bolder;">
					<td>Total:</td>
					<c:forEach items="${res.timeTotalExec}" var="total">
						<td style="text-align: right">${total}</td>
					</c:forEach>
				</tr>
			</c:if>
		</c:forEach>

		<%-- One row per date (all elements except the last). --%>
		<c:forEach items="${result}" var="res" varStatus="loop">
			<c:if test="${not loop.last}">
				<tr>
					<td>${res.date}</td>
					<c:forEach items="${res.timeRez}" var="time">
						<td style="text-align: right">${time}</td>
					</c:forEach>
				</tr>
			</c:if>
		</c:forEach>
	</tbody>
</table>
--- /dev/null
<%@page import="java.util.ArrayList"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt"%>
<%@ taglib uri="http://displaytag.sf.net" prefix="dt"%>
<%-- Daily job statistics. ${result} holds one element per date with the job
     count in ${res.total}; failed/cancelled/abandoned counts are not yet
     tracked and are rendered as literal 0. --%>
<c:choose>
	<c:when test="${flag == 'AllDate'}">
		<h3>Jobs statistics for the whole period</h3>
	</c:when>
	<c:otherwise>
		<h3>
			Jobs statistics for the Period:
			<c:out value="${data1}" />
			to
			<c:out value="${data2}" />
		</h3>
	</c:otherwise>
</c:choose>
<h3>Time execution: ${timeExecution} ms</h3>

<%-- Grand total of jobs over all dates. --%>
<c:set var="sum" value="0" />
<c:forEach items="${result}" var="res">
	<c:set var="sum" value="${sum + res.total}" />
</c:forEach>


<table border="1" style="border-collapse: collapse; white-space: nowrap">
	<thead>
		<tr>
			<th rowspan="2" style="text-align: center">Date</th>
			<th colspan="4" style="text-align: center">Number of Proteins</th>
		</tr>
		<tr>
			<th style="text-align: center">Total</th>
			<th style="text-align: center">Failed</th>
			<th style="text-align: center">Cancelled</th>
			<th style="text-align: center">Abandoned</th>
		</tr>
	</thead>
	<tbody>

		<tr style="font-weight: bolder;">
			<td>Total:</td>
			<td style="text-align: right">${sum}</td>
			<td style="text-align: right">0</td>
			<td style="text-align: right">0</td>
			<td style="text-align: right">0</td>
		</tr>

		<c:forEach items="${result}" var="res">
			<tr>
				<td>${res.date}</td>
				<td style="text-align: right"><c:out value="${res.total}" /></td>
				<td style="text-align: right">0</td>
				<td style="text-align: right">0</td>
				<td style="text-align: right">0</td>
			</tr>
		</c:forEach>

	</tbody>
</table>