--- /dev/null
+ProteoCache is a project for caching data from Jpred
--- /dev/null
<!-- Maven build for ProteoCache: a WAR web application caching Jpred results in Cassandra. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<groupId>compbio</groupId>
	<artifactId>proteocache</artifactId>
	<!-- Packaged as a WAR for servlet-container deployment -->
	<packaging>war</packaging>
	<version>0.1.0-SNAPSHOT</version>
	<name>proteocache</name>
	<url>http://maven.apache.org</url>




	<!-- NOTE(review): only JUnit is declared; the DataStax Cassandra driver and
	     log4j used by the sources are not listed here yet (presumably provided
	     elsewhere - confirm). JUnit 3.8.1 is very old; consider upgrading when
	     tests are added. -->
	<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>3.8.1</version>
			<scope>test</scope>
		</dependency>
	</dependencies>
</project>
--- /dev/null
+package compbio.cassandra;
+
+import java.util.Calendar;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Host;
+import com.datastax.driver.core.Metadata;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+
+import com.datastax.driver.core.Session;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
+
/**
 * Native CQL connector for the ProteoCache Cassandra store.
 *
 * Holds one static Cluster/Session pair per JVM, so there is effectively a
 * single shared connection: Connect() must be called before getSession()
 * returns anything useful. Keyspace/table creation is idempotent
 * (CREATE ... IF NOT EXISTS), so Connect() is safe to call on every startup.
 */
public class CassandraNativeConnector {
	private static Cluster cluster;
	private static Session session;
	private static final PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
	private static Logger log = Logger.getLogger(CassandraNativeConnector.class);

	// Default contact point; overwritten from the "cassandra.host" property in Connect().
	public static String CASSANDRA_HOSTNAME = "localhost";

	/** @return the shared session, or null if Connect() has not been called yet */
	public static Session getSession () {
		return session;
	}

	/*
	 * connect to the cluster and look whether all tables exist
	 */
	public void Connect() {
		// NOTE(review): upper-case method names are non-standard Java but are
		// kept because callers elsewhere depend on them.

		String cassandrahostname = ph.getProperty("cassandra.host");
		if (null != cassandrahostname) {
			CASSANDRA_HOSTNAME = cassandrahostname;
		}

		cluster = Cluster.builder().addContactPoint(CASSANDRA_HOSTNAME).build();

		// Print cluster topology; useful when the contact point is misconfigured.
		Metadata metadata = cluster.getMetadata();
		System.out.printf("Connected to cluster: %s\n", metadata.getClusterName());
		for (Host host : metadata.getAllHosts()) {
			System.out.printf("Datatacenter: %s; Host: %s; Rack: %s\n", host.getDatacenter(), host.getAddress(), host.getRack());
		}
		session = cluster.connect();
		CreateMainTables();
		System.out.println("Cassandra connected");
	}

	// Creates the ProteinKeyspace keyspace plus every table and secondary index
	// used by ProteoCache. All statements use IF NOT EXISTS.
	private void CreateMainTables() {
		// NOTE(review): replication_factor 3 assumes a cluster of >= 3 nodes;
		// confirm this matches the deployment.
		session.execute("CREATE KEYSPACE IF NOT EXISTS ProteinKeyspace WITH replication = {'class':'SimpleStrategy', 'replication_factor':3};");
		session.execute("USE ProteinKeyspace");

		// Global name/value parameters (e.g. 'EarliestJobDate').
		session.execute("CREATE TABLE IF NOT EXISTS MainParameters "
				+ "(Name ascii, Value ascii, PRIMARY KEY(Name));");

		// One row per job: query protein plus per-method prediction strings.
		session.execute("CREATE TABLE IF NOT EXISTS ProteinRow "
				+ "(Protein ascii, JobID ascii, Predictions map<ascii,ascii>, PRIMARY KEY(JobID));");

		// Per-job bookkeeping: timing, client IP and statuses.
		session.execute("CREATE TABLE IF NOT EXISTS ProteinLog "
				+ "(JobID ascii, DataBegin ascii, DataEnd ascii, ip ascii, FinalStatus ascii, "
				+ "ExecutionStatus ascii, Protein ascii, PRIMARY KEY(JobID));");

		// Job start time (ms) to protein, used for per-day statistics.
		session.execute("CREATE TABLE IF NOT EXISTS ProteinData "
				+ "(jobtime bigint, JobID ascii, Protein ascii, PRIMARY KEY(JobID));");

		// Full job archive: sequences, predictions, log text and archive link.
		session.execute("CREATE TABLE IF NOT EXISTS JpredArchive "
				+ "(JobID ascii, Protein varchar, IP ascii, StartTime bigint, ExecTime int, alignment map<ascii,ascii>, "
				+ "predictions map<ascii,ascii>, ArchiveLink varchar, LOG varchar, PRIMARY KEY(JobID));");

		// Aggregated per-day job counts.
		session.execute("CREATE TABLE IF NOT EXISTS JobDateInfo "
				+ "(jobday bigint, Total bigint, Program varchar, Version varchar, PRIMARY KEY(jobday));");

		// Secondary indexes for lookups by protein sequence and by day.
		session.execute("CREATE INDEX IF NOT EXISTS ProteinSeq ON ProteinRow (protein);");
		session.execute("CREATE INDEX IF NOT EXISTS JobDateStamp ON ProteinData (jobtime);");
	}

	/** Shuts down the shared session and cluster. getSession() is stale afterwards. */
	public void Closing() {
		session.shutdown();
		cluster.shutdown();
		System.out.println("Cassandra has been shut down");
	}

	/*
	 * getting earlest date of jobs from the db
	 */
	public static long getEarliestDateInDB() {
		// Returns the stored 'EarliestJobDate' (ms since epoch); falls back to
		// the current time when the parameter has never been written.
		String com = "SELECT * FROM MainParameters WHERE Name = 'EarliestJobDate';";
		System.out.println("Command: " + com);
		ResultSet results = session.execute(com);

		if (!results.isExhausted()) {
			Row r = results.one();
			return Long.parseLong(r.getString("Value"));
		}
		Calendar cal = Calendar.getInstance();
		return cal.getTimeInMillis();
	}

}
--- /dev/null
+package compbio.cassandra;
+
+import java.util.Calendar;
+import java.util.Date;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.BoundStatement;
+import com.datastax.driver.core.exceptions.QueryExecutionException;
+
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.cassandra.CassandraNativeConnector;
+
/**
 * Fills derived tables (JobDateInfo, MainParameters) from the primary
 * ProteoCache data. Uses the shared session from CassandraNativeConnector,
 * which must be connected before this class is instantiated.
 */
public class CassandraNewTableWriter {
	private Session session;
	private static Logger log = Logger.getLogger(CassandraNativeConnector.class);

	public CassandraNewTableWriter() {
		Session inis = CassandraNativeConnector.getSession();
		setSession(inis);
	}

	/** Replaces the session used for all queries; s must not be null. */
	public void setSession(Session s) {
		assert s != null;
		session = s;
	}

	// True when ProteinLog has no row for jobid.
	// NOTE(review): name typo ("Insterted") and duplication with
	// CassandraWriter.JobisNotInsterted kept for caller compatibility.
	public boolean JobisNotInsterted(String jobid) {
		ResultSet results1 = session.execute("SELECT * FROM ProteinLog WHERE JobID = '" + jobid + "';");
		if (results1.isExhausted()) {
			return true;
		}
		return false;
	}

	/** True when JpredArchive has no row for jobid. */
	public boolean JobisNotArchived(String jobid) {
		ResultSet results1 = session.execute("SELECT * FROM JpredArchive WHERE JobID = '" + jobid + "';");
		if (results1.isExhausted()) {
			return true;
		}
		return false;
	}

	/*
	 * fill new table
	 */
	public void FillNewTable() {
		// Walks day by day from the earliest recorded job date up to now,
		// counting ProteinData rows per day and writing the count to JobDateInfo.
		final long StartTime = System.currentTimeMillis();
		long erldate = CassandraNativeConnector.getEarliestDateInDB();
		Calendar runnicCal = Calendar.getInstance();
		runnicCal.setTime(new Date(erldate));
		Calendar endcal = Calendar.getInstance();
		long endTime = endcal.getTime().getTime();
		for (Date date = runnicCal.getTime(); date.getTime() < endTime; runnicCal.add(Calendar.DATE, 1), date = runnicCal.getTime()) {
			try {
				// NOTE(review): results.all().size() pulls every matching row
				// into memory just to count them; SELECT count(*) would be cheaper.
				ResultSet results = session.execute("SELECT * FROM ProteinData WHERE jobtime = " + date.getTime() + ";");
				session.execute("INSERT INTO JobDateInfo " + "(jobday, Total)" + " VALUES (" + date.getTime() + "," + results.all().size()
						+ ");");
			} catch (QueryExecutionException e) {
				e.printStackTrace();
			}
		}
		System.out.println("Table JobDateInfo filled: total time is " + (System.currentTimeMillis() - StartTime) + " msec");
	}

	/*
	 * fill a table with the database global parameters
	 */
	public void FillMainDBParameters() {
		// Scans ProteinData (capped at 2M rows) for the smallest jobtime and
		// stores it as the 'EarliestJobDate' parameter (as a decimal string).
		Date bubu = new Date(CassandraNativeConnector.getEarliestDateInDB());
		System.out.println("Old EarliestJobDate is " + bubu.toString());

		String query1 = "SELECT jobtime FROM ProteinData LIMIT 2000000;";
		System.out.println("Query db: " + query1);
		ResultSet results = session.execute(query1);
		Calendar endcal = Calendar.getInstance();
		long newearliestday = endcal.getTime().getTime();
		while (!results.isExhausted()) {
			Row r = results.one();
			long day = r.getLong("jobtime");
			if (day < newearliestday) {
				newearliestday = day;
			}
		}
		String com = "INSERT INTO MainParameters " + "(Name, Value)" + " VALUES ('EarliestJobDate','" + String.valueOf(newearliestday)
				+ "');";
		session.execute(com);
		Date gaga = new Date(newearliestday);
		System.out.println("New EarliestJobDate is " + gaga.toString());
	}
}
--- /dev/null
+package compbio.cassandra;
+
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.ResultSet;
+
+public class CassandraReader {
+ private Session session;
+ private static Logger log = Logger.getLogger(CassandraNativeConnector.class);
+
+ public CassandraReader() {
+ Session inis = CassandraNativeConnector.getSession();
+ setSession (inis);
+ }
+
+ public void setSession(Session s) {
+ assert s != null;
+ session = s;
+ }
+
+ /*
+ * getting data from the db
+ */
+ public List<Pair<String, String>> ReadProteinDataTable() {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT DataBegin,DataEnd FROM ProteinLog;";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ final long queryTime = System.currentTimeMillis();
+ List<Row> rows = results.all();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+
+ List<Pair<String, String>> res = new ArrayList<Pair<String, String>>();
+ int c = 0;
+ for (Row r : rows) {
+ Pair<String, String> pair = new Pair<String, String>(r.getString("DataBegin"), r.getString("DataEnd"));
+ res.add(pair);
+ ++c;
+ }
+ final long endTime = System.currentTimeMillis();
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+
+ /*
+ * getting data from the db JobDateInfo
+ */
+ public long ReadDateTable(long queryDate) {
+ ResultSet results = session.execute("SELECT Total FROM JobDateInfo WHERE jobday = " + queryDate + ";");
+ if (results.isExhausted())
+ return 0;
+ Row therow = results.one();
+ long res = therow.getLong("Total");
+ if (!results.isExhausted()) {
+ Date date = new Date (queryDate);
+ log.warn("CassandraReader.ReadDateTable: date row for " + date.toString () + " ("+ queryDate + ") duplicated ");
+ }
+ return res;
+ }
+ /*
+ * getting whole protein sequence from the db ProteinRow
+ */
+ public List<StructureProteinPrediction> ReadWholeSequence(String queryProtein) {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT JobID, Predictions FROM ProteinRow WHERE Protein = '" + queryProtein + "';";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ if (results.isExhausted())
+ return null;
+ final long queryTime = System.currentTimeMillis();
+ List<Row> rows = results.all();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println(" rows analysed, " + rows.size());
+ List<StructureProteinPrediction> res = new ArrayList<StructureProteinPrediction>();
+ int c = 0;
+ for (Row r : rows) {
+ StructureProteinPrediction structure = new StructureProteinPrediction(queryProtein, r.getString("JobID"), r.getMap(
+ "Predictions", String.class, String.class));
+ res.add(structure);
+ ++c;
+ }
+ final long endTime = System.currentTimeMillis();
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+
+ /*
+ * getting part of protein sequence from the db ProteinRow
+ */
+ public List<StructureProteinPrediction> ReadPartOfSequence(String queryProtein) {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT * FROM ProteinRow;";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ if (results.isExhausted())
+ return null;
+ final long queryTime = System.currentTimeMillis();
+ List<Row> rows = results.all();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println(" rows analysed, " + rows.size());
+ List<StructureProteinPrediction> res = new ArrayList<StructureProteinPrediction>();
+ int c = 0;
+ for (Row r : rows) {
+ String prot = r.getString("Protein");
+ if (prot.matches("(.*)" + queryProtein + "(.*)")) {
+ StructureProteinPrediction structure = new StructureProteinPrediction(prot, r.getString("JobID"), r.getMap("Predictions",
+ String.class, String.class));
+ res.add(structure);
+ ++c;
+ }
+ }
+ final long endTime = System.currentTimeMillis();
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+
+ /*
+ * getting protein sequences by counter
+ */
+ public Map<String, Integer> ReadProteinSequenceByCounter() {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT Protein FROM ProteinRow;";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ if (results.isExhausted())
+ return null;
+ final long queryTime = System.currentTimeMillis();
+ List<Row> rows = results.all();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ System.out.println(" rows analysed, " + rows.size());
+ Map<String, Integer> res = new HashMap<String, Integer>();
+ int c = 0;
+ for (Row r : rows) {
+ String protein = r.getString("Protein");
+ if (res.containsKey(protein))
+ res.put(protein, res.get(protein) + 1);
+ else
+ res.put(protein, 1);
+ }
+ final long endTime = System.currentTimeMillis();
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+
+ /*
+ * getting protein sequences by counter
+ */
+ public StructureJobLog ReadJobLog(String jobid) {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT * FROM ProteinLog WHERE JobID = '" + jobid + "';";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ if (results.isExhausted())
+ return null;
+ final long queryTime = System.currentTimeMillis();
+ Row row = results.one();
+ String com1 = "SELECT * FROM ProteinRow WHERE JobID = '" + jobid + "' ALLOW FILTERING;";
+ System.out.println("Command: " + com1);
+ ResultSet results1 = session.execute(com1);
+ if (results1.isExhausted())
+ return null;
+ Row row1 = results1.one();
+ StructureJobLog res = new StructureJobLog(row.getString("Protein"), row.getString("JobID"), row.getString("DataBegin"),
+ row.getString("DataEnd"), row.getString("ip"), row1.getMap("Predictions", String.class, String.class));
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+ final long endTime = System.currentTimeMillis();
+ System.out.println(" rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+}
--- /dev/null
+package compbio.cassandra;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.ResultSet;
+
+import compbio.engine.JpredJob;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
+
/**
 * Write-side access to the ProteoCache Cassandra tables: inserts finished
 * Jpred jobs into the query tables and into the long-term archive. Uses the
 * shared session from CassandraNativeConnector, which must be connected first.
 *
 * NOTE(review): all CQL is built by string concatenation from job fields; if
 * any of those fields can carry user-controlled text with quotes, these
 * statements should move to PreparedStatement with bound values.
 */
public class CassandraWriter {
	private Session session;
	private static final PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
	private static Logger log = Logger.getLogger(CassandraNativeConnector.class);

	CassandraWriter() {
		Session inis = CassandraNativeConnector.getSession();
		setSession(inis);
	}

	/** Replaces the session used for all statements; s must not be null. */
	public void setSession(Session s) {
		assert s != null;
		session = s;
	}

	// True when ProteinLog has no row for jobid (note: name typo kept for
	// caller compatibility; duplicated in CassandraNewTableWriter).
	public boolean JobisNotInsterted(String jobid) {
		ResultSet results1 = session.execute("SELECT * FROM ProteinLog WHERE JobID = '" + jobid + "';");
		if (results1.isExhausted()) {
			return true;
		}
		return false;
	}

	/** True when JpredArchive has no row for jobid. */
	public boolean JobisNotArchived(String jobid) {
		ResultSet results1 = session.execute("SELECT * FROM JpredArchive WHERE JobID = '" + jobid + "';");
		if (results1.isExhausted()) {
			return true;
		}
		return false;
	}

	/*
	 * inserting data into the tables for queries
	 */
	// Inserts the job into ProteinLog, ProteinData and ProteinRow (skipping
	// jobs already present), then refreshes the derived EarliestJobDate
	// parameter and the per-day JobDateInfo counter.
	// Returns 1 when the job was inserted, 0 when it already existed.
	public int FormQueryTables(JpredJob job) {
		if (JobisNotInsterted(job.getJobID())) {
			String id = job.getJobID();
			String ip = job.getIP();
			String protein = job.getProtein();
			String finalstatus = job.getFinalStatus();
			String execstatus = job.getExecutionStatus();
			String com1 = "INSERT INTO ProteinLog " + "(JobID, IP, DataBegin, DataEnd, FinalStatus, ExecutionStatus, Protein)"
					+ " VALUES ('" + id + "','" + ip + "','" + job.getStartingTimeStr() + "','" + job.getEndTimeStr() + "','" + finalstatus
					+ "','" + execstatus + "','" + protein + "');";
			session.execute(com1);

			String com2 = "INSERT INTO ProteinData " + "(jobtime, JobID, Protein)" + " VALUES (" + job.getStartingDate() + ",'" + id
					+ "','" + protein + "');";
			session.execute(com2);

			// Build the CQL map literal {'name':'prediction', ...} for ProteinRow.
			String allpredictions = "";
			List<FastaSequence> pr = job.getPredictions();
			for (FastaSequence pred : pr) {
				String predictionname = pred.getId();
				String prediction = pred.getSequence().replaceAll("\n", "");
				allpredictions += "'" + predictionname + "':'" + prediction + "',";
			}
			String final_prediction = "";
			if (!allpredictions.equals("")) {
				// drop the trailing comma
				final_prediction = allpredictions.substring(0, allpredictions.length() - 1);
			}

			String check2 = "SELECT * FROM ProteinRow WHERE JobID = '" + job.getJobID() + "';";
			ResultSet results2 = session.execute(check2);
			if (results2.isExhausted()) {
				String com3 = "INSERT INTO ProteinRow " + "(Protein, JobID, Predictions)" + " VALUES ('" + protein + "','" + id + "',{"
						+ final_prediction + "});";
				session.execute(com3);
			}

			// update some internal query tables
			// Push EarliestJobDate back if this job started before the stored one.
			// NOTE(review): the stored Value is parsed as a long, so
			// getStartingDateStr() must produce a plain decimal string - confirm.
			String check3 = "SELECT * FROM MainParameters WHERE Name = 'EarliestJobDate';";
			ResultSet results3 = session.execute(check3);
			boolean updateparameter = true;
			if (!results3.isExhausted()) {
				Row r = results3.one();
				if (job.getStartingDate() >= Long.parseLong(r.getString("Value")))
					updateparameter = false;
			}
			if (updateparameter) {
				String com = "INSERT INTO MainParameters " + "(Name, Value)" + " VALUES ('EarliestJobDate','" + job.getStartingDateStr()
						+ "');";
				session.execute(com);
			}
			// Increment the per-day job counter.
			// NOTE(review): read-then-write is not atomic; concurrent writers
			// for the same day could lose updates.
			String check4 = "SELECT * FROM JobDateInfo WHERE jobday = " + job.getStartingDate() + ";";
			ResultSet results4 = session.execute(check4);
			updateparameter = true;
			int njobs = 1;
			if (!results4.isExhausted()) {
				Row r = results4.one();
				njobs += r.getLong("Total");
			}
			String com = "INSERT INTO JobDateInfo " + "(jobday, Total)" + " VALUES (" + job.getStartingDate() + "," + njobs + ");";
			session.execute(com);

			return 1;
		}
		return 0;
	}

	/*
	 * insert data from a real Jpred job: timing+IP, Execution Status, Final
	 * status, protein sequence, predictions, alignment, LOG and tar.gz files
	 */
	// Returns 1 when the job was archived, 0 when it already was.
	public int ArchiveData(JpredJob job, String archivepath) {
		if (JobisNotArchived(job.getJobID())) {
			String id = job.getJobID();
			// NOTE(review): this local shadows the class Logger field 'log';
			// quotes are stripped so the string can be embedded in the CQL below.
			String log = job.getLog().replaceAll("'", "");
			String com = "INSERT INTO JpredArchive (JobID, Protein, IP, StartTime, ExecTime,LOG, ArchiveLink) VALUES ('" + id + "','"
					+ job.getProtein() + "','" + job.getIP() + "'," + job.getStartingTime() + "," + job.getExecutionTime() + ",'" + log
					+ "','" + archivepath + "');";
			session.execute(com);

			// Append each prediction to the predictions map, one UPDATE per entry.
			List<FastaSequence> predictions = job.getPredictions();
			for (FastaSequence p : predictions) {
				session.execute("UPDATE JpredArchive SET predictions = predictions + {'" + p.getId() + "':'"
						+ p.getSequence().replaceAll("\n", "") + "'} WHERE JobID = '" + id + "';");
			}

			// Likewise for the alignment sequences.
			List<FastaSequence> seqs = job.getAlignment();
			for (FastaSequence s : seqs) {
				session.execute("UPDATE JpredArchive SET alignment = alignment + {'" + s.getId() + "':'"
						+ s.getSequence().replaceAll("\n", "") + "'} WHERE JobID = '" + id + "';");
			}
			return 1;
		}
		return 0;
	}

}
--- /dev/null
+package compbio.cassandra;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collections;
+
+public class DataBase {
+ private String date;
+ private int total;
+ private int totalJobs;
+ private int totalId;
+ private String id;
+ private String prot;
+ private String jpred;
+ private List<String> subProt;
+ private List<Integer> timeRez;
+ private List<Integer> timeTotalExec;
+ private StructureJobLog logInfo;
+
+ public DataBase() {
+ }
+
+ public DataBase(String dat, int total) {
+ this.date = dat;
+ this.total = total;
+ }
+
+ public void setDate(String dat) {
+ this.date = dat;
+ }
+
+ public String getDate() {
+ return date;
+ }
+
+ public void setTotal(int tot) {
+ this.total = tot;
+ }
+
+ public int getTotal() {
+ return total;
+ }
+
+
+ public void setTotalJobs(int totalJobs) {
+ this.totalJobs = totalJobs;
+ }
+
+ public int getTotalJobs() {
+ return totalJobs;
+ }
+
+ public void setTotalId(int totId) {
+ this.totalId = totId;
+ }
+
+ public int getTotalId() {
+ return totalId;
+ }
+
+ public void setProt(String prot) {
+ this.prot = prot;
+ }
+
+ public String getProt() {
+ return prot;
+ }
+
+ public void setJpred(String jpred) {
+ this.jpred = jpred;
+ }
+
+ public String getJpred() {
+ return jpred;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setSubProt(List<String> subProt) {
+ this.subProt = subProt;
+ }
+
+ public List<String> getSubProt() {
+ return subProt;
+ }
+
+ public void setTimeRez(List<Integer> timeRez) {
+ this.timeRez = timeRez;
+ }
+
+ public List<Integer> getTimeRez() {
+ return timeRez;
+ }
+
+ public void setTimeTotalExec(List<Integer> timeTotalExec) {
+ this.timeTotalExec = timeTotalExec;
+ }
+
+ public List<Integer> getTimeTotalExec() {
+ return timeTotalExec;
+ }
+
+ public void setLogInfo(StructureJobLog logInfo){
+ this.logInfo = logInfo;
+ }
+
+ public StructureJobLog getLogInfo() {
+ return logInfo;
+ }
+
+}
--- /dev/null
+package compbio.cassandra;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.Iterator;
+import java.util.Scanner;
+
+//import compbio.util.Util;
+
+/**
+ * Reads files with FASTA formatted sequences. All the information in the FASTA
+ * header is preserved including trailing white spaces. All the white spaces are
+ * removed from the sequence.
+ *
+ * Examples of the correct input:
+ *
+ * <pre>
+ *
+ * >zedpshvyzg
+ * GCQDKNNIAELNEIMGTTRSPSDWQHMKGASPRAEIGLTGKKDSWWRHCCSKEFNKTPPPIHPDMKRWGWMWNRENFEKFLIDNFLNPPCPRLMLTKGTWWRHEDLCHEIFWSTLRWLCLGNQSFSAMIWGHLCECHRMIWWESNEHMFWLKFRRALKKMNSNGPCMGPDNREWMITNRMGKEFCGPAFAGDCQSCWRKCHKTNKICFNEKKGTPTKIDHEQKDIMDILKDIDNHRNWKQCQLWLLTSKSTDQESTTMLTWSTWRDFFIIIKQPFDHKCRGALDANGDFQIAAELKWPAPMIILRQNQKTMHDKSCHHFFTNRCPLMHTTRANDKQCSWHTRKQFICQQDFTTWQHRPDTHRILPSWCMSTRRKNHIKNTPALAFSTCEMGDLPNGWAPGTIILQRQFTQAIKLPQETTGWPRCDPKFDHWNMSKWLRQLLGRDDEMIPPQCD
+ *
+ * >xovkactesa
+ * CPLSKWWNRRAFLSHTANHWMILMTWEGPHDGESKMRIAMMKWSPCKPTMSHFRCGLDAWAEPIRQIACESTFRM
+ * FCTTPRPIHKLTEMWGHMNGWTGAFCRQLECEWMMPPRHPHPCTSTFNNNKKRLIGQIPNEGKQLFINFQKPQHG
+ * FSESDIWIWKDNPTAWHEGLTIAGIGDGQHCWNWMPMPWSGAPTSNALIEFWTWLGMIGTRCKTQGMWWDAMNHH
+ * DQFELSANAHIAAHHMEKKMILKPDDRNLGDDTWMPPGKIWMRMFAKNTNACWPEGCRDDNEEDDCGTHNLHRMC
+ *
+ * >ntazzewyvv
+ * CGCKIF D D NMKDNNRHG TDIKKHGFMH IRHPE KRDDC FDNHCIMPKHRRWGLWD
+ * EASINM AQQWRSLPPSRIMKLNG HGCDCMHSHMEAD DTKQSGIKGTFWNG HDAQWLCRWG
+ * EFITEA WWGRWGAITFFHAH ENKNEIQECSDQNLKE SRTTCEIID TCHLFTRHLDGW
+ * RCEKCQANATHMTW ACTKSCAEQW FCAKELMMN
+ * W KQMGWRCKIFRKLFRDNCWID FELPWWPICFCCKGLSTKSHSAHDGDQCRRW WPDCARDWLGPGIRGEF
+ * FCTHICQQLQRNFWCGCFRWNIEKRMFEIFDDNMAAHWKKCMHFKFLIRIHRHGPITMKMTWCRSGCCFGKTRRLPDSSFISAFLDPKHHRDGSGMMMWSSEMRSCAIPDPQQAWNQGKWIGQIKDWNICFAWPIRENQQCWATPHEMPSGFHFILEKWDALAHPHMHIRQKKCWAWAFLSLMSSTHSDMATFQWAIPGHNIWSNWDNIICGWPRI
+ *
+ * > 12 d t y wi k jbke
+ * KLSHHDCD
+ * N
+ * H
+ * HSKCTEPHCGNSHQMLHRDP
+ * CCDQCQSWEAENWCASMRKAILF
+ *
+ * </pre>
+ *
+ * @author Peter Troshin
+ * @version 1.0 April 2011
+ *
+ */
+public class FastaReader implements Iterator<FastaSequence> {
+
+ private final Scanner input;
+ /**
+ * Delimiter for the scanner
+ */
+ private final String DELIM = ">";
+
+ /**
+ * Header data can contain non-ASCII symbols and read in UTF8
+ *
+ * @param inputFile
+ * the file containing the list of FASTA formatted sequences to
+ * read from
+ * @throws FileNotFoundException
+ * if the input file is not found
+ * @throws IllegalStateException
+ * if the close method was called on this instance
+ *
+ */
+ public FastaReader(final String inputFile) throws FileNotFoundException {
+ input = new Scanner(new File(inputFile), "UTF8");
+ input.useDelimiter(DELIM);
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+
+ @Override
+ public void run() {
+ if (input != null) {
+ input.close();
+ }
+ }
+ });
+ }
+
+ /**
+ * This class will not close the incoming stream! So the client should do
+ * so.
+ *
+ * @param inputStream
+ * @throws FileNotFoundException
+ */
+ public FastaReader(final InputStream inputStream)
+ throws FileNotFoundException {
+ input = new Scanner(inputStream);
+ input.useDelimiter(DELIM);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @throws IllegalStateException
+ * if the close method was called on this instance
+ */
+ @Override
+ public boolean hasNext() {
+ return input.hasNext();
+ }
+
+ /**
+ * Reads the next FastaSequence from the input
+ *
+ * @throws AssertionError
+ * if the header or the sequence is missing
+ * @throws IllegalStateException
+ * if the close method was called on this instance
+ * @throws MismatchException
+ * - if there were no more FastaSequence's.
+ */
+ @Override
+ public FastaSequence next() {
+ String fastaHeader = input.next();
+ while (fastaHeader.indexOf("\n") < 0 && input.hasNext()) {
+ fastaHeader = fastaHeader.concat(">");
+ fastaHeader = fastaHeader.concat(input.next());
+ }
+ return FastaReader.toFastaSequence(fastaHeader);
+ }
+
+ /**
+ * Not implemented
+ */
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * Call this method to close the connection to the input file if you want to
+ * free up the resources. The connection will be closed on the JVM shutdown
+ * if this method was not called explicitly. No further reading on this
+ * instance of the FastaReader will be possible after calling this method.
+ */
+ public void close() {
+ input.close();
+ }
+
+ private static FastaSequence toFastaSequence(final String singleFastaEntry) {
+
+ // assert !Util.isEmpty(singleFastaEntry) :
+ // "Empty String where FASTA sequence is expected!";
+
+ int nlineidx = singleFastaEntry.indexOf("\n");
+ if (nlineidx < 0) {
+ throw new AssertionError(
+ "The FASTA sequence must contain the header information"
+ + " separated by the new line from the sequence. Given sequence does not appear to "
+ + "contain the header! Given data:\n "
+ + singleFastaEntry);
+ }
+ String header = singleFastaEntry.substring(0, nlineidx);
+
+ // Get rid of the new line chars (should cover common cases)
+ header = header.replaceAll("\r", "");
+
+ String sequence = singleFastaEntry.substring(nlineidx);
+
+ /*
+ * if (Util.isEmpty(sequence)) { throw new AssertionError(
+ * "Empty sequences are not allowed! Please make sure the " +
+ * " data is in the FASTA format! Given data:\n " + singleFastaEntry); }
+ */
+ return new FastaSequence(header, sequence);
+ }
+}
--- /dev/null
+package compbio.cassandra;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+
+//import compbio.util.SysPrefs;
+//import compbio.util.annotation.Immutable;
+
+/**
+ * A FASTA formatted sequence. Please note that this class does not make any
+ * assumptions as to what sequence it stores e.g. it could be nucleotide,
+ * protein or even gapped alignment sequence! The only guarantee it makes is
+ * that the sequence does not contain white space characters e.g. spaces, new
+ * lines etc
+ *
+ * @author pvtroshin
+ *
+ * @version 1.0 September 2009
+ */
+
+@XmlAccessorType(XmlAccessType.FIELD)
+//@Immutable
+public class FastaSequence {
+
+ /**
+ * Sequence id
+ */
+ private String id;
+
+ // TODO what about gapped sequence here! should be indicated
+ /**
+ * Returns the string representation of sequence
+ */
+ private String sequence;
+
+ FastaSequence() {
+ // Default constructor for JaxB
+ }
+
+ /**
+ * Upon construction the any whitespace characters are removed from the
+ * sequence
+ *
+ * @param id
+ * @param sequence
+ */
+ public FastaSequence(String id, String sequence) {
+ this.id = id;
+ this.sequence = sequence;
+ }
+
+ /**
+ * Gets the value of id
+ *
+ * @return the value of id
+ */
+ public String getId() {
+ return this.id;
+ }
+
+ /**
+ * Gets the value of sequence
+ *
+ * @return the value of sequence
+ */
+ public String getSequence() {
+ return this.sequence;
+ }
+
+ public static int countMatchesInSequence(final String theString,
+ final String theRegExp) {
+ final Pattern p = Pattern.compile(theRegExp);
+ final Matcher m = p.matcher(theString);
+ int cnt = 0;
+ while (m.find()) {
+ cnt++;
+ }
+ return cnt;
+ }
+
+ public String getFormattedFasta() {
+ return getFormatedSequence(80);
+ }
+
+ /**
+ *
+ * @return one line name, next line sequence, no matter what the sequence
+ * length is
+ */
+/* public String getOnelineFasta() {
+ String fasta = ">" + getId() + SysPrefs.newlinechar;
+ fasta += getSequence() + SysPrefs.newlinechar;
+ return fasta;
+ }
+
+ /**
+ * Format sequence per width letter in one string. Without spaces.
+ *
+ * @return multiple line formated sequence, one line width letters length
+ *
+ */
+ public String getFormatedSequence(final int width) {
+ if (sequence == null) {
+ return "";
+ }
+
+ assert width >= 0 : "Wrong width parameter ";
+
+ final StringBuilder sb = new StringBuilder(sequence);
+ // int tail = nrOfWindows % WIN_SIZE;
+ // final int turns = (nrOfWindows - tail) / WIN_SIZE;
+
+ int tailLen = sequence.length() % width;
+ // add up inserted new line chars
+ int nchunks = (sequence.length() - tailLen) / width;
+ int nlineCharcounter = 0;
+ int insPos = 0;
+ for (int i = 1; i <= nchunks; i++) {
+ insPos = width * i + nlineCharcounter;
+ // to prevent inserting new line in the very end of a sequence then
+ // it would have failed.
+ if (sb.length() <= insPos) {
+ break;
+ }
+ sb.insert(insPos, "\n");
+ nlineCharcounter++;
+ }
+ // sb.insert(insPos + tailLen, "\n");
+ return sb.toString();
+ }
+
+ /**
+ *
+ * @return sequence length
+ */
+ public int getLength() {
+ return this.sequence.length();
+ }
+
+ /**
+ * Same as oneLineFasta
+ */
+// @Override
+// public String toString() {
+// return this.getOnelineFasta();
+ // }
+
+ @Override
+ public int hashCode() {
+ final int prime = 17;
+ int result = 1;
+ result = prime * result + ((id == null) ? 0 : id.hashCode());
+ result = prime * result
+ + ((sequence == null) ? 0 : sequence.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (!(obj instanceof FastaSequence)) {
+ return false;
+ }
+ FastaSequence fs = (FastaSequence) obj;
+ if (!fs.getId().equals(this.getId())) {
+ return false;
+ }
+ if (!fs.getSequence().equalsIgnoreCase(this.getSequence())) {
+ return false;
+ }
+ return true;
+ }
+
+}
--- /dev/null
+package compbio.cassandra;
+
+import java.io.IOException;
+
/**
 * Parser for Jpred job meta-information. Implementations read a job list from
 * some source (e.g. HTTP, local files) and load the jobs into ProteoCache.
 */
public interface JpredParser {

	/**
	 * Defines a source file with metainformation of Jpred Jobs.
	 *
	 * @param newsourceprefix
	 *            URL or directory prefix under which job results are found
	 */
	void setSource (String newsourceprefix);

	/**
	 * Makes real parsing of the source file, covering jobs from the last
	 * nDays days.
	 *
	 * @param source
	 *            the job list to parse
	 * @param nDays
	 *            how many days back to process
	 * @throws IOException
	 *             if the source cannot be read
	 */
	void Parsing(String source, int nDays) throws IOException;
}
--- /dev/null
+package compbio.cassandra;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+
+import compbio.cassandra.JpredParser;
+import compbio.engine.JpredJob;
+
+public class JpredParserHTTP implements JpredParser {
+ private CassandraWriter cw = new CassandraWriter();
+ private String dirprefix;
+ private List<FastaSequence> alignment;
+ private List<FastaSequence> predictions;
+ private int countNoData;
+
	/** Creates a parser reading from the default public Jpred results URL. */
	public JpredParserHTTP() {
		dirprefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
	}

	/** Creates a parser reading from the given results URL prefix. */
	public JpredParserHTTP(String sourceurl) {
		dirprefix = sourceurl;
	}

	/** Replaces the results URL prefix used for subsequent parsing. */
	public void setSource(String newsourceprefix) {
		dirprefix = newsourceprefix;
	}
+
	/**
	 * Parses the job lists for each of the last nDays days, ending today.
	 * Dates are formatted yyyy/M/d (no zero padding) to address the per-day
	 * results directories.
	 */
	public void Parsing(String source, int nDays) throws IOException {
		Calendar cal = Calendar.getInstance();
		cal.add(Calendar.DATE, -nDays);
		for (int i = 0; i < nDays; ++i) {
			cal.add(Calendar.DATE, 1);
			// Calendar.MONTH is 0-based, hence the +1.
			String date = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DATE);
			ParsingForDate(source, date);
		}
	}
+
+ /*
+ * The method parses the Jpred output concise file in the FASTA format If
+ * there is a record with ID = QUERY or jobid, this a "one protein" job
+ * otherwise this is an alignment job
+ */
+ private String parsePredictions(final InputStream stream, String jobid) throws FileNotFoundException {
+ final FastaReader fr = new FastaReader(stream);
+ String protein = "";
+ alignment = new ArrayList<FastaSequence>();
+ predictions = new ArrayList<FastaSequence>();
+ while (fr.hasNext()) {
+ final FastaSequence fs = fr.next();
+ String seqid = fs.getId();
+ String seq = fs.getSequence().replaceAll("\n", "");
+ if (seqid.equals("jnetpred") || seqid.equals("Lupas_21") || seqid.equals("Lupas_14") || seqid.equals("Lupas_28")
+ || seqid.equals("JNETSOL25") || seqid.equals("JNETSOL5") || seqid.equals("JNETSOL0") || seqid.equals("JNETCONF")
+ || seqid.equals("JNETHMM") || seqid.equals("JNETPSSM") || seqid.equals("JNETCONF")) {
+ predictions.add(fs);
+ } else {
+ alignment.add(fs);
+ if (seqid.equals("QUERY") || seqid.equals(jobid))
+ protein = seq;
+ }
+ }
+ return protein;
+ }
+
+ private String parseLogFile(final InputStream stream) throws IOException {
+ String out = "";
+ BufferedReader buffer = new BufferedReader(new InputStreamReader(stream));
+ String line;
+ while (null != (line = buffer.readLine())) {
+ out += line;
+ }
+ return out;
+ }
+
+ private int analyseJob(String[] jobinfo) throws IOException {
+ boolean running = true;
+ boolean ConcisefileExists = false;
+ boolean LogfileExists = false;
+ JpredJob job = new JpredJob (jobinfo[jobinfo.length - 1], jobinfo[0], jobinfo[1]);
+ job.setIP(jobinfo[2]);
+ Date currDate = new Date();
+ String maindir = dirprefix + "/" + job.getJobID() + "/";
+
+ //System.out.println("analyzing job " + job.getJobID());
+ try {
+ URL dirurl = new URL(maindir);
+ HttpURLConnection httpConnection_dirurl = (HttpURLConnection) dirurl.openConnection();
+ if (httpConnection_dirurl.getResponseCode() < 199 || 300 <= httpConnection_dirurl.getResponseCode()) {
+ return 0;
+ }
+ URL conciseurl = new URL(maindir + job.getJobID() + ".concise.fasta");
+ URL archiveurl = new URL(maindir + job.getJobID() + ".tar.gz");
+ URL logurl = new URL(maindir + "LOG");
+ HttpURLConnection httpConnection_conciseurl = (HttpURLConnection) conciseurl.openConnection();
+ HttpURLConnection httpConnection_logurl = (HttpURLConnection) logurl.openConnection();
+ HttpURLConnection httpConnection_archiveurl = (HttpURLConnection) archiveurl.openConnection();
+ if (199 < httpConnection_conciseurl.getResponseCode() && httpConnection_conciseurl.getResponseCode() < 300) {
+ ConcisefileExists = true;
+ running = false;
+ try {
+ job.setProtein(parsePredictions(conciseurl.openStream(), job.getJobID()));
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ } else {
+ // The job still can be running of failed...
+ ++countNoData;
+ }
+ if (199 < httpConnection_logurl.getResponseCode() && httpConnection_logurl.getResponseCode() < 300) {
+ LogfileExists = true;
+ job.setLog(parseLogFile(logurl.openStream()));
+ } else {
+ // The job has not been started at all...
+ job.setExecutionStatus("FAIL");
+ job.setFinalStatus("STOPPED");
+ running = false;
+ }
+ if (job.getLog().matches("(.*)TIMEOUT\\syour\\sjob\\stimed\\sout(.*)")) {
+ // blast job was too long (more than 3600 secs by default)...
+ job.setExecutionStatus("FAIL");
+ job.setFinalStatus("TIMEDOUT");
+ running = false;
+ } else if (job.getLog().matches("(.*)Jpred\\serror:\\sDied\\sat(.*)")) {
+ // an internal Jpred error...
+ job.setExecutionStatus("FAIL");
+ job.setFinalStatus("JPREDERROR");
+ running = false;
+ } else if ((currDate.getTime() - job.getEndTime()) / 1000 > 3601 && LogfileExists && !ConcisefileExists) {
+ // the job was stopped with unknown reason...
+ job.setExecutionStatus("FAIL");
+ job.setFinalStatus("STOPPED");
+ running = false;
+ }
+
+ httpConnection_conciseurl.disconnect();
+ httpConnection_logurl.disconnect();
+ httpConnection_archiveurl.disconnect();
+ } catch (MalformedURLException e) {
+ e.printStackTrace();
+ }
+
+ if (!running) {
+ job.setAlignment(alignment);
+ job.setPredictions(predictions);
+ cw.FormQueryTables(job);
+ cw.ArchiveData(job, "undefined");
+ return 1;
+ }
+
+ return 0;
+ }
+
+ private void ParsingForDate(String input, String date) {
+ int totalcount = 0;
+ int countinsertions = 0;
+ int countinserted = 0;
+ int countNotanalyzed = 0;
+ countNoData = 0;
+
+ System.out.println("Inserting jobs for " + date);
+ try {
+ URL url = new URL(input);
+ URLConnection conn = url.openConnection();
+ BufferedReader alljobs = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+ String line;
+
+ while ((line = alljobs.readLine()) != null) {
+ if (line.matches(date + ":(.*)jp_[^\\s]+")) {
+ totalcount++;
+ String[] job = line.split("\\s+");
+ String jobid = job[job.length - 1];
+ if (cw.JobisNotInsterted(jobid)) {
+ countinsertions += analyseJob(job);
+ } else {
+ ++countinserted;
+ }
+ } else {
+ ++countNotanalyzed;
+ }
+ }
+ alljobs.close();
+ System.out.println("Total number of jobs = " + totalcount);
+ System.out.println(" " + countinserted + " jobs inserted already");
+ System.out.println(" " + countNotanalyzed + " not analysed jobs");
+ System.out.println(" " + countNoData + " jobs without *.concise.fasta file (RUNNING or FAILED)");
+ System.out.println(" " + countinsertions + " new job insertions\n");
+ } catch (MalformedURLException e) {
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ ;
+ }
+}
--- /dev/null
+package compbio.cassandra;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.io.FileInputStream;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+
+public class JpredParserLocalFile implements JpredParser {
+ private CassandraWriter cw = new CassandraWriter();
+ private String dirprefix;
+
+ public void setSource(String newsourceprefix) {
+ this.dirprefix = newsourceprefix;
+ }
+
+ public JpredParserLocalFile() {
+ this.dirprefix = "/home/asherstnev/Projects/Java.projects/proteocache/data_stress_test/data.dat";
+ }
+
+ public JpredParserLocalFile(String sourceurl) {
+ this.dirprefix = sourceurl;
+ }
+
+ public void Parsing(String source, int nDays) throws IOException {
+ Calendar cal = Calendar.getInstance();
+ cal.add(Calendar.DATE, -nDays);
+ List<String> alljobs = new ArrayList<String>();
+ File file = new File(source);
+ BufferedReader alljobsfile = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
+ String line;
+
+ while ((line = alljobsfile.readLine()) != null) {
+ alljobs.add(line);
+ }
+ alljobsfile.close();
+
+ System.out.println("Inserting jobs for " + nDays + " days, " + alljobs.size() + " jobs in total");
+ final long startTime = System.currentTimeMillis();
+ for (int i = 0; i < nDays; ++i) {
+ cal.add(Calendar.DATE, 1);
+ int month = cal.get(Calendar.MONTH) + 1;
+ int year = cal.get(Calendar.YEAR);
+ int day = cal.get(Calendar.DATE);
+ String date = year + "/" + month + "/" + day;
+ ParsingForDate(alljobs, date);
+ }
+ final long execTime = System.currentTimeMillis() - startTime;
+ System.out.println("Execution Time = " + execTime + " ms");
+ }
+
+ private void ParsingForDate(List<String> input, String date) {
+ int totalcount = 0;
+ int countNoData = 0;
+ int countUnclearFASTAid = 0;
+ int countinsertions = 0;
+ int countinserted = 0;
+ int counAlignments = 0;
+ int countStrange = 0;
+
+ System.out.println("Inserting jobs for " + date);
+ for (String in : input) {
+ if (in.matches(date + ":(.*)jp_[^\\s]+")) {
+ String[] table = in.split("\\s+");
+ String starttime = table[0];
+ String finishtime = table[1];
+ String ip = table[2];
+ String id = table[table.length - 1];
+ totalcount++;
+ String confilename = dirprefix + "/" + id + "/" + id + ".concise";
+ File confile = new File(confilename);
+ if (confile.exists()) {
+ try {
+ final FastaReader fr = new FastaReader(confilename);
+ final List<FastaSequence> seqs = new ArrayList<FastaSequence>();
+ String newprotein = "";
+ while (fr.hasNext()) {
+ final FastaSequence fs = fr.next();
+ if (fs.getId().equals("QUERY") || fs.getId().equals(id))
+ newprotein = fs.getSequence().replaceAll("\n", "");
+ else if (fs.getId().equals("jnetpred") || fs.getId().equals("JNETPRED")) {
+ seqs.add(fs);
+ }
+ }
+ if (newprotein.equals("")) {
+ countUnclearFASTAid++;
+ } else {
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd");
+ String dateInString1 = starttime.substring(0, starttime.indexOf(":"));
+ long insertdate = 0;
+ try {
+ Date dat = formatter.parse(dateInString1);
+ insertdate = dat.getTime();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ //countinsertions += cw.FormQueryTables(insertdate, starttime, finishtime, ip, id, "OK", "OK", newprotein, seqs);
+ }
+ fr.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ } else {
+ countNoData++;
+ }
+ } else {
+ if (in.matches(date + "(.*)Sequence0/(.*)")) {
+ ++counAlignments;
+ } else {
+ ++countStrange;
+ }
+ }
+ }
+ if (true) {
+ System.out.println("Total number of jobs = " + totalcount);
+ System.out.println(" " + countinserted + " jobs inserted already");
+ System.out.println(" " + counAlignments + " jalview jobs");
+ System.out.println(" " + countStrange + " not analysed jobs");
+ System.out.println(" " + countNoData + " jobs without *.concise.fasta file");
+ System.out.println(" " + countUnclearFASTAid + " jobs with unclear FASTA protein id in *.concise.fasta");
+ System.out.println(" " + countinsertions + " new job insertions\n");
+ }
+ }
+
+}
--- /dev/null
+package compbio.cassandra;
+
/**
 * Minimal immutable 2-tuple.
 *
 * @param <K> type of the first element
 * @param <V> type of the second element
 */
public class Pair<K, V> {

	private final K first;
	private final V second;

	/** Static factory; equivalent to calling the constructor directly. */
	public static <K, V> Pair<K, V> createPair(K element0, V element1) {
		return new Pair<K, V>(element0, element1);
	}

	public Pair(K element0, V element1) {
		first = element0;
		second = element1;
	}

	/** @return the first element of the pair */
	public K getElement0() {
		return first;
	}

	/** @return the second element of the pair */
	public V getElement1() {
		return second;
	}
}
\ No newline at end of file
--- /dev/null
+package compbio.cassandra;
+
+import java.util.Map;
+
/**
 * Immutable value object holding one record of the Jpred job log: the query
 * sequence, job id, start/end dates, client IP and the per-track predictions.
 */
public class StructureJobLog {
	// All fields are final: this is a read-only snapshot of one log row.
	private final String jobid;
	private final String dateStart;
	private final String dateEnd;
	private final String sequence;
	private final String ip;
	// NOTE(review): the map is stored and returned as-is (no defensive copy),
	// matching the original behaviour; callers share the same instance.
	private final Map<String, String> prediction;

	public StructureJobLog(String seq, String id, String dateStart, String dateEnd, String ip, Map<String, String> pred) {
		this.sequence = seq;
		this.jobid = id;
		this.dateStart = dateStart;
		this.dateEnd = dateEnd;
		this.ip = ip;
		this.prediction = pred;
	}

	/** @return the query protein sequence */
	public String getSequence() {
		return sequence;
	}

	/** @return the Jpred job id */
	public String getJobid() {
		return jobid;
	}

	/** @return the job start date string */
	public String getDateStart() {
		return dateStart;
	}

	/** @return the job end date string */
	public String getDateEnd() {
		return dateEnd;
	}

	/** @return the submitting client IP */
	public String getIp() {
		return ip;
	}

	/** @return map of prediction track name to predicted sequence */
	public Map<String, String> getPrediction() {
		return prediction;
	}
}
--- /dev/null
+package compbio.cassandra;
+
+import java.util.Map;
+
/**
 * Immutable value object pairing a protein sequence and job id with the
 * per-track secondary-structure predictions.
 */
public class StructureProteinPrediction {
	// All fields are final: instances are read-only after construction.
	private final String sequence;
	private final String jobid;
	// NOTE(review): stored and returned without defensive copy, matching the
	// original behaviour; callers share the same map instance.
	private final Map<String, String> prediction;

	public StructureProteinPrediction(String seq, String id, Map<String, String> pred) {
		this.sequence = seq;
		this.jobid = id;
		this.prediction = pred;
	}

	/** @return the query protein sequence */
	public String getSequence() {
		return sequence;
	}

	/** @return the Jpred job id */
	public String getJobid() {
		return jobid;
	}

	/** @return map of prediction track name to predicted sequence */
	public Map<String, String> getPrediction() {
		return prediction;
	}

}
--- /dev/null
+package compbio.cassandra.readers;
+
+
+import java.util.Calendar;
+import java.util.List;
+import java.util.ArrayList;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.BoundStatement;
+
+import compbio.cassandra.CassandraReader;
+import compbio.cassandra.Pair;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
+
+public class CassandraReaderExecutionTime {
+ private Session session;
+
+ public static String CASSANDRA_HOSTNAME = "localhost";
+ public static boolean READ_WEB_JPRED = false;
+ public static boolean READ_LOCALFILE_JPRED = false;
+
+ public void setSession(Session s) {
+ assert s != null;
+ session = s;
+ }
+
+ private void setConditions() {
+
+ }
+
+ public boolean JobisNotInsterted(String jobid) {
+ ResultSet results1 = session.execute("SELECT * FROM ProteinLog WHERE JobID = '" + jobid + "';");
+ if (results1.isExhausted()) {
+ return true;
+ }
+ return false;
+ }
+
+ public boolean JobisNotArchived(String jobid) {
+ ResultSet results1 = session.execute("SELECT * FROM JpredArchive WHERE JobID = '" + jobid + "';");
+ if (results1.isExhausted()) {
+ return true;
+ }
+ return false;
+ }
+
+ /*
+ * getting data from the db
+ */
+ public List<Pair<String, String>> ReadProteinDataTable() {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT DataBegin,DataEnd FROM ProteinKeyspace.ProteinLog;";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ final long queryTime = System.currentTimeMillis();
+ List<Row> rows = results.all();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+
+ List<Pair<String, String>> res = new ArrayList<Pair<String, String>>();
+ int c = 0;
+ for (Row r : rows) {
+ Pair<String, String> pair = new Pair<String, String>(r.getString("DataBegin"), r.getString("DataEnd"));
+ res.add(pair);
+ ++c;
+ }
+ final long endTime = System.currentTimeMillis();
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+
+ /*
+ * getting earlest date of jobs from the db
+ */
+ public long getEarliestDateInDB() {
+ final long startTime = System.currentTimeMillis();
+ String com = "SELECT jobtime FROM ProteinKeyspace.ProteinData;";
+ System.out.println("Command: " + com);
+ ResultSet results = session.execute(com);
+ final long queryTime = System.currentTimeMillis();
+ System.out.println("Query time is " + (queryTime - startTime) + " msec");
+
+ Calendar cal = Calendar.getInstance();
+ long res = cal.getTimeInMillis();
+ int c = 0;
+ while (!results.isExhausted()) {
+ Row r = results.one();
+ long d1 = r.getLong("jobtime");
+ if (res > d1) {
+ res = d1;
+ }
+ ++c;
+ }
+ final long endTime = System.currentTimeMillis();
+ System.out.println(c + " rows analysed, execution time is " + (endTime - startTime) + " msec");
+ return res;
+ }
+
+}
--- /dev/null
+package compbio.engine;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
/**
 * Base class describing one submitted job: id, status, client data and
 * start/end timestamps (kept both as strings and as epoch milliseconds).
 */
public class Job {
	private String jobid;
	private String ExecutionStatus;
	private String FinalStatus;
	private String IP;
	private String Email;

	private String StartingDateStr;
	private long StartingDate;
	private String EndTimeStr;
	private long EndTime;
	private String StartingTimeStr;
	private long StartingTime;
	private long ExecutionTime;

	// NOTE(review): SimpleDateFormat is NOT thread-safe and jobs may be parsed
	// from several scheduler threads. These shared instances are kept for
	// compatibility, but the convert* methods below use per-call instances.
	static SimpleDateFormat dateformatter = new SimpleDateFormat("yyyy/MM/dd");
	static SimpleDateFormat timeformatter = new SimpleDateFormat("yyyy/MM/dd:H:m:s");

	public Job() {
		ExecutionStatus = "OK";
		FinalStatus = "OK";
		jobid = "";

		StartingDateStr = "1970/1/1";
		StartingTimeStr = "1970/1/1:0:0:0";
		EndTimeStr = "1970/1/1:0:0:0";
		StartingDate = 0L;
		StartingTime = 0L;
		EndTime = 0L;
		ExecutionTime = 0L;
	}

	public Job(String id, String date1, String date2) {
		jobid = id;
		ExecutionStatus = "OK";
		FinalStatus = "OK";
		setDates(date1, date2);
	}

	/**
	 * Sets starting time (d1) and end time (d2) strings in the
	 * "yyyy/MM/dd:H:m:s" format and derives all the epoch-ms fields.
	 */
	protected void setDates(String d1, String d2) {
		StartingTimeStr = d1;
		// guard: tolerate a date-only string without the ":" separator
		// (the original substring(0, -1) would have thrown)
		int sep = d1.indexOf(":");
		StartingDateStr = (sep < 0) ? d1 : d1.substring(0, sep);
		StartingTime = convertTime(StartingTimeStr);
		StartingDate = convertDate(StartingDateStr);

		EndTimeStr = d2;
		EndTime = convertTime(d2);
		ExecutionTime = EndTime - StartingTime;
	}

	/**
	 * Parses a "yyyy/MM/dd" date to epoch milliseconds; 0 on null/parse error.
	 */
	protected long convertDate(String d) {
		try {
			if (null != d) {
				// fresh formatter per call: thread safety (see note above)
				Date startdate = new SimpleDateFormat("yyyy/MM/dd").parse(d);
				return startdate.getTime();
			}
		} catch (ParseException e) {
			e.printStackTrace();
		}
		return 0L;
	}

	/**
	 * Parses a "yyyy/MM/dd:H:m:s" time to epoch milliseconds; 0 on null/parse
	 * error.
	 */
	protected long convertTime(String d) {
		try {
			if (null != d) {
				// fresh formatter per call: thread safety (see note above)
				Date startdate = new SimpleDateFormat("yyyy/MM/dd:H:m:s").parse(d);
				return startdate.getTime();
			}
		} catch (ParseException e) {
			e.printStackTrace();
		}
		return 0L;
	}

	public void setStartingDate(String date) {
		this.StartingDateStr = date;
		StartingDate = convertDate(date);
		StartingTime = convertTime(date);
		if (0 < EndTime) {
			ExecutionTime = EndTime - StartingTime;
		}
	}

	public void setEndTime(String date) {
		this.EndTimeStr = date;
		EndTime = convertTime(date);
		if (0 < StartingTime) {
			ExecutionTime = EndTime - StartingTime;
		}
	}

	public String getStartingDateStr() {
		return StartingDateStr;
	}

	public String getStartingTimeStr() {
		return StartingTimeStr;
	}

	public long getStartingDate() {
		return StartingDate;
	}

	public long getStartingTime() {
		return StartingTime;
	}

	public String getEndTimeStr() {
		return EndTimeStr;
	}

	public long getEndTime() {
		return EndTime;
	}

	public void setExecutionStatus(String status) {
		ExecutionStatus = status;
	}

	public void setFinalStatus(String status) {
		FinalStatus = status;
	}

	public String getExecutionStatus() {
		return ExecutionStatus;
	}

	public String getFinalStatus() {
		return FinalStatus;
	}

	/** @return end time minus starting time, in milliseconds */
	public long getExecutionTime() {
		return ExecutionTime;
	}

	public void setIP(String IP) {
		this.IP = IP;
	}

	/** @return the client IP, or "127.0.0.1" when unknown */
	public String getIP() {
		if (null != IP)
			return IP;
		return "127.0.0.1";
	}

	public void setEmail(String email) {
		this.Email = email;
	}

	/** @return the client email, or "user@localhost" when unknown */
	public String getEmail() {
		if (null != Email)
			return Email;
		return "user@localhost";
	}

	public void setJobId(String id) {
		this.jobid = id;
	}

	public String getJobID() {
		return jobid;
	}
}
--- /dev/null
+package compbio.engine;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import compbio.cassandra.FastaSequence;
+
+public class JpredJob extends Job {
+ private String protein;
+ private String log;
+ private List<FastaSequence> predictions;
+ private List<FastaSequence> alignment;
+
+ public JpredJob() {
+ super();
+ }
+
+ public JpredJob(String jobid, String date1, String date2) {
+ super(jobid, date1, date2);
+ this.protein = protein;
+ predictions = new ArrayList<FastaSequence>();
+ alignment = new ArrayList<FastaSequence>();
+ log = "";
+ }
+
+ public JpredJob(String jobid, String date1, String date2, List<FastaSequence> alignment, List<FastaSequence> predictions) {
+ super(jobid, date1, date2);
+ this.protein = protein;
+ this.alignment = alignment;
+ this.predictions = predictions;
+ }
+
+ public void setProtein(String protein) {
+ this.protein = protein;
+ }
+
+ public String getProtein() {
+ return protein;
+ }
+
+ public void setLog(String log) {
+ this.log = log;
+ }
+
+ public String getLog() {
+ return log;
+ }
+
+ public void setAlignment(List<FastaSequence> al) {
+ this.alignment = al;
+ }
+
+ public List<FastaSequence> getAlignment() {
+ return alignment;
+ }
+
+ public void setPredictions(List<FastaSequence> pr) {
+ this.predictions = pr;
+ }
+
+ public List<FastaSequence> getPredictions() {
+ return predictions;
+ }
+}
--- /dev/null
+/* Copyright (c) 2009 Peter Troshin\r
+ * \r
+ * JAva Bioinformatics Analysis Web Services (JABAWS) @version: 1.0 \r
+ * \r
+ * This library is free software; you can redistribute it and/or modify it under the terms of the\r
+ * Apache License version 2 as published by the Apache Software Foundation\r
+ * \r
+ * This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without\r
+ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Apache \r
+ * License for more details.\r
+ * \r
+ * A copy of the license is in apache_license.txt. It is also available here:\r
+ * @see: http://www.apache.org/licenses/LICENSE-2.0.txt\r
+ * \r
+ * Any republication or derived work distributed in source code form\r
+ * must include this copyright and license notice.\r
+ */\r
+\r
+package compbio.engine;\r
+\r
+import java.io.File;\r
+import java.io.IOException;\r
+import java.net.URISyntaxException;\r
+import java.net.URL;\r
+\r
+import org.apache.log4j.Logger;\r
+\r
+import compbio.util.PropertyHelper;\r
+import compbio.util.Util;\r
+\r
+public final class ProteoCachePropertyHelperManager {\r
+\r
+ private static Logger log = Logger.getLogger(ProteoCachePropertyHelperManager.class);\r
+ private static PropertyHelper ph = null;\r
+\r
+ /**\r
+ * Ways to fix path problem: \r
+ * 1) find a path to WEB-INF directory based on the path to a known class. \r
+ * Then prepend this absolute path to the rest of paths \r
+ * pros: no input from user \r
+ * cons: relocation of the source may cause problems \r
+ * \r
+ * 2) Require users to add configuration directories to the class\r
+ * path and then load entries from it. \r
+ * pros: \r
+ * cons: Many paths needs to be added. Put significant burden on the user. \r
+ * Hard to tell web appl server to add these entries to its class path. \r
+ * \r
+ * 3) Ask for project source directory explicitly in the configuration. \r
+ * pros:\r
+ * cons: similar to 1, but this initial configuration file must reside in \r
+ * well known location! Why ask users what can be found automatically?\r
+ * \r
+ * 4) Have everything in the location already in class path for tomcat. \r
+ * pros:\r
+ * cons: only classes and lib/*.jar are added, Eclipse will remove non \r
+ * classses from classes dir.\r
+ * \r
+ * Try 1 - succeed.\r
+ * \r
+ * @return an instance\r
+ */\r
+ public static PropertyHelper getPropertyHelper() {\r
+ if (ph == null) {\r
+ try {\r
+ File MainPropertyFile = getResourceFromClasspath("conf/Proteocache.properties");\r
+ ph = new PropertyHelper(MainPropertyFile);\r
+ } catch (IOException e) {\r
+ log.warn("Cannot read property files! Reason: " + e.getLocalizedMessage(), e.getCause());\r
+ }\r
+ }\r
+ return ph;\r
+ }\r
+\r
+ static File getResourceFromClasspath(String resourceName) {\r
+ assert !Util.isEmpty(resourceName);\r
+ String locPath = getLocalPath();\r
+ File prop = new File(locPath + resourceName);\r
+ if (!prop.exists()) {\r
+ log.warn("Could not find a resource " + resourceName + " in the classpath!");\r
+ }\r
+ return prop;\r
+ }\r
+\r
+ /**\r
+ * Method return the absolute path to the project root directory. It assumes\r
+ * the following structure of the project:\r
+ * project-root: \r
+ * conf/settings\r
+ * binaries \r
+ * WEB-INF/classes/compbio/engine/conf/PropertyHelperManager.class\r
+ * If the structure changes it must be reflected in this method\r
+ * \r
+ * @return the local path\r
+ * @throws RuntimeException\r
+ * if cannot determine the local path\r
+ */\r
+ public static String getLocalPath() {\r
+ String clname = ProteoCachePropertyHelperManager.class.getSimpleName();\r
+ URL url = ProteoCachePropertyHelperManager.class.getResource(clname + ".class");\r
+ File f = null;\r
+ try {\r
+ f = new File(url.toURI());\r
+ // Iterate up the hierarchy to find a root project directory\r
+ for (int i = 0; i < 5; i++) {\r
+ f = f.getParentFile();\r
+ }\r
+ } catch (URISyntaxException e) {\r
+ String mes = "Could not find resources path! Problems locating PropertyHelperManager class! ";\r
+ log.error(mes + e.getLocalizedMessage(), e.getCause());\r
+ throw new RuntimeException(mes + e.getLocalizedMessage(), e.getCause());\r
+ } catch (IllegalArgumentException e) {\r
+ // Classes are in the jar file, using different method to determine\r
+ // the path new File(INCORRECT URL) throws it\r
+ String mes = "It looks like classes are in the jar file. " \r
+ + "Attempting a different method to determinine the path to the resources";\r
+ log.debug(mes + e.getLocalizedMessage(), e.getCause());\r
+ try {\r
+ f = new File(ProteoCachePropertyHelperManager.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath());\r
+\r
+ // Iterate up the hierarchy to find a root project directory\r
+ // This time there is not need to walk up all class packages\r
+ // WEB_APPL_NAME\WEB-INF\lib\JAR-FILE-NAME\r
+ // jws2-1.0\WEB-INF\lib\full-jws2-1.0.jar\r
+ for (int i = 0; i < 3; i++) {\r
+ f = f.getParentFile();\r
+ }\r
+ } catch (URISyntaxException e1) {\r
+ log.error("Could not find resources path! " + e1.getLocalizedMessage(), e1.getCause());\r
+ throw new RuntimeException("Could not find resources path! ", e1.getCause());\r
+ }\r
+ }\r
+ log.debug("Project directory is: " + f.getAbsolutePath());\r
+ return f.getAbsolutePath() + File.separator;\r
+ }\r
+\r
+ public static int getIntProperty(String propValue) {\r
+ if (!Util.isEmpty(propValue)) {\r
+ return Integer.parseInt(propValue.trim());\r
+ }\r
+ return -1;\r
+ }\r
+\r
+ public static boolean getBooleanProperty(String propValue) {\r
+ if (!Util.isEmpty(propValue)) {\r
+ propValue = propValue.trim();\r
+ return Boolean.parseBoolean(propValue);\r
+ }\r
+ return false;\r
+ }}\r
--- /dev/null
+package compbio.engine.archive;
+
/**
 * Placeholder for a ProteoCache job archive.
 *
 * NOTE(review): intentionally empty so far - ArchiveManager keeps a reference
 * to an Archive, but no archive behaviour has been implemented yet.
 */
public class Archive {

}
--- /dev/null
+package compbio.engine.archive;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.Iterator;
+import java.util.Scanner;
+
+//import compbio.util.Util;
+
/**
 * Manage files in ProteoCache Archive
 *
 * NOTE(review): this class is largely a stub at this stage - the Scanner-based
 * traversal is commented out, hasNext() always reports another element and
 * next() returns a hard-coded path. Confirm the intended behaviour before
 * relying on it.
 *
 * @author Alexander Sherstnev
 * @version 1.0 November 2013
 *
 */
public class ArchiveManager implements Iterator<ArchivedJob> {
	// archive being traversed (only set by the Archive-based constructor)
	Archive archive;
	//private final Scanner input;
	/**
	 * Delimiter for the scanner
	 */
	//private final String DELIM = ">";

	/**
	 * Header data can contain non-ASCII symbols and read in UTF8
	 *
	 * NOTE(review): the constructor body is disabled; mainPath is currently
	 * ignored.
	 *
	 * @param mainPath
	 *            the absolute path to the ProteoCache job archive
	 * @throws FileNotFoundException
	 *             if the input file is not found
	 * @throws IllegalStateException
	 *             if the close method was called on this instance
	 *
	 */
	public ArchiveManager(final String mainPath) throws FileNotFoundException {
		/*
		input = new Scanner(new File(mainPath), "UTF8");
		input.useDelimiter(DELIM);
		Runtime.getRuntime().addShutdownHook(new Thread() {

			@Override
			public void run() {
				if (input != null) {
					input.close();
				}
			}
		});
		*/
	}

	public ArchiveManager(Archive ar) {
		archive = ar;
	}


	/**
	 * {@inheritDoc}
	 *
	 * NOTE(review): stub - always claims another element is available.
	 *
	 * @throws IllegalStateException
	 *             if the close method was called on this instance
	 */
	@Override
	public boolean hasNext() {
		//return input.hasNext();
		return true;
	}

	/**
	 * Reads the next FastaSequence from the input
	 *
	 * NOTE(review): stub - currently returns an ArchivedJob built from a
	 * placeholder path rather than reading from the archive.
	 *
	 * @throws AssertionError
	 *             if the header or the sequence is missing
	 * @throws IllegalStateException
	 *             if the close method was called on this instance
	 * @throws MismatchException
	 *             - if there were no more FastaSequence's.
	 */
	@Override
	public ArchivedJob next() {
		String path = "bla-bla-bla";
		/*
		String path = input.next();
		while (fastaHeader.indexOf("\n") < 0 && input.hasNext()) {
			path = fastaHeader.concat(">");
			path = fastaHeader.concat(input.next());
		}
		*/
		return new ArchivedJob(path);
	}

	/**
	 * Not implemented
	 */
	@Override
	public void remove() {
		throw new UnsupportedOperationException();
	}

	/**
	 * Call this method to close the connection to the input file if you want to
	 * free up the resources. The connection will be closed on the JVM shutdown
	 * if this method was not called explicitly. No further reading on this
	 * instance of the FastaReader will be possible after calling this method.
	 */
	public void close() {
		//input.close();
	}

	// NOTE(review): unused private helper adapted from FastaReader; candidate
	// for removal once the archive traversal is actually implemented.
	private static ArchivedJob toFastaSequence(final String singleFastaEntry) {

		// assert !Util.isEmpty(singleFastaEntry) :
		// "Empty String where FASTA sequence is expected!";

		int nlineidx = singleFastaEntry.indexOf("\n");
		if (nlineidx < 0) {
			throw new AssertionError(
					"The FASTA sequence must contain the header information"
							+ " separated by the new line from the sequence. Given sequence does not appear to "
							+ "contain the header! Given data:\n "
							+ singleFastaEntry);
		}
		String header = singleFastaEntry.substring(0, nlineidx);

		/*
		 * if (Util.isEmpty(sequence)) { throw new AssertionError(
		 * "Empty sequences are not allowed! Please make sure the " +
		 * " data is in the FASTA format! Given data:\n " + singleFastaEntry); }
		 */
		return new ArchivedJob(header);
	}
}
--- /dev/null
+package compbio.engine.archive;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.channels.Channels;
+import java.nio.channels.ReadableByteChannel;
+import java.util.List;
+
/**
 * One archived Jpred job: a local target path plus (eventually) the list of
 * files it contains.
 */
public class ArchivedJob {
	/** Local filesystem path where the job archive is stored. */
	String path;
	int filesize;
	List<String> files;

	ArchivedJob(String path) {
		this.path = path;
	}

	/** Not implemented yet; always reports failure. */
	public boolean getArchiveFromWS() {
		return false;
	}

	/**
	 * Downloads the archive from the given URL into {@link #path}.
	 *
	 * BUG FIX: the original leaked both the channel and the FileOutputStream;
	 * they are now always closed.
	 *
	 * @param webpath
	 *            URL of the job archive (e.g. the .tar.gz on the Jpred server)
	 * @return true on success (exceptions propagate on failure)
	 * @throws IOException
	 *             if the download or the local write fails
	 * @throws MalformedURLException
	 *             if webpath is not a valid URL
	 */
	public boolean getArchiveFromWeb(String webpath) throws IOException, MalformedURLException {
		URL website = new URL(webpath);
		ReadableByteChannel rbc = Channels.newChannel(website.openStream());
		try {
			FileOutputStream fos = new FileOutputStream(path);
			try {
				fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
			} finally {
				fos.close();
			}
		} finally {
			rbc.close();
		}
		return true;
	}

	/** @return the archive size (not yet maintained; defaults to 0) */
	public int getSize() {
		return filesize;
	}

	/**
	 * NOTE(review): unpacking is not implemented yet; the method returns the
	 * current (possibly null) file list unchanged.
	 */
	public List<String> unpack() {
		if (null != files) {
			// TODO: extract the archive and populate the file list
		}
		return files;
	}
}
--- /dev/null
+package compbio.listeners;
+
+import java.io.IOException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import javax.servlet.annotation.WebListener;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Session;
+
+import compbio.cassandra.CassandraNativeConnector;
+import compbio.cassandra.CassandraNewTableWriter;
+import compbio.cassandra.JpredParserHTTP;
+import compbio.cassandra.JpredParserLocalFile;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
+import compbio.util.Util;
+
+/**
+ * Application Lifecycle Listener implementation class ContextListener
+ *
+ */
+@WebListener
+public class ContextListener implements ServletContextListener {
+ private ScheduledExecutorService webjob_scheduler;
+ private ScheduledExecutorService localjob_scheduler;
+ private ScheduledExecutorService update_scheduler;
+ CassandraNativeConnector db = new CassandraNativeConnector();
+ static PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
+ private static Logger log = Logger.getLogger(ContextListener.class);
+ public static boolean READ_WEB_JPRED = false;
+ public static boolean READ_LOCALFILE_JPRED = false;
+ public static boolean FILL_NEWTABLE = false;
+
+ private static boolean initBooleanValue(String key) {
+ assert key != null;
+ String status = ph.getProperty(key);
+ log.debug("Loading property: " + key + " with value: " + status);
+ if (Util.isEmpty(status)) {
+ return false;
+ }
+ return new Boolean(status.trim()).booleanValue();
+ }
+
+ /**
+ * @see ServletContextListener#contextInitialized(ServletContextEvent)
+ */
+ public void contextInitialized(ServletContextEvent arg0) {
+ System.out.println("ProteoCache session start......");
+ // connect to the db and create table if needed
+ db.Connect();
+ final CassandraNewTableWriter updater = new CassandraNewTableWriter();
+
+ READ_WEB_JPRED = initBooleanValue("cassandra.jpred.web.update");
+ READ_LOCALFILE_JPRED = initBooleanValue("cassandra.jpred.local.update");
+ FILL_NEWTABLE = initBooleanValue("cassandra.newtables.update");
+
+ if (FILL_NEWTABLE) {
+ System.out.println("Initializating new table update scheduler");
+ update_scheduler = Executors.newSingleThreadScheduledExecutor();
+ update_scheduler.schedule(new Runnable() {
+ @Override
+ public void run() {
+ updater.FillNewTable();
+ }
+ }, 10, TimeUnit.SECONDS);
+ }
+
+ if (READ_WEB_JPRED) {
+ // get data from real Jpred production server
+ final String datasrc = "http://www.compbio.dundee.ac.uk/www-jpred/results/usage-new/alljobs.dat";
+ final String prefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
+ final JpredParserHTTP parser = new JpredParserHTTP(prefix);
+
+ int initialdelay = 300;
+ int updaterate = 600;
+ int newinitialdelay = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.web.inidelay"));
+ if (0 <= newinitialdelay) {
+ initialdelay = newinitialdelay;
+ }
+ int newupdaterate = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.web.updaterate"));
+ if (0 < newupdaterate) {
+ updaterate = newupdaterate;
+ }
+ final int updateperiod = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.web.period"));
+
+ webjob_scheduler = Executors.newSingleThreadScheduledExecutor();
+ System.out.println("Initializating web job scheduler");
+ System.out.println(" initial delay = " + initialdelay + " seconds");
+ System.out.println(" update rate = " + updaterate + " seconds");
+ if (0 < updateperiod)
+ System.out.println(" update period = " + updateperiod + " days");
+ else
+ System.out.println(" update period = 5 days");
+
+ webjob_scheduler.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (0 < updateperiod) {
+ parser.Parsing(datasrc, updateperiod);
+ } else {
+ parser.Parsing(datasrc, 5);
+ }
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ }, initialdelay, updaterate, TimeUnit.SECONDS);
+ }
+
+ if (READ_LOCALFILE_JPRED) {
+ // get irtifical data generated for the DB stress tests
+ final String datasrc = "/home/asherstnev/Projects/Java.projects/proteocache/data_stress_test/data.dat";
+ final String prefix = "/home/asherstnev/Projects/Java.projects/proteocache/data_stress_test/Jpreddata";
+ final JpredParserLocalFile parser = new JpredParserLocalFile(prefix);
+
+ int initialdelay = 300;
+ int updaterate = 600;
+ int newinitialdelay = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.local.inidelay"));
+ if (0 <= newinitialdelay) {
+ initialdelay = newinitialdelay;
+ }
+ int newupdaterate = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.local.updaterate"));
+ if (0 < newupdaterate) {
+ updaterate = newupdaterate;
+ }
+ final int updateperiod = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.local.period"));
+
+ localjob_scheduler = Executors.newSingleThreadScheduledExecutor();
+ System.out.println("Initializating local job scheduler");
+ System.out.println(" initial delay = " + initialdelay + " seconds");
+ System.out.println(" update rate = " + updaterate + " seconds");
+ if (0 < updateperiod)
+ System.out.println(" update period = " + updateperiod + " days");
+ else
+ System.out.println(" update period = 5 days");
+ localjob_scheduler.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (0 < updateperiod) {
+ parser.Parsing(datasrc, updateperiod);
+ } else {
+ parser.Parsing(datasrc, 100);
+ }
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ }, initialdelay, updaterate, TimeUnit.SECONDS);
+ }
+
+ }
+
+ /**
+ * @see ServletContextListener#contextDestroyed(ServletContextEvent)
+ */
+ public void contextDestroyed(ServletContextEvent arg0) {
+ db.Closing();
+ System.out.println("Shut down ProteoCache......");
+ if (READ_WEB_JPRED) {
+ webjob_scheduler.shutdownNow();
+ }
+ if (READ_LOCALFILE_JPRED) {
+ localjob_scheduler.shutdownNow();
+ }
+ update_scheduler.shutdownNow();
+ }
+
+}
--- /dev/null
+package compbio.listeners;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import compbio.cassandra.*;
+import compbio.statistic.StatisticsProt;
+
+/**
+ * Servlet implementation class DetailList
+ */
+@WebServlet("/DetailList")
+public class DetailList extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @see HttpServlet#HttpServlet()
+ */
+
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ List<DataBase> result;
+ String date1 = request.getParameter("data1");
+ String date2 = request.getParameter("data2");
+ StatisticsProt sp = new StatisticsProt();
+ // result = sp.readDetail(date1, date2);
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ // TODO Auto-generated method stub
+ }
+
+}
--- /dev/null
+package compbio.listeners;
+
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.List;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import compbio.cassandra.DataBase;
+import compbio.statistic.CassandraRequester;
+import compbio.statistic.StatisticsProt;
+
+@WebServlet("/ServletJobsByDate")
+public class ServletJobsByDate extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ final long startTime = System.currentTimeMillis();
+ String date1 = request.getParameter("data1");
+ String date2 = request.getParameter("data2");
+ CassandraRequester cr = new CassandraRequester();
+ if (null != request.getParameter("option")) {
+ Calendar cal = Calendar.getInstance();
+ date1 = StatisticsProt.DateFormatYYMMDD(cr.earliestDate());
+ date2 = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DAY_OF_MONTH);
+ }
+ request.setAttribute("data1", date1);
+ request.setAttribute("data2", date2);
+ List<DataBase> res = cr.countJobs(date1, date2);
+ request.setAttribute("result", res);
+ final long endTime = System.currentTimeMillis();
+ request.setAttribute("timeExecution", (endTime - startTime));
+ System.out.println ("QueryServlet.doGet: total number of dates = " + res.size());
+ request.setAttribute("flag", request.getParameter("option"));
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportJobsByDate.jsp");
+ rd.forward(request, response);
+ }
+
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+
+}
--- /dev/null
+package compbio.listeners;
+
+import java.io.IOException;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import compbio.statistic.CassandraRequester;
+
+
+/**
+ * Servlet implementation class LogServlet
+ */
+@WebServlet("/ServletLogInfo")
+public class ServletLogInfo extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+
+
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ final long startTime = System.currentTimeMillis();
+ String id = request.getParameter("IdJob");
+ CassandraRequester cr = new CassandraRequester();
+ request.setAttribute("result", cr.readJobLog(id));
+ final long endTime = System.currentTimeMillis();
+ request.setAttribute("timeExecution", (endTime - startTime));
+ request.setAttribute("IdJob", id);
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportLogInfo.jsp");
+ rd.forward(request, response);
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+
+}
--- /dev/null
+package compbio.listeners;
+
+import java.io.IOException;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import compbio.statistic.CassandraRequester;
+import compbio.statistic.StatisticsProt;
+
+/**
+ * Servlet implementation class ProtServlet
+ */
+@WebServlet("/ServletSequenceProtein")
+public class ServletSequenceProtein extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ final long startTime = System.currentTimeMillis();
+ String flag = request.getParameter("protein");
+ String prot = request.getParameter("prot");
+ String search = request.getParameter("Search");
+ int counter = Integer.parseInt(request.getParameter("counterJob"));
+ CassandraRequester cr = new CassandraRequester();
+ if (search.equals("Search counter")) {
+ request.setAttribute("result", cr.readProteinByCounter(counter));
+ } else {
+ request.setAttribute("result", cr.readProteins(prot, flag));
+ }
+ final long endTime = System.currentTimeMillis();
+ request.setAttribute("timeExecution", (endTime - startTime));
+ request.setAttribute("prot", prot);
+ request.setAttribute("flag", flag);
+ request.setAttribute("search", search);
+ request.setAttribute("counter", counter);
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportSequenceProtein.jsp");
+ rd.forward(request, response);
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+
+}
--- /dev/null
+package compbio.listeners;
+
+import java.io.IOException;
+import java.util.Calendar;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import compbio.statistic.StatisticsProt;
+import compbio.statistic.CassandraRequester;
+
+/**
+ * Servlet implementation class LengthServlet
+ */
+@WebServlet("/ServletTimeExecution")
+public class ServletTimeExecution extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ final long startTime = System.currentTimeMillis();
+ String date1 = request.getParameter("data1");
+ String date2 = request.getParameter("data2");
+ CassandraRequester sp = new CassandraRequester();
+ if (null != request.getParameter("option")) {
+ Calendar cal = Calendar.getInstance();
+ date1 = StatisticsProt.DateFormatYYMMDD(sp.earliestDate());
+ date2 = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DAY_OF_MONTH);
+ }
+ request.setAttribute("data1", date1);
+ request.setAttribute("data2", date2);
+ request.setAttribute("result", sp.extractExecutionTime(date1, date2));
+ request.setAttribute("flag", request.getParameter("option"));
+ final long endTime = System.currentTimeMillis();
+ request.setAttribute("timeExecution", (endTime - startTime));
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportTimeExecution.jsp");
+ rd.forward(request, response);
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
+ * response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+}
--- /dev/null
+package compbio.statistic;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import compbio.cassandra.CassandraNativeConnector;
+import compbio.cassandra.CassandraReader;
+import compbio.cassandra.DataBase;
+import compbio.cassandra.Pair;
+import compbio.cassandra.StructureJobLog;
+import compbio.cassandra.StructureProteinPrediction;
+
+public class CassandraRequester {
+ private CassandraReader db = new CassandraReader();
+ private ArrayList<DataBase> query;
+ private static long currentDate = 0;
+ private static long earlestDate = 0;
+
+ /*
+ * query: execution time for the period from date1 till date2
+ */
+ public List<DataBase> extractExecutionTime(String date1, String date2) {
+ if (null == date1) {
+ date1 = "1970/1/1";
+ }
+ if (null == date2) {
+ date1 = "2100/1/1";
+ }
+ if (!isThisDateValid(date1) || !isThisDateValid(date2)) {
+ System.out.println("Wrong date: point 3");
+ return null;
+ }
+ SetDateRange();
+ int nbins = 5;
+ long dateStart = DateParsing(date1);
+ long dateEnd = DateParsing(date2);
+ if (dateEnd < earlestDate || dateStart > currentDate || dateStart > dateEnd)
+ return null;
+ if (dateStart < earlestDate)
+ dateStart = earlestDate;
+ if (dateEnd > currentDate)
+ dateStart = currentDate;
+
+ Calendar start = Calendar.getInstance();
+ start.setTime(new Date(dateStart));
+ Calendar end = Calendar.getInstance();
+ end.setTime(new Date(dateEnd));
+ query = new ArrayList<DataBase>();
+ List<Integer> totalTime = new ArrayList<Integer>();
+ for (int i = 0; i < nbins; i++)
+ totalTime.add(i, 0);
+ List<Pair<String, String>> res = db.ReadProteinDataTable();
+ List<Pair<Date, Long>> numres = new ArrayList<Pair<Date, Long>>();
+
+ for (Pair<String, String> entry : res) {
+ SimpleDateFormat dateformatter = new SimpleDateFormat("yyyy/MM/dd");
+ try {
+ Date jobstartdate = dateformatter.parse(entry.getElement0());
+ long date = jobstartdate.getTime();
+ if (dateStart <= date && date <= dateEnd) {
+ SimpleDateFormat datetimeformatter = new SimpleDateFormat("yyyy/MM/dd:H:m:s");
+ Date jobstarttime = datetimeformatter.parse(entry.getElement0());
+ Date jobendtime = datetimeformatter.parse(entry.getElement1());
+ long diff = (jobendtime.getTime() - jobstarttime.getTime()) / 1000;
+ Pair<Date, Long> pair = new Pair<Date, Long>(jobstartdate, Long.valueOf(diff));
+ numres.add(pair);
+ }
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ }
+
+ for (Date date = start.getTime(); !start.after(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
+ List<Integer> timeResult = new ArrayList<Integer>();
+ for (int i = 0; i < nbins; i++)
+ timeResult.add(i, 0);
+ for (Pair<Date, Long> p : numres) {
+ if (date.equals(p.getElement0())) {
+ long lenResult = p.getElement1().longValue();
+ if (lenResult <= 30)
+ timeResult.set(0, timeResult.get(0) + 1);
+ else if (lenResult > 30 && lenResult <= 60)
+ timeResult.set(1, timeResult.get(1) + 1);
+ else if (lenResult > 60 && lenResult <= 120)
+ timeResult.set(2, timeResult.get(2) + 1);
+ else if (lenResult > 120 && lenResult <= 600)
+ timeResult.set(3, timeResult.get(3) + 1);
+ else {
+ timeResult.set(4, timeResult.get(4) + 1);
+ }
+ }
+ }
+ for (int i = 0; i < nbins; i++)
+ totalTime.set(i, totalTime.get(i) + timeResult.get(i));
+ DataBase db = new DataBase();
+ db.setTimeRez(timeResult);
+ db.setDate(DateFormat(date.getTime()));
+ query.add(db);
+ }
+
+ DataBase db = new DataBase();
+ db.setTimeTotalExec(totalTime);
+ query.add(db);
+ System.out.println("StatisticsProt.readLength: total number of dates = " + query.size());
+ return query;
+ }
+
+ /*
+ * query: total number of jobs for the period from date1 till date2
+ */
+ public List<DataBase> countJobs(String date1, String date2) {
+ if (null == date1) {
+ date1 = "1970/1/1";
+ }
+ if (null == date2) {
+ date1 = "2100/1/1";
+ }
+ if (!isThisDateValid(date1) || !isThisDateValid(date2)) {
+ System.out.println("Wrong date: point 3");
+ return null;
+ }
+ SetDateRange();
+ long dateStart = DateParsing(date1);
+ long dateEnd = DateParsing(date2);
+ if (dateEnd < earlestDate || dateStart > currentDate || dateStart > dateEnd)
+ return null;
+ if (dateStart < earlestDate)
+ dateStart = earlestDate;
+ if (dateEnd > currentDate)
+ dateStart = currentDate;
+
+ Calendar start = Calendar.getInstance();
+ start.setTime(new Date(dateStart));
+ Calendar end = Calendar.getInstance();
+ end.setTime(new Date(dateEnd));
+ query = new ArrayList<DataBase>();
+ for (Date date = start.getTime(); !start.after(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
+ long res = db.ReadDateTable(date.getTime());
+ DataBase db = new DataBase();
+ db.setTotal((int)res);
+ db.setDate(DateFormat(date.getTime()));
+ query.add(db);
+ }
+ System.out.println("StatisticsProt.readLength: total number of dates = " + query.size());
+ return query;
+ }
+ /*
+ * query: protein sequence
+ * */
+ public List<DataBase> readProteins(String protIn, String flag) {
+ query = new ArrayList<DataBase>();
+ List<StructureProteinPrediction> res;
+ if (flag.equals("whole"))
+ res = db.ReadWholeSequence(protIn);
+ else
+ res = db.ReadPartOfSequence(protIn);
+ for (StructureProteinPrediction entry : res) {
+ Map<String,String> pred = entry.getPrediction();
+ Iterator it = pred.entrySet().iterator();
+ while (it.hasNext()) {
+ DataBase db = new DataBase();
+ db.setProt(entry.getSequence());
+ Map.Entry pairs = (Map.Entry)it.next();
+ db.setId(entry.getJobid());
+ db.setJpred(pairs.getValue().toString());
+ if (flag.equals("part"))
+ db.setSubProt(CreateSubprot (entry.getSequence(), protIn));
+ query.add(db);
+ }
+ }
+ return query;
+ }
+
+ /*
+ * query protein sequences with number of jobs
+ */
+ public List<DataBase> readProteinByCounter(int minimalcounter) {
+ query = new ArrayList<DataBase>();
+ Map<String, Integer> map = db.ReadProteinSequenceByCounter();
+ for (Map.Entry<String, Integer> entry : map.entrySet()) {
+ if (entry.getValue() > minimalcounter) {
+ DataBase db = new DataBase();
+ db.setTotalId(entry.getValue());
+ db.setProt(entry.getKey());
+ query.add(db);
+ }
+ }
+ return query;
+ }
+
+ /*
+ * query jobs log info
+ */
+ public DataBase readJobLog(String jobid) {
+ // query = new ArrayList<DataBase>();
+ StructureJobLog res = db.ReadJobLog(jobid);
+ DataBase query = new DataBase();
+ query.setLogInfo(res);
+ // query.setres);
+ return query;
+ }
+ /*
+ * create list of parts of protein sequence;
+ */
+ private static List<String> CreateSubprot (String protein, String subprot) {
+ List<String> sub = new ArrayList<String>();
+ String subStr = protein;
+ while (subStr.length() > 0 && subStr.contains(subprot)) {
+ String first = subStr.substring(0, subStr.indexOf(subprot));
+ if (first.length() > 0)
+ sub.add(first);
+ sub.add(subprot);
+ subStr = subStr.substring(subStr.indexOf(subprot) + subprot.length(), subStr.length());
+ }
+ if (subStr.length() > 0)
+ sub.add(subStr);
+ return sub;
+ }
+ /*
+ * convert String date into long date (miliseconds since the epoch start)
+ */
+ private static long DateParsing(String datInput) {
+ if (datInput == null) {
+ return 0;
+ }
+ long dateWorkSt = 0;
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd");
+ try {
+ dateWorkSt = formatter.parse(datInput).getTime();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return dateWorkSt;
+ }
+
+ // convert long to date in string format
+ private static String DateFormat(long inDate) {
+ SimpleDateFormat datformat = new SimpleDateFormat("dd/MM/yyyy");
+ return datformat.format(new Date(inDate));
+ }
+
+ /*
+ * set earlest date and current dates. earlestDate is static and should be
+ * set at the 1st call currentDate should be re-calculated every time
+ */
+ private static void SetDateRange() {
+ Calendar cal = Calendar.getInstance();
+ currentDate = DateParsing(cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DAY_OF_MONTH));
+ if (0 == earlestDate) {
+ CassandraRequester cr = new CassandraRequester();
+ earlestDate = cr.earliestDate();
+ System.out.println("Set earlest Date = " + earlestDate);
+ }
+ }
+
+ public boolean isThisDateValid(String dateToValidate) {
+ if (dateToValidate == null || dateToValidate.equals("")) {
+ System.out.println("Undefined date");
+ return false;
+ }
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd");
+ try {
+ // if not valid, this will throw ParseException
+ sdf.setLenient(false);
+ Date date = sdf.parse(dateToValidate);
+ } catch (ParseException e) {
+ e.printStackTrace();
+ return false;
+ }
+ return true;
+ }
+
+ /*
+ * find the earliest date in the database
+ */
+ public long earliestDate() {
+ earlestDate = CassandraNativeConnector.getEarliestDateInDB();
+ return earlestDate;
+ }
+
+}
--- /dev/null
+package compbio.statistic;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+
+import compbio.cassandra.CassandraNativeConnector;
+import compbio.cassandra.DataBase;
+
// Legacy statistics queries; most method bodies are disabled (commented-out
// Hector API code) pending the migration to the native Cassandra driver.
public class StatisticsProt {
	// connector kept for the disabled Hector-based queries below
	private CassandraNativeConnector cc = new CassandraNativeConnector();
	// result buffer filled by each query method and returned to the caller
	private ArrayList<DataBase> query;
	// queryable date range in epoch milliseconds; presumably maintained by
	// SetDateRange() (defined later in this class) — TODO confirm
	private static long currentDate = 0;
	private static long earlestDate = 0;
+
+ /*
+ * query: the period from date1 till date2
+ * */
+ public List<DataBase> readDetails(String date1, String date2) {
+
+ if (!isThisDateValid(date1) || !isThisDateValid(date2)) {
+ System.out.println("Wrong date: point 1");
+ return null;
+ }
+ SetDateRange();
+ long dateStart = DateParsing(date1);
+ long dateEnd = DateParsing(date2);
+ if ((dateStart < earlestDate && dateEnd < earlestDate) || (dateStart > currentDate && dateEnd > currentDate) || dateStart > dateEnd)
+ return null;
+ if (dateStart < earlestDate)
+ dateStart = earlestDate;
+ if (dateEnd > currentDate)
+ dateStart = currentDate;
+ System.out.println("StatisticsProt.readDetails: earlestDate = " + earlestDate + ", currentDate = " + currentDate);
+ System.out.println("StatisticsProt.readDetails: Start date " + date1 + ": int representation = " + dateStart);
+ System.out.println("StatisticsProt.readDetails: End date " + date2 + ": int representation = " + dateEnd);
+ Calendar start = Calendar.getInstance();
+ start.setTime(new Date(dateStart));
+ Calendar end = Calendar.getInstance();
+ end.setTime(new Date(dateEnd));
+ query = new ArrayList<DataBase>();
+ int day = 0;
+ /*
+ for (Date date = start.getTime(); !start.after(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
+ SliceQuery<Long, String, String> result = HFactory.createSliceQuery(cc.GetKeyspace(), LongSerializer.get(),
+ StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinData");
+ result.setKey(date.getTime());
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ QueryResult<ColumnSlice<String, String>> columnSlice = result.execute();
+ ++day;
+ System.out.print("Day " + day + ": dataStart = " + date + ": ");
+ if (!columnSlice.get().getColumns().isEmpty()) {
+ DataBase db = new DataBase(DateFormat(date.getTime()), columnSlice.get().getColumns().size());
+ query.add(db);
+ System.out.println("data exist");
+ } else {
+ System.out.println("no data");
+ }
+ }
+ */
+ System.out.println("StatisticsProt.readLength: total number of dates = " + query.size());
+ return query;
+ }
+
+ /*
+ * query: execution time for the period from date1 till date2
+ * */
+ public List<DataBase> readLength(String date1, String date2) {
+ if (!isThisDateValid(date1) || !isThisDateValid(date2)) {
+ System.out.println("Wrong date: point 3");
+ return null;
+ }
+ SetDateRange();
+ int nbins = 5;
+ long dateStart = DateParsing(date1);
+ long dateEnd = DateParsing(date2);
+ if ((dateStart < earlestDate && dateEnd < earlestDate) || (dateStart > currentDate && dateEnd > currentDate) || dateStart > dateEnd)
+ return null;
+ if (dateStart < earlestDate)
+ dateStart = earlestDate;
+ if (dateEnd > currentDate)
+ dateStart = currentDate;
+ System.out.println("StatisticsProt.readLength: earlestDate = " + earlestDate + ", currentDate = " + currentDate);
+ System.out.println("StatisticsProt.readLength: Start date is " + date1 + ": int representation = " + dateStart);
+ System.out.println("StatisticsProt.readLength: End date is " + date2 + ": int representation = " + dateEnd);
+ Calendar start = Calendar.getInstance();
+ start.setTime(new Date(dateStart));
+ Calendar end = Calendar.getInstance();
+ end.setTime(new Date(dateEnd));
+ query = new ArrayList<DataBase>();
+ List<Integer> totalTime = new ArrayList<Integer>();
+ for (int i = 0; i < nbins; i++)
+ totalTime.add(i, 0);
+ /*
+ for (Date date = start.getTime(); !start.after(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
+ List<Integer> timeResult = new ArrayList<Integer>();
+ SliceQuery<Long, String, String> result = HFactory.createSliceQuery(cc.GetKeyspace(), LongSerializer.get(),
+ StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinData");
+ result.setKey(date.getTime());
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ QueryResult<ColumnSlice<String, String>> columnSlice = result.execute();
+ List<HColumn<String, String>> col = columnSlice.get().getColumns();
+ if (!col.isEmpty()) {
+ Iterator<HColumn<String, String>> itCol = col.iterator();
+ for (int i = 0; i < nbins; i++)
+ timeResult.add(i, 0);
+ // split all jobs into nbins bins
+ while (itCol.hasNext()) {
+ String id = itCol.next().getName();
+ long lenResult = CountID(id);
+ if (lenResult <= 30)
+ timeResult.set(0, timeResult.get(0) + 1);
+ else if (lenResult > 30 && lenResult <= 60)
+ timeResult.set(1, timeResult.get(1) + 1);
+ else if (lenResult > 60 && lenResult <= 120)
+ timeResult.set(2, timeResult.get(2) + 1);
+ else if (lenResult > 120 && lenResult <= 600)
+ timeResult.set(3, timeResult.get(3) + 1);
+ else {
+ timeResult.set(4, timeResult.get(4) + 1);
+ }
+ }
+ for (int i = 0; i < nbins; i++)
+ totalTime.set(i, totalTime.get(i) + timeResult.get(i));
+ DataBase db = new DataBase();
+ db.setTimeRez(timeResult);
+ db.setDate(DateFormat(date.getTime()));
+ query.add(db);
+ }
+ }
+ */
+ DataBase db = new DataBase();
+ db.setTimeTotalExec(totalTime);
+ query.add(db);
+ System.out.println("StatisticsProt.readLength: total number of dates = " + query.size());
+ return query;
+ }
+
	/*
	 * query: predictions for an exact protein sequence. The Hector-based
	 * body is disabled, so this currently always returns an empty list.
	 * */
	public List<DataBase> readProteins(String protIn) {
		query = new ArrayList<DataBase>();
		/*
		SliceQuery<String, String, String> result = HFactory.createSliceQuery(cc.GetKeyspace(), StringSerializer.get(),
				StringSerializer.get(), StringSerializer.get());
		result.setColumnFamily("ProteinRow");
		result.setKey(protIn);
		result.setRange(null, null, false, Integer.MAX_VALUE);
		QueryResult<ColumnSlice<String, String>> columnSlice = result.execute();
		Iterator<HColumn<String, String>> it = columnSlice.get().getColumns().iterator();
		while (it.hasNext()) {
			HColumn<String, String> col = it.next();
			String name = col.getName();
			if (name.matches("(.*)jnetpred")) {
				DataBase db = new DataBase();
				db.setProt(protIn);
				db.setId(col.getName());
				db.setJpred(col.getValue());
				query.add(db);
			}
		}
		*/
		return query;
	}
+
	/*
	 * query: protein sequences that have more than `counter` predictions.
	 * The Hector-based body is disabled, so this currently always returns
	 * an empty list.
	 * */
	public List<DataBase> readProtID(int counter) {
		query = new ArrayList<DataBase>();
		int row_count = 100; // page size used by the disabled range query below
		/*
		RangeSlicesQuery<String, String, String> result = HFactory.createRangeSlicesQuery(cc.GetKeyspace(), StringSerializer.get(),
				StringSerializer.get(), StringSerializer.get());
		result.setColumnFamily("ProteinRow");
		result.setRange(null, null, false, 100);
		result.setRowCount(row_count);
		String last_key = null;
		while (true) {
			result.setKeys(last_key, null);
			QueryResult<OrderedRows<String, String, String>> columnSlice = result.execute();
			OrderedRows<String, String, String> rows = columnSlice.get();
			Iterator<Row<String, String, String>> rowsIterator = rows.iterator();
			while (rowsIterator.hasNext()) {
				Row<String, String, String> row = rowsIterator.next();
				last_key = row.getKey();
				List<HColumn<String, String>> clms = row.getColumnSlice().getColumns();
				//int npred = 0;
				//for (HColumn<String, String> cln : clms) {
				//	String name = cln.getName();
				//	if (name.matches("(.*)jnetpred")) {
				//		++npred;
				//	}
				//}
				int npred = clms.size();
				if (npred > counter) {
					DataBase db = new DataBase();
					db.setProt(last_key);
					db.setTotalId(npred);
					query.add(db);
				}
			}
			if (rows.getCount() < row_count)
				break;
		}*/
		return query;
	}
+
	/*
	 * query: rows whose protein sequence contains protIn as a substring.
	 * The Hector-based body is disabled, so this currently always returns
	 * an empty list.
	 * */
	public List<DataBase> readPart(String protIn) {
		int row_count = 10000; // page size used by the disabled range query below
		query = new ArrayList<DataBase>();
		/*
		RangeSlicesQuery<String, String, String> result = HFactory.createRangeSlicesQuery(cc.GetKeyspace(), StringSerializer.get(),
				StringSerializer.get(), StringSerializer.get());
		result.setColumnFamily("ProteinRow");
		result.setRange(null, null, false, Integer.MAX_VALUE);
		result.setRowCount(row_count);
		String last_key = null;
		while (true) {
			result.setKeys(last_key, null);
			QueryResult<OrderedRows<String, String, String>> columnSlice = result.execute();
			OrderedRows<String, String, String> rows = columnSlice.get();
			Iterator<Row<String, String, String>> rowsIterator = rows.iterator();
			while (rowsIterator.hasNext()) {
				Row<String, String, String> row = rowsIterator.next();
				last_key = row.getKey();
				if (last_key.matches("(.*)" + protIn + "(.*)")) {
					Iterator<HColumn<String, String>> it = row.getColumnSlice().getColumns().iterator();
					while (it.hasNext()) {
						HColumn<String, String> col = it.next();
						List<String> subProt = new ArrayList<String>();
						String subStr = last_key;
						while (subStr.length() > 0 && subStr.contains(protIn)) {
							String first = subStr.substring(0, subStr.indexOf(protIn));
							if (first.length() > 0)
								subProt.add(first);
							subProt.add(protIn);
							subStr = subStr.substring(subStr.indexOf(protIn) + protIn.length(), subStr.length());
						}
						if (subStr.length() > 0)
							subProt.add(subStr);
						String name = col.getName();
						if (name.matches("(.*)jnetpred")) {
							DataBase db = new DataBase();
							db.setProt(last_key);
							db.setId(col.getName());
							db.setJpred(col.getValue());
							db.setSubProt(subProt);
							query.add(db);
						}
					}
				}
			}
			if (rows.getCount() < row_count)
				break;
		}
		*/
		return query;
	}
+
+ /*
+ * Parse a "yyyy/MM/dd" date string into epoch milliseconds.
+ * Returns 0 for null or unparsable input.
+ */
+ private static long DateParsing(String datInput) {
+ if (null == datInput) {
+ return 0;
+ }
+ long parsedTime = 0;
+ SimpleDateFormat parser = new SimpleDateFormat("yyyy/MM/dd");
+ try {
+ parsedTime = parser.parse(datInput).getTime();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return parsedTime;
+ }
+
+ /*
+ * Parse a "yyyy/MM/dd:HH:mm:ss" date:time string into epoch milliseconds.
+ * Returns 0 for null or unparsable input.
+ *
+ * Fix: the pattern previously used "hh" (12-hour clock, 1-12), which
+ * mis-parses afternoon timestamps — e.g. "...:12:30:00" became 00:30.
+ * "HH" (0-23) matches a 24-hour timestamp; assumes the stored values
+ * use the 24-hour clock — TODO confirm against the code that writes
+ * DataBegin/DataEnd.
+ */
+ private static long TimeConvert(String datInput) {
+ long dateWorkSt = 0;
+ if (datInput == null) {
+ return dateWorkSt;
+ }
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd:HH:mm:ss");
+ try {
+ dateWorkSt = formatter.parse(datInput).getTime();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return dateWorkSt;
+ }
+
+ // format epoch milliseconds as a "dd/MM/yyyy" string
+ private static String DateFormat(long inDate) {
+ return new SimpleDateFormat("dd/MM/yyyy").format(new Date(inDate));
+ }
+
+ /*
+ * format epoch milliseconds as a "yyyy/MM/dd" string
+ */
+ public static String DateFormatYYMMDD(long indate) {
+ return new SimpleDateFormat("yyyy/MM/dd").format(new Date(indate));
+ }
+
+ /*
+ * Compute the wall-clock duration (in seconds) of job "id" from its
+ * DataBegin/DataEnd columns in the ProteinLog column family.
+ * NOTE(review): the Hector-based implementation below is commented out
+ * pending a port to the DataStax native driver, so the method currently
+ * always returns 0.
+ */
+ public long CountID(String id) {
+ /*
+ SliceQuery<String, String, String> sliceQuery = HFactory.createSliceQuery(cc.GetKeyspace(), StringSerializer.get(),
+ StringSerializer.get(), StringSerializer.get());
+ sliceQuery.setColumnFamily("ProteinLog").setKey(id).setRange("", "", false, 100);
+ QueryResult<ColumnSlice<String, String>> result = sliceQuery.execute();
+ String datBegin = result.get().getColumnByName("DataBegin").getValue();
+ String datEnd = result.get().getColumnByName("DataEnd").getValue();
+
+ long datBeginLong = TimeConvert(datBegin);
+ long datEndLong = TimeConvert(datEnd);
+ return (datEndLong - datBeginLong) / 1000;
+ */
+ return 0;
+ }
+
+ /*
+ * Initialise the date range used by the statistics queries.
+ * earlestDate is computed once (on the first call) and cached in its
+ * static field; currentDate is refreshed to "today" on every call.
+ */
+ private static void SetDateRange() {
+ if (0 == earlestDate) {
+ StatisticsProt sp = new StatisticsProt();
+ earlestDate = sp.earliestDate();
+ System.out.println("Set earlest Date = " + earlestDate);
+ }
+ Calendar today = Calendar.getInstance();
+ String todayString = today.get(Calendar.YEAR) + "/" + (today.get(Calendar.MONTH) + 1) + "/" + today.get(Calendar.DAY_OF_MONTH);
+ currentDate = DateParsing(todayString);
+ }
+
+ /*
+ * Check whether dateToValidate is a real calendar date in yyyy/MM/dd form.
+ *
+ * @param dateToValidate candidate date string (may be null or empty)
+ * @return true only if the string parses strictly as yyyy/MM/dd
+ */
+ public boolean isThisDateValid(String dateToValidate) {
+ if (dateToValidate == null || dateToValidate.equals("")) {
+ System.out.println("Undefined date");
+ return false;
+ }
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd");
+ // strict parsing: reject impossible dates such as 2013/02/30
+ sdf.setLenient(false);
+ try {
+ // result deliberately discarded — only parseability matters here
+ sdf.parse(dateToValidate);
+ } catch (ParseException e) {
+ // an unparsable string is the expected "invalid" outcome for a
+ // validator, not an error worth dumping a stack trace for
+ return false;
+ }
+ return true;
+ }
+
+ /*
+ * Find the earliest job date (epoch milliseconds) among the ProteinData keys.
+ * NOTE(review): the Hector-based scan below is commented out pending a port
+ * to the DataStax native driver, so no dates are collected at present.
+ *
+ * @return the smallest date key found, or 0 when nothing was collected
+ * (the original code called dateSort.get(0) unconditionally and
+ * threw IndexOutOfBoundsException on an empty result)
+ */
+ public long earliestDate() {
+ ArrayList<Long> dateSort = new ArrayList<Long>();
+ int row_count = 10000;
+ /*
+ RangeSlicesQuery<Long, String, String> result = HFactory.createRangeSlicesQuery(cc.GetKeyspace(), LongSerializer.get(),
+ StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinData");
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ result.setRowCount(row_count);
+ Long last_key = null;
+ while (true) {
+ result.setKeys(last_key, null);
+ QueryResult<OrderedRows<Long, String, String>> columnSlice = result.execute();
+ OrderedRows<Long, String, String> rows = columnSlice.get();
+ Iterator<Row<Long, String, String>> rowsIterator = rows.iterator();
+ while (rowsIterator.hasNext()) {
+ Row<Long, String, String> row = rowsIterator.next();
+ last_key = row.getKey();
+ dateSort.add(last_key);
+ }
+ if (rows.getCount() < row_count)
+ break;
+ }*/
+ if (dateSort.isEmpty()) {
+ // guard against an empty scan result instead of crashing
+ return 0;
+ }
+ Collections.sort(dateSort);
+ return dateSort.get(0);
+ }
+}
--- /dev/null
+Manifest-Version: 1.0\r
+Class-Path: \r
+\r
--- /dev/null
+## Uncomment to enable ProteoCache activity logging\r
+## Valid log levels are:\r
+## TRACE - log everything from below including very detailed messages (useful for debugging only)\r
+## DEBUG - log everything from below including some minor events (useful for debugging only)\r
+## INFO - log everything from below including some information messages\r
+## WARN - log error and warnings\r
+## ERROR - log errors and fatal events only \r
+## FATAL - log fatal events only\r
+\r
+# To see debug messages in TldLocationsCache, uncomment the following line:\r
+#org.apache.jasper.compiler.TldLocationsCache.level = FINE\r
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].level = FINE\r
+\r
+## Uncomment this if you would like the system to log messages into stdout\r
+#log4j.rootLogger=TRACE, rootfile\r
+#log4j.appender.rootfile=org.apache.log4j.RollingFileAppender\r
+#log4j.appender.rootfile.File=JABAWSLogFilename.log\r
+#log4j.appender.rootfile.Target=System.out\r
+#log4j.appender.rootfile.layout=org.apache.log4j.PatternLayout\r
+#log4j.appender.rootfile.layout.ConversionPattern=%t %d %m%n\r
+\r
+## Uncomment to enable JWS2 activity logging to the file\r
+#logDir = .\r
+#log4j.logger.compbio=TRACE, ACTIVITY\r
+#log4j.appender.ACTIVITY=org.apache.log4j.RollingFileAppender\r
+################log4j.appender.ACTIVITY.File=${logDir}/activity.log\r
+#log4j.appender.ACTIVITY.File=${catalina.base}/logs/JABAWSLogFilename.log\r
+#log4j.appender.ACTIVITY.MaxFileSize=10MB\r
+#log4j.appender.ACTIVITY.MaxBackupIndex=10000\r
+#log4j.appender.ACTIVITY.layout=org.apache.log4j.PatternLayout\r
+#log4j.appender.ACTIVITY.layout.ConversionPattern=%d{MM-dd@HH:mm:ss} %-5p %3x - %m%n\r
+\r
+## Uncomment for separate local engine execution log (debugging only)\r
+#log4j.logger.compbio.engine.local.LocalExecutorService=INFO, C\r
+#log4j.appender.C=org.apache.log4j.FileAppender\r
+#log4j.appender.C.File=LocalExecutorService.log\r
+#log4j.appender.C.layout=org.apache.log4j.PatternLayout\r
+#log4j.appender.C.layout.ConversionPattern=%m%n\r
+\r
+##################################################################################################################################\r
+#log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
+#log4j.appender.stdout.Target=System.out\r
+#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
+#log4j.appender.stdout.layout.ConversionPattern=%m%n\r
+#log4j.rootLogger=info, stdout\r
+\r
+log4j.rootLogger=ERROR, R\r
+log4j.appender.R=org.apache.log4j.FileAppender\r
+log4j.appender.R.File=${catalina.base}/logs/errors.log\r
+log4j.appender.R.layout=org.apache.log4j.PatternLayout\r
+log4j.appender.R.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
+\r
+# %d{ABSOLUTE} %5p %c{1}:%L -\r
+log4j.logger.compbio=DEBUG, B\r
+log4j.appender.B=org.apache.log4j.FileAppender\r
+log4j.appender.B.File=${catalina.base}/logs/debugging.log\r
+log4j.appender.B.layout=org.apache.log4j.PatternLayout\r
+log4j.appender.B.layout.ConversionPattern=%m%n %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
+\r
+# %d{ABSOLUTE} %5p %c{1}:%L -\r
+#log4j.logger.compbio.engine.local.LocalExecutorService=INFO, C\r
+#log4j.appender.C=org.apache.log4j.FileAppender\r
+#log4j.appender.C.File=LocalExecutorService.log\r
+#log4j.appender.C.layout=org.apache.log4j.PatternLayout\r
+#log4j.appender.C.layout.ConversionPattern=%m%n\r
+# %d{MM-dd@HH:mm:ss} %-5p (%13F:%L) %3x - \r
--- /dev/null
+<%
+ // Dispatch to the report page selected on Index.jsp.
+ // Constant-first equals: the original query.equals(...) threw a
+ // NullPointerException when the page was hit without a "query" parameter.
+ String query = request.getParameter("query");
+ if ("date".equals(query))
+ response.sendRedirect("QueryJobsByDate.jsp");
+ else if ("length".equals(query))
+ response.sendRedirect("QueryTimeExecition.jsp");
+ else if ("protein".equals(query))
+ response.sendRedirect("QuerySequenceProtein.jsp");
+ else if ("log".equals(query))
+ response.sendRedirect("QueryLogInfo.jsp");
+%>
\ No newline at end of file
--- /dev/null
+<%@ page language="java" contentType="text/html; charset=UTF-8"
+ pageEncoding="UTF-8"%>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Cassandra report</title>
+</head>
+<body>
+ <form method="post" action="AllReports.jsp">
+ <h3>Choose query type</h3>
+ <input type="radio" name="query" value="protein" checked>Search by protein sequence<br/>
+ <input type="radio" name="query" value="date">Search by date<br/>
+ <input type="radio" name="query" value="log">Log information for current job<br/>
+ <input type="radio" name="query" value="length">Usage statistics by job time execution<br/>
+ <input type="submit" name="Search" value="Search"/>
+ </form>
+</body>
+</html>
\ No newline at end of file
--- /dev/null
+<%@ page language="java" contentType="text/html; charset=UTF-8"
+ pageEncoding="UTF-8"%>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Report</title>
+</head>
+<body>
+<h3>Report Results</h3>
+<% String rez = request.getAttribute("result").toString();
+ String[] rezLines = rez.split(";");
+ for (int i = 0; i < rezLines.length; i++) {
+%>
+<%= rezLines[i]+"<br/>" %>
+<%
+}
+%>
+<a href="javascript:history.back()">Click here to go Back</a>
+</body>
+</html>
\ No newline at end of file
--- /dev/null
+<%@ page language="java" contentType="text/html; charset=UTF-8"
+ pageEncoding="UTF-8"%>
+ <%@page import="java.util.Calendar"%>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Date period</title>
+</head>
+<body>
+ <form method="post" action="ServletJobsByDate">
+ <h3>Enter time period</h3>
+ <% Calendar cal = Calendar.getInstance();
+ String datecalBegin = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/1";
+ String datecalEnd = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DAY_OF_MONTH);
+ %>
+ <p>from <input type="text" name="data1" value = <%= datecalBegin%> style=" width : 145px;"/>
+ to <input type="text" name="data2" value = <%= datecalEnd%> style=" width : 145px;"/></p>
+ <input type="checkbox" name="option" value="AllDate">Query for all dates<br>
+ <input type="submit" name="Search" value="Search"/>
+ </form>
+</body>
+</html>
\ No newline at end of file
--- /dev/null
+<%@ page language="java" contentType="text/html; charset=UTF-8"
+ pageEncoding="UTF-8"%>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Log query</title>
+</head>
+<body>
+ <br/>
+ <form method="get" action="ServletLogInfo">
+ <h3>Enter job ID</h3>
+ <input type="text" name="IdJob"><br/>
+ <input type="submit" name="Search" value="Search"/><br/><br/>
+ </form>
+</body>
+</html>
\ No newline at end of file
--- /dev/null
+<%@ page language="java" contentType="text/html; charset=UTF-8"
+ pageEncoding="UTF-8"%>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Sequence of protein</title>
+</head>
+<body>
+ <br/>
+ <form method="get" action="ServletSequenceProtein">
+ <h3>Enter protein sequence</h3>
+ <p><textarea rows="14" cols="80" name="prot">ABCDE</textarea></p>
+ <input type="radio" name="protein" value="whole" checked>search whole<br/>
+ <input type="radio" name="protein" value="part">search part<br/>
+ <input type="submit" name="Search" value="Search sequence"/><br/><br/>
+ <h3>Enter minimum number of jobs per protein</h3>
+ <input type="text" name="counterJob" value = 3><br/>
+ <input type="submit" name="Search" value="Search counter"/><br/><br/>
+ </form>
+</body>
+</html>
--- /dev/null
+<%@ page language="java" contentType="text/html; charset=UTF-8"
+ pageEncoding="UTF-8"%>
+<%@page import="java.util.Calendar"%>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<title>Date interval</title>
+</head>
+<body>
+ <form method="post" action="ServletTimeExecution">
+ <h3>Enter time period</h3>
+ <%
+ Calendar cal = Calendar.getInstance();
+ String datecalBegin = cal.get(Calendar.YEAR) + "/" + cal.get(Calendar.MONTH) + "/" + cal.get(Calendar.DAY_OF_MONTH);
+ String datecalEnd = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DAY_OF_MONTH);
+ %>
+ <p>
+ from <input type="text" name="data1" value=<%=datecalBegin%>
+ style="width: 145px;" /> to <input type="text" name="data2"
+ value=<%=datecalEnd%> style="width: 145px;" />
+ </p>
+ <input type="checkbox" name="option" value="AllDate">Query for
+ all dates<br> <input type="submit" name="Search" value="Search" />
+ </form>
+</body>
+</html>
--- /dev/null
+<%@page import="java.util.ArrayList"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt"%>
+<%@ taglib uri="http://displaytag.sf.net" prefix="dt"%>
+<c:choose>
+ <c:when test="${flag == 'AllDate'}">
+ <h3>Jobs statistics for the whole period</h3>
+ </c:when>
+ <c:otherwise>
+ <h3>
+ Jobs statistics for the Period:
+ <c:out value="${data1}" />
+ to
+ <c:out value="${data2}" />
+ </h3>
+ </c:otherwise>
+</c:choose>
+<h3>Time execution: ${timeExecution} ms</h3>
+
+<c:set var="sum" value="0" />
+<c:forEach items="${result}" var="res">
+ <c:set var="tot" value="${res.total}" />
+ <c:set var="sum" value="${sum + tot}" />
+</c:forEach>
+
+
+<table border="1" style="border-collapse: collapse; white-space: nowrap">
+ <thead>
+ <tr>
+ <th rowspan="2" style="text-align: centre">Date</th>
+ <th colspan="4" style="text-align: centre">Number of Proteins</th>
+ </tr>
+ <tr>
+ <th style="text-align: centre">Total</th>
+ <th style="text-align: centre">Failed</th>
+ <th style="text-align: centre">Cancelled</th>
+ <th style="text-align: centre">Abandoned</th>
+ </tr>
+ </thead>
+ <tbody>
+
+ <tr style="font-weight: bolder;">
+ <td>Total:</td>
+ <td style="text-align: right">${sum}</td>
+ <td style="text-align: right">0</td>
+ <td style="text-align: right">0</td>
+ <td style="text-align: right">0</td>
+ </tr>
+
+ <c:forEach items="${result}" var="res">
+ <tr>
+ <td>${res.date}</td>
+ <td style="text-align: right"><c:out value="${res.total}" /></td>
+ <td style="text-align: right">0</td>
+ <td style="text-align: right">0</td>
+ <td style="text-align: right">0</td>
+ </tr>
+ </c:forEach>
+
+ </tbody>
+</table>
\ No newline at end of file
--- /dev/null
+<%@page import="java.util.ArrayList"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn" %>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt" %>
+<%@ taglib uri="http://displaytag.sf.net" prefix="dt" %>
+<h3>Jobs log information for: ${IdJob}</h3>
+<h3>Time execution: ${timeExecution} ms</h3>
+<p> Date Start : ${result.logInfo.dateStart}<br/>
+ Date End : ${result.logInfo.dateEnd}<br/>
+ IP : ${result.logInfo.ip}</p>
+<table border="1" style="border-collapse: collapse; white-space: nowrap">
+<tbody>
+<tr>
+<td> Sequence</td>
+<td style="text-align: left; border-buttom: dotted; font-family: monospace"> ${result.logInfo.sequence}</td>
+</tr>
+<c:forEach items="${result.logInfo.prediction}" var="pred">
+<tr>
+ <td> ${pred.key}</td>
+ <td style="text-align: left; border-buttom: dotted; font-family: monospace"> ${pred.value}</td>
+</tr>
+</c:forEach>
+</tbody>
+</table>
--- /dev/null
+<%@page import="java.util.ArrayList"%>
+<%@ page trimDirectiveWhitespaces="true"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt"%>
+<%@ taglib uri="http://displaytag.sf.net" prefix="dt"%>
+<c:choose>
+ <c:when test="${search == 'Search counter'}">
+ <h3>Dundee ProteoCache query result for jobs more than ${counter} </h3>
+ </c:when>
+ <c:otherwise>
+ <h3>Dundee ProteoCache query result</h3>
+ </c:otherwise>
+</c:choose>
+<h3>Time execution: ${timeExecution} ms</h3>
+<table border="1" style="border-collapse: collapse; white-space: nowrap">
+ <c:choose>
+ <c:when test="${search == 'Search counter'}">
+ <thead>
+ <tr>
+ <th style="text-align: centre">Number of jobs</th>
+ <th style="text-align: left">Protein</th>
+ </tr>
+ </thead>
+ <tbody>
+ <c:forEach items="${result}" var="res">
+ <tr>
+ <td>${res.totalId}</td>
+ <td
+ style="text-align: left; border-buttom: dotted; font-family: monospace"><a
+ title="Click to view predictions"
+ href="ServletSequenceProtein?prot=${res.prot}&protein=whole&Search=Search+sequence&counterJob=${counter}">${res.prot}</a>
+ </td>
+ </tr>
+ </c:forEach>
+ </c:when>
+ <c:otherwise>
+ <thead>
+ <tr>
+ <th style="text-align: centre">ID</th>
+ <th style="text-align: centre">Prediction</th>
+ </tr>
+ </thead>
+ <tbody>
+ <c:forEach items="${result}" var="res" varStatus="status">
+ <tr>
+ <td rowspan="2">${res.id}</td>
+ <c:if test="${flag == 'whole'}">
+ <td
+ style="text-align: left; border-buttom: dotted; font-family: monospace"><c:out
+ value="${res.prot}" /></td>
+ </c:if>
+ <c:if test="${flag == 'part'}">
+ <td
+ style="text-align: left; border-buttom: dotted; font-family: monospace">
+ <c:forEach items="${res.subProt}" var="seq">
+ <c:choose>
+ <c:when test="${fn:contains(seq, prot)}">
+ <span style="background-color: blue">${seq}</span>
+ </c:when>
+ <c:otherwise>${seq}</c:otherwise>
+ </c:choose>
+ </c:forEach>
+ </td>
+ </c:if>
+ </tr>
+ <tr>
+ <td
+ style="text-align: left; border-top: hidden; font-family: monospace"><c:out
+ value="${res.jpred}" /></td>
+ </tr>
+ </c:forEach>
+ </c:otherwise>
+ </c:choose>
+ </tbody>
+</table>
--- /dev/null
+<%@page import="java.util.ArrayList"%>
+<%@ page trimDirectiveWhitespaces="true"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn"%>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt"%>
+<%@ taglib uri="http://displaytag.sf.net" prefix="dt"%>
+<c:choose>
+ <c:when test="${flag == 'AllDate'}">
+ <h3>Time execution for the whole period</h3>
+ </c:when>
+ <c:otherwise>
+ <h3>Time execution for the interval: ${data1} - ${data2}</h3>
+ </c:otherwise>
+</c:choose>
+<h3>Time execution: ${timeExecution} ms</h3>
+
+<c:set var="sum" value="0" />
+<c:forEach items="${result}" var="res" varStatus="loop">
+ <c:choose>
+ <c:when test="${loop.last}">
+ <c:forEach items="${res.timeTotalExec}" var="total">
+ <c:set var="sum" value="${sum + total}" />
+ </c:forEach>
+ </c:when>
+ </c:choose>
+</c:forEach>
+
+<h3>Total number of jobs: ${sum}</h3>
+
+<table border="1" style="border-collapse: collapse; white-space: nowrap">
+ <thead>
+ <tr>
+ <th style="text-align: centre; width: 150px">Date</th>
+ <th style="text-align: centre; width: 150px">less then 30 s</th>
+ <th style="text-align: centre; width: 150px">30 s - 60 s</th>
+ <th style="text-align: centre; width: 150px">1 min - 2 min</th>
+ <th style="text-align: centre; width: 150px">2 min - 10 min</th>
+ <th style="text-align: centre; width: 150px">more then 10 min</th>
+ </tr>
+ </thead>
+ <tbody>
+ <c:forEach items="${result}" var="res" varStatus="loop">
+ <c:choose>
+ <c:when test="${loop.last}">
+ <tr style="font-weight: bolder;">
+ <td>Total:</td>
+ <c:forEach items="${res.timeTotalExec}" var="total">
+ <td style="text-align: right">${total}</td>
+ </c:forEach>
+ </tr>
+ </c:when>
+ </c:choose>
+ </c:forEach>
+
+ <c:forEach items="${result}" var="res" varStatus="loop">
+ <c:choose>
+ <c:when test="${not loop.last}">
+ <tr>
+ <td>${res.date}</td>
+ <c:forEach items="${res.timeRez}" var="time">
+ <td style="text-align: right">${time}</td>
+ </c:forEach>
+ </tr>
+ </c:when>
+ </c:choose>
+ </c:forEach>
+ </tbody>
+</table>
--- /dev/null
+<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn" %>
+<%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt" %>
+<%@ taglib uri="http://displaytag.sf.net" prefix="dt" %>
+
+<table class="its" style="width:800px" border="1">
+<thead>
+<tr>
+<th rowspan="2"style="text-align: centre">Date</th>
+<th colspan="4" style="text-align: centre">Number of Proteins</th>
+</tr>
+<tr>
+<th style="text-align: centre">Total</th>
+<th style="text-align: centre">Failed</th>
+<th style="text-align: centre">Cancelled</th>
+<th style="text-align: centre">Abandoned</th>
+</tr>
+</thead>
+<tbody>
+<c:set var="sum" value="0" />
+<c:forEach items="${statistics}" var="res">
+<tr>
+<td>${res.date}</td>
+<c:set var="tot" value="${res.total}" />
+<c:set var="sum" value="${sum + tot}" />
+<td style="text-align: right"><c:out value = "${res.total}" /></td>
+<td style="text-align: right">0</td>
+<td style="text-align: right">0</td>
+<td style="text-align: right">0</td>
+</tr>
+</c:forEach>
+<tr style="font-weight: bolder;">
+<td>Total:</td>
+<td style="text-align: right">${sum}</td>
+<td style="text-align: right">0</td>
+<td style="text-align: right">0</td>
+<td style="text-align: right">0</td>
+
+</tr>
+</tbody>
+</table>
--- /dev/null
+<web-app xmlns="http://java.sun.com/xml/ns/j2ee"\r
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
+ xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd"\r
+ version="2.4">\r
+\r
+<display-name>ProteoCache</display-name>\r
+<description>ProteoCache - Results repository for the Dundee Resource</description>\r
+\r
+ <welcome-file-list>\r
+ <welcome-file>Index.jsp</welcome-file>\r
+ </welcome-file-list>\r
+\r
+ <!-- ProteoCache listeners -->\r
+ <listener>\r
+ <listener-class>compbio.listeners.ContextListener</listener-class>\r
+ </listener>\r
+\r
+\r
+ <!-- Session timeout in minutes -->\r
+ <session-config>\r
+ <session-timeout>20</session-timeout>\r
+ </session-config>\r
+\r
+\r
+ <servlet>\r
+ <display-name>This is a standard tomcat 'default' servlet for making listings</display-name>\r
+ <servlet-name>listings</servlet-name>\r
+ <servlet-class>org.apache.catalina.servlets.DefaultServlet</servlet-class>\r
+ <init-param>\r
+ <param-name>debug</param-name>\r
+ <param-value>0</param-value>\r
+ </init-param>\r
+ <init-param>\r
+ <param-name>readonly</param-name>\r
+ <param-value>true</param-value>\r
+ </init-param>\r
+ <init-param>\r
+ <param-name>listings</param-name>\r
+ <param-value>true</param-value>\r
+ </init-param>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+\r
+ <!-- ProteoCache servlets -->\r
+<!--\r
+ <servlet>\r
+ <description>Display pre-calculated accounting info</description>\r
+ <servlet-name>DisplayStat</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.DisplayStat</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <description>WebServices Status</description>\r
+ <servlet-name>ServiceStatus</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.ServiceStatus</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>DownloadRedirector</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.DownloadRedirector</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <description>WebService Status with no UI. Accepts web service name as a parameter</description>\r
+ <servlet-name>HttpCodeResponseServiceStatus</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.HttpCodeResponseServiceStatus</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <description>Display job list</description>\r
+ <servlet-name>Joblist</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.Joblist</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <description>Display monthly summary statistics</description>\r
+ <servlet-name>AnnualStat</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.AnnualStat</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <description>Display monthly summary statistics with no links to details</description>\r
+ <servlet-name>PublicAnnualStat</servlet-name>\r
+ <servlet-class>compbio.stat.servlet.AnnualStat</servlet-class>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>RegistryWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>ClustalWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>ClustalOWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>MuscleWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>MafftWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>TcoffeeWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>ProbconsWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>MSAprobsWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>GLprobsWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>AAConWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>JronnWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>DisemblWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>GlobPlotWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>IUPredWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+\r
+ <servlet>\r
+ <servlet-name>JpredWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+ <servlet>\r
+ <servlet-name>RNAalifoldWS</servlet-name>\r
+ <servlet-class>com.sun.xml.ws.transport.http.servlet.WSServlet</servlet-class>\r
+ <load-on-startup>1</load-on-startup>\r
+ </servlet>\r
+-->\r
+\r
+\r
+\r
+ <!-- ProteoCache servlet mappings -->\r
+<!--\r
+ <servlet-mapping>\r
+ <servlet-name>listings</servlet-name>\r
+ <url-pattern>/</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>DownloadRedirector</servlet-name>\r
+ <url-pattern>/download</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>DisplayStat</servlet-name>\r
+ <url-pattern>/DisplayStat</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>ServiceStatus</servlet-name>\r
+ <url-pattern>/ServiceStatus</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>HttpCodeResponseServiceStatus</servlet-name>\r
+ <url-pattern>/HttpCodeResponseServiceStatus/*</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>AnnualStat</servlet-name>\r
+ <url-pattern>/AnnualStat</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>PublicAnnualStat</servlet-name>\r
+ <url-pattern>/PublicAnnualStat</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>Joblist</servlet-name>\r
+ <url-pattern>/Joblist</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>RegistryWS</servlet-name>\r
+ <url-pattern>/RegistryWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>ClustalWS</servlet-name>\r
+ <url-pattern>/ClustalWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>ClustalOWS</servlet-name>\r
+ <url-pattern>/ClustalOWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>MuscleWS</servlet-name>\r
+ <url-pattern>/MuscleWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>MafftWS</servlet-name>\r
+ <url-pattern>/MafftWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>TcoffeeWS</servlet-name>\r
+ <url-pattern>/TcoffeeWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>ProbconsWS</servlet-name>\r
+ <url-pattern>/ProbconsWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>MSAprobsWS</servlet-name>\r
+ <url-pattern>/MSAprobsWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>GLprobsWS</servlet-name>\r
+ <url-pattern>/GLprobsWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>AAConWS</servlet-name>\r
+ <url-pattern>/AAConWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>JronnWS</servlet-name>\r
+ <url-pattern>/JronnWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>DisemblWS</servlet-name>\r
+ <url-pattern>/DisemblWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>GlobPlotWS</servlet-name>\r
+ <url-pattern>/GlobPlotWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>IUPredWS</servlet-name>\r
+ <url-pattern>/IUPredWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>JpredWS</servlet-name>\r
+ <url-pattern>/JpredWS</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ <servlet-mapping>\r
+ <servlet-name>RNAalifoldWS</servlet-name>\r
+ <url-pattern>/RNAalifoldWS</url-pattern>\r
+ </servlet-mapping>\r
+-->\r
+\r
+\r
+ <!-- ProteoCache security constraints -->\r
+ <security-constraint>\r
+ <web-resource-collection>\r
+ <web-resource-name>Administrator pages</web-resource-name>\r
+ <!-- Prohibit access to any resources -->\r
+ <url-pattern>/DisplayStat</url-pattern>\r
+ <url-pattern>/Joblist</url-pattern>\r
+ <url-pattern>/AnnualStat</url-pattern>\r
+ <url-pattern>/conf/*</url-pattern>\r
+ <url-pattern>/binaries/*</url-pattern>\r
+ <url-pattern>/testsrc/*</url-pattern>\r
+ <url-pattern>/lib/*</url-pattern>\r
+ <!-- Uncomment this to secure services status checker \r
+ <url-pattern>/HttpCodeResponseServiceStatus/*</url-pattern>\r
+ <url-pattern>/ServiceStatus</url-pattern>\r
+ -->\r
+ <!-- No http-method means all methods are constrained -->\r
+ </web-resource-collection>\r
+ <auth-constraint>\r
+ <role-name>admin</role-name>\r
+ </auth-constraint>\r
+ </security-constraint>\r
+\r
+\r
+ <!-- unsupported HTTP methods -->\r
+ <security-constraint>\r
+ <web-resource-collection>\r
+ <web-resource-name>unsupported HTTP methods</web-resource-name>\r
+ <url-pattern>/*</url-pattern>\r
+ <http-method>PUT</http-method>\r
+ <http-method>DELETE</http-method>\r
+ <http-method>TRACE</http-method>\r
+ <http-method>OPTIONS</http-method>\r
+ </web-resource-collection>\r
+ <auth-constraint/>\r
+ </security-constraint>\r
+\r
+ <!-- Define the Login Configuration for this Application -->\r
+ <login-config>\r
+ <auth-method>BASIC</auth-method>\r
+ <realm-name>ProteoCache administrators</realm-name>\r
+ </login-config>\r
+\r
+\r
+ <!-- Security roles referenced by this web application -->\r
+ <security-role>\r
+ <description>The role that is required to log in and view ProteoCache internals</description>\r
+ <role-name>admin</role-name>\r
+ </security-role>\r
+\r
+</web-app>\r
--- /dev/null
+#################################################################################\r
+# enable or disable engine true | false\r
+# OPTIONAL defaults to true\r
+engine.local.enable=true\r
+\r
+#################################################################################\r
+# Directory to use for temporary files storage\r
+# OPTIONAL defaults to java temporary directory \r
+# Relative path within the project will be converted in absolute at runtime\r
+#local.tmp.directory=/cluster/gjb_lab/fc/www-jws2/jaba/local_jobsout\r
+local.tmp.directory=jobsout\r
+\r
+#################################################################################\r
+# Number of threads for tasks execution (valid values between 1 and 2x,\r
+# where x is the number of cores available in the system)\r
+# OPTIONAL defaults to the number of cores for core number <=4 and \r
+# number of cores-1 for greater core numbers\r
+engine.local.thread.number=2\r
+\r
+#################################################################################\r
+# Enable/disable cluster statistics collector\r
+local.stat.collector.enable=true\r
+# Frequency of statistics collecting (in minutes)\r
+# normal rate: once a day = 24 * 60 = 1440\r
+local.stat.collector.update.frequency=1\r
+\r
+# Maximum amount of time the job is considered running in hours\r
+# Optional defaults to 24 hours\r
+local.stat.maxruntime=6\r
+\r
+#################################################################################\r
+# Maximum amount of time the job directory is living (in hours), \r
+# -1 means the directories are never deleted\r
+# Defaults is one week, i.e. 168 hours\r
+local.jobdir.maxlifespan=168\r
+# Frequency of cleaning job directory (in minutes)\r
+# normal rate: once a day = 24 * 60 = 1440\r
+local.jobdir.cleaning.frequency=1\r
--- /dev/null
+# This file contains settings for Google Analytics (GA) Statistics Engine.\r
+# Feel free to remove/delete this file if you do not want us to see that you\r
+# use ProteoCache. \r
+# However, we would appreciate it greatly if you could leave it on. ProteoCache \r
+# usage statistics are collected for funding and reporting purposes, and no \r
+# private information is collected. The data sent by ProteoCache is as follows: \r
+# 1) The IP address of the ProteoCache server machine.\r
+# 2) The name of the web service that was called.\r
+# 3) A few details of the system such as java version, user language, \r
+# color depth, screen, resolution and character encoding.\r
+\r
+# All calls to GA are completed asynchronously and create very little\r
+# overhead. Thus they do not influence the server response time or performance.\r
+\r
+# Values yes or true = enable GA, any other value will disable it\r
+enable.ga=no\r
+# If set to yes replaces your server name/IP address with 127.0.0.1\r
+anonymize.ip=no\r
+\r
--- /dev/null
+#################################################################################
+# Cassandra host or IP
+# test server is 10.0.115.190
+cassandra.host=localhost
+
+cassandra.newtables.update=false
+
+#################################################################################
+# Jpred sources
+# real Jpred web-server
+cassandra.jpred.web.update=true
+cassandra.jpred.web.inidelay=0
+cassandra.jpred.web.updaterate=30
+
+# update time period (in days)
+# by default covers the last 100 days
+cassandra.jpred.web.period=5
+
+
+#################################################################################
+# local test job source
+cassandra.jpred.local.update=false
+cassandra.jpred.local.inidelay=10
+cassandra.jpred.local.updaterate=200
+
+# update time period (in days)
+# by default covers the last 100 days
+cassandra.jpred.local.period=300