--- /dev/null
+package compbio.cassandra;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.BoundStatement;
+
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.cassandra.CassandraNativeConnector;
+import compbio.util.PropertyHelper;
+
+public class CassandraNewTableWriter {
+ private Session session;
+ private static Logger log = Logger.getLogger(CassandraNewTableWriter.class);
+
+ public CassandraNewTableWriter() {
+ Session inis = CassandraNativeConnector.getSession();
+ setSession(inis);
+ }
+
+ public void setSession(Session s) {
+ assert s != null;
+ session = s;
+ }
+
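+ /*
+ * true if no job with the given id is recorded in the ProteinLog table
+ */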
+ public boolean JobisNotInsterted(String jobid) {
+ ResultSet results = session.execute("SELECT * FROM ProteinLog WHERE JobID = '" + jobid + "';");
+ return results.isExhausted();
+ }
+
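+ /*
+ * true if no job with the given id is recorded in the JpredArchive table
+ */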
+ public boolean JobisNotArchived(String jobid) {
+ ResultSet results = session.execute("SELECT * FROM JpredArchive WHERE JobID = '" + jobid + "';");
+ return results.isExhausted();
+ }
+
+ /*
+ * fill the JobDateInfo table: for every day between the earliest job
+ * date in the database and today, store the number of jobs run that day
+ */
+ public void FillNewTable() {
+ long earliestDate = CassandraNativeConnector.getEarliestDateInDB();
+ Calendar start = Calendar.getInstance();
+ start.setTime(new Date(earliestDate));
+ Calendar endcal = Calendar.getInstance();
+ Date end = endcal.getTime();
+ // walk day by day from the earliest job date to today
+ for (Date date = start.getTime(); !start.after(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
+ String query1 = "SELECT * FROM ProteinData WHERE jobtime = " + date.getTime() + ";";
+ System.out.println("Query db: " + query1);
+ ResultSet results = session.execute(query1);
+ // store the number of jobs executed on this day
+ String query2 = "INSERT INTO JobDateInfo (jobday, Total) VALUES (" + date.getTime() + "," + results.all().size() + ");";
+ System.out.println("Insert DB: " + query2);
+ session.execute(query2);
+ }
+ System.out.println("Table JobDateInfo filled");
+ }
+
+ /*
+ * find the earliest job date in ProteinData and store it as the
+ * EarliestJobDate entry of the MainParameters table
+ */
+ public void FillParameters() {
+ Date oldEarliestDate = new Date(CassandraNativeConnector.getEarliestDateInDB());
+ System.out.println("Old EarliestJobDate is " + oldEarliestDate.toString());
+
+ String query1 = "SELECT jobtime FROM ProteinData LIMIT 2000000;";
+ System.out.println("Query db: " + query1);
+ ResultSet results = session.execute(query1);
+ Calendar endcal = Calendar.getInstance();
+ long newearliestday = endcal.getTime().getTime();
+ // scan all job times and keep the smallest one
+ while (!results.isExhausted()) {
+ Row r = results.one();
+ long day = r.getLong("jobtime");
+ if (day < newearliestday) {
+ newearliestday = day;
+ }
+ }
+ String com = "INSERT INTO MainParameters (Name, Value) VALUES ('EarliestJobDate','" + String.valueOf(newearliestday) + "');";
+ session.execute(com);
+ System.out.println("New EarliestJobDate is " + new Date(newearliestday).toString());
+ }
+}
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
+import org.apache.log4j.Logger;
+
+import com.datastax.driver.core.Session;
+
import compbio.cassandra.CassandraNativeConnector;
+import compbio.cassandra.CassandraNewTableWriter;
+import compbio.cassandra.JpredParserHTTP;
+import compbio.cassandra.JpredParserLocalFile;
import compbio.engine.ProteoCachePropertyHelperManager;
import compbio.util.PropertyHelper;
+import compbio.util.Util;
/**
* Application Lifecycle Listener implementation class ContextListener
@WebListener
public class ContextListener implements ServletContextListener {
private ScheduledExecutorService webjob_scheduler;
+ private ScheduledExecutorService localjob_scheduler;
CassandraNativeConnector db = new CassandraNativeConnector();
static PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
+ private static Logger log = Logger.getLogger(ContextListener.class);
+ public static boolean READ_WEB_JPRED = false;
+ public static boolean READ_LOCALFILE_JPRED = false;
+
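+ // read a boolean property; a missing or empty value is treated as false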
+ private static boolean initBooleanValue(String key) {
+ assert key != null;
+ String status = ph.getProperty(key);
+ log.debug("Loading property: " + key + " with value: " + status);
+ if (Util.isEmpty(status)) {
+ return false;
+ }
+ return Boolean.parseBoolean(status.trim());
+ }
/**
* @see ServletContextListener#contextInitialized(ServletContextEvent)
*/
public void contextInitialized(ServletContextEvent arg0) {
System.out.println("ProteoCache session start......");
+ // connect to the db and create table if needed
db.Connect();
+ CassandraNewTableWriter updater = new CassandraNewTableWriter();
+
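+ // one-off calls that populate the new tables; disabled by default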
+ // updater.FillParameters();
+ // updater.FillNewTable();
+
+ READ_WEB_JPRED = initBooleanValue("cassandra.jpred.web.update");
+ READ_LOCALFILE_JPRED = initBooleanValue("cassandra.jpred.local.update");
+
+ if (READ_WEB_JPRED) {
+ // get data from real Jpred production server
+ final String datasrc = "http://www.compbio.dundee.ac.uk/www-jpred/results/usage-new/alljobs.dat";
+ final String prefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
+ final JpredParserHTTP parser = new JpredParserHTTP(prefix);
+
+ int initialdelay = 300;
+ int updaterate = 600;
+ int newinitialdelay = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.web.inidelay"));
+ if (0 <= newinitialdelay) {
+ initialdelay = newinitialdelay;
+ }
+ int newupdaterate = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.web.updaterate"));
+ if (0 < newupdaterate) {
+ updaterate = newupdaterate;
+ }
+ final int updateperiod = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.web.period"));
- webjob_scheduler = Executors.newSingleThreadScheduledExecutor();
- webjob_scheduler.scheduleAtFixedRate(new Runnable() {
- @Override
- public void run() {
- try {
- db.Parsing();
- } catch (IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ webjob_scheduler = Executors.newSingleThreadScheduledExecutor();
+ System.out.println("Initializating web job scheduler");
+ System.out.println(" initial delay = " + initialdelay + " seconds");
+ System.out.println(" update rate = " + updaterate + " seconds");
+ if (0 < updateperiod)
+ System.out.println(" update period = " + updateperiod + " days");
+ else
+ System.out.println(" update period = 5 days");
+
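+ // re-read the remote job list every updaterate seconds; each pass covers the last updateperiod days (5 by default)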
+ webjob_scheduler.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (0 < updateperiod) {
+ parser.Parsing(datasrc, updateperiod);
+ } else {
+ parser.Parsing(datasrc, 5);
+ }
+ } catch (IOException e) {
+ log.error("Jpred web data update failed", e);
+ }
}
+ }, initialdelay, updaterate, TimeUnit.SECONDS);
+ }
+
+ if (READ_LOCALFILE_JPRED) {
+ // get artificial data generated for the DB stress tests
+ final String datasrc = "/home/asherstnev/Projects/Java.projects/proteocache/data_stress_test/data.dat";
+ final String prefix = "/home/asherstnev/Projects/Java.projects/proteocache/data_stress_test/Jpreddata";
+ final JpredParserLocalFile parser = new JpredParserLocalFile(prefix);
+
+ int initialdelay = 300;
+ int updaterate = 600;
+ int newinitialdelay = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.local.inidelay"));
+ if (0 <= newinitialdelay) {
+ initialdelay = newinitialdelay;
+ }
+ int newupdaterate = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.local.updaterate"));
+ if (0 < newupdaterate) {
+ updaterate = newupdaterate;
}
- }, 0, 600, TimeUnit.SECONDS);
+ final int updateperiod = ProteoCachePropertyHelperManager.getIntProperty(ph.getProperty("cassandra.jpred.local.period"));
+
+ localjob_scheduler = Executors.newSingleThreadScheduledExecutor();
+ System.out.println("Initializating local job scheduler");
+ System.out.println(" initial delay = " + initialdelay + " seconds");
+ System.out.println(" update rate = " + updaterate + " seconds");
+ if (0 < updateperiod)
+ System.out.println(" update period = " + updateperiod + " days");
+ else
+ System.out.println(" update period = 5 days");
+ localjob_scheduler.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (0 < updateperiod) {
+ parser.Parsing(datasrc, updateperiod);
+ } else {
+ parser.Parsing(datasrc, 100);
+ }
+ } catch (IOException e) {
+ log.error("Jpred local data update failed", e);
+ }
+ }
+ }, initialdelay, updaterate, TimeUnit.SECONDS);
+ }
}
public void contextDestroyed(ServletContextEvent arg0) {
db.Closing();
System.out.println("Shut down ProteoCache......");
- webjob_scheduler.shutdownNow();
+ if (READ_WEB_JPRED) {
+ webjob_scheduler.shutdownNow();
+ }
+ if (READ_LOCALFILE_JPRED) {
+ localjob_scheduler.shutdownNow();
+ }
}
}
import java.util.Map;
import compbio.cassandra.CassandraNativeConnector;
+import compbio.cassandra.CassandraReader;
import compbio.cassandra.DataBase;
import compbio.cassandra.Pair;
import compbio.cassandra.StructureJobLog;
import compbio.cassandra.StructureProteinPrediction;
public class CassandraRequester {
- private CassandraNativeConnector DBInstance = new CassandraNativeConnector();
+ private CassandraReader db = new CassandraReader();
private ArrayList<DataBase> query;
private static long currentDate = 0;
private static long earlestDate = 0;
List<Integer> totalTime = new ArrayList<Integer>();
for (int i = 0; i < nbins; i++)
totalTime.add(i, 0);
- List<Pair<String, String>> res = DBInstance.ReadProteinDataTable();
+ List<Pair<String, String>> res = db.ReadProteinDataTable();
List<Pair<Date, Long>> numres = new ArrayList<Pair<Date, Long>>();
for (Pair<String, String> entry : res) {
end.setTime(new Date(dateEnd));
query = new ArrayList<DataBase>();
for (Date date = start.getTime(); !start.after(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
- Integer res = DBInstance.ReadDateTable(date.getTime());
+ Integer res = db.ReadDateTable(date.getTime());
if (res == null)
continue;
DataBase db = new DataBase();
db.setTotal(res);
db.setDate(DateFormat(date.getTime()));
query.add(db);
- }
+ }
System.out.println("StatisticsProt.readLength: total number of dates = " + query.size());
return query;
}
query = new ArrayList<DataBase>();
List<StructureProteinPrediction> res;
if (flag.equals("whole"))
- res = DBInstance.ReadWholeSequence(protIn);
+ res = db.ReadWholeSequence(protIn);
else
- res = DBInstance.ReadPartOfSequence(protIn);
+ res = db.ReadPartOfSequence(protIn);
for (StructureProteinPrediction entry : res) {
Map<String,String> pred = entry.getPrediction();
Iterator it = pred.entrySet().iterator();
* */
public List<DataBase> readProteinByCounter(int counter) {
query = new ArrayList<DataBase>();
- Map<String, Integer> map = DBInstance.ReadProteinDataByCounter();
+ Map<String, Integer> map = db.ReadProteinDataByCounter();
for (Map.Entry<String, Integer> entry : map.entrySet()) {
if (entry.getValue() > counter) {
DataBase db = new DataBase();
*/
public DataBase readJobLog(String jobid) {
// query = new ArrayList<DataBase>();
- StructureJobLog res = DBInstance.ReadJobLog(jobid);
+ StructureJobLog res = db.ReadJobLog(jobid);
DataBase query = new DataBase();
query.setLogInfo(res);
// query.setres);
* find the earliest date in the database
*/
public long earliestDate() {
- earlestDate = DBInstance.getEarliestDateInDB();
+ earlestDate = CassandraNativeConnector.getEarliestDateInDB();
return earlestDate;
}