--- /dev/null
+package combio.cassandra;
+
+import java.util.Arrays;
+
+import me.prettyprint.cassandra.serializers.LongSerializer;
+import me.prettyprint.cassandra.serializers.StringSerializer;
+import me.prettyprint.cassandra.service.ThriftKsDef;
+import me.prettyprint.hector.api.Cluster;
+import me.prettyprint.hector.api.Keyspace;
+import me.prettyprint.hector.api.beans.ColumnSlice;
+import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
+import me.prettyprint.hector.api.ddl.ComparatorType;
+import me.prettyprint.hector.api.ddl.KeyspaceDefinition;
+import me.prettyprint.hector.api.factory.HFactory;
+import me.prettyprint.hector.api.mutation.Mutator;
+import me.prettyprint.hector.api.query.QueryResult;
+import me.prettyprint.hector.api.query.SliceQuery;
+
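+/**
+ * Connects to a local Cassandra node ("Protein Cluster", 127.0.0.1:9160) via Hector,
+ * creates the ProteinKeyspace with the ProteinRow, ProteinLog and ProteinData column
+ * families if they do not exist, and provides the mutators and queries used to store
+ * parsed JPred jobs.
+ */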
+public class CassandraCreate {
+ private static Keyspace ksp;
+ private static Cluster cluster;
+ private static Mutator<Long> mutatorLong;
+ private static Mutator<String> mutatorString;
+ private static Mutator<String> mutatorLog;
+ StringSerializer ss = StringSerializer.get();
+ LongSerializer ls = LongSerializer.get();
+
+
+ // connect to the cluster and create the keyspace and column families if they do not exist yet
+ public void Connection() {
+ cluster = HFactory.getOrCreateCluster("Protein Cluster", "127.0.0.1:9160");
+ KeyspaceDefinition keyspaceDef = cluster.describeKeyspace("ProteinKeyspace");
+ // If keyspace does not exist, the CFs don't exist either. => create them.
+/* if (keyspaceDef != null) {
+ cluster.dropColumnFamily("ProteinKeyspace", "ProteinRow", true);
+ cluster.dropColumnFamily("ProteinKeyspace", "ProteinData", true);
+ cluster.dropKeyspace("ProteinKeyspace", true);
+ System.out.println("ProteinKeyspace has been dropped");
+ } else*/ if (keyspaceDef == null) { // create the keyspace and its column families
+ System.out.println("ProteinKeyspace does not exist yet, creating it");
+ ColumnFamilyDefinition cfProtein = HFactory.createColumnFamilyDefinition("ProteinKeyspace", "ProteinRow", ComparatorType.ASCIITYPE);
+ ColumnFamilyDefinition cfLog = HFactory.createColumnFamilyDefinition("ProteinKeyspace", "ProteinLog", ComparatorType.ASCIITYPE);
+ ColumnFamilyDefinition cfData = HFactory.createColumnFamilyDefinition("ProteinKeyspace", "ProteinData", ComparatorType.ASCIITYPE);
+
+ KeyspaceDefinition newKeyspace = HFactory.createKeyspaceDefinition("ProteinKeyspace",
+ ThriftKsDef.DEF_STRATEGY_CLASS, 1, Arrays.asList(cfProtein, cfLog, cfData));
+ // Add the schema to the cluster.
+ // "true" as the second param means that Hector will block until all nodes see the change.
+ // The three column families are part of the keyspace definition, so addKeyspace creates
+ // them as well; adding them again with addColumnFamily would fail because they already exist.
+ cluster.addKeyspace(newKeyspace, true);
+ }
+ ksp = HFactory.createKeyspace("ProteinKeyspace", cluster);
+ System.out.println("Cassantra has been connected");
+ }
+
+ // parsing data from http://www.compbio.dundee.ac.uk/www-jpred/results/usage-new/alljobs.dat
+ public void Parsing() {
+ mutatorString = HFactory.createMutator(ksp, ss); // CF ProteinRow stores the protein sequence and its prediction
+ mutatorLog = HFactory.createMutator(ksp, ss); // CF ProteinLog stores log information (IP, job id, start and end dates)
+ mutatorLong = HFactory.createMutator(ksp, ls); // CF ProteinData stores job id and protein per day
+ System.out.println("Parsing......");
+ String in = "http://www.compbio.dundee.ac.uk/www-jpred/results/usage-new/alljobs.dat";
+ DataParsing datParsing = new DataParsing();
+ datParsing.ParsingTest(in);
+ mutatorString.execute();
+ mutatorLong.execute();
+ mutatorLog.execute();
+ System.out.println("Data Inserted");
+ }
+
+ public void Closing() {
+ cluster.getConnectionManager().shutdown();
+ System.out.println("Cassantra has been closed");
+ }
+
+ // check whether a job with this id already exists in ProteinLog (the argument is the job id)
+ public boolean CheckIP(String id) {
+ SliceQuery<String, String, String> sliceQuery = HFactory.createSliceQuery(ksp, ss, ss, ss);
+ sliceQuery.setColumnFamily("ProteinLog").setKey(id).setRange("", "", false, 100);
+ QueryResult<ColumnSlice<String, String>> result = sliceQuery.execute();
+ return result.get().getColumns().size() > 0;
+ }
+
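+ // write one job to all three column families:
+ // ProteinLog (key = job id): IP, start/end dates, status and protein sequence
+ // ProteinRow (key = protein sequence): prediction stored under the job id
+ // ProteinData (key = day as a long timestamp): protein stored under the job id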
+ public void InsertData(long dataWork, String dataBegin, String dataEnd, String ip, String id, String statusEx, String statusFinal, String protein, String jnetpred) {
+ mutatorLog.addInsertion(id, "ProteinLog", HFactory.createColumn("ip", ip, ss, ss))
+ .addInsertion(id, "ProteinLog", HFactory.createColumn("DataBegin", dataBegin, ss, ss))
+ .addInsertion(id, "ProteinLog", HFactory.createColumn("DataEnd", dataEnd, ss, ss))
+ .addInsertion(id, "ProteinLog", HFactory.createColumn("Status ex", statusEx, ss, ss))
+ .addInsertion(id, "ProteinLog", HFactory.createColumn("Status final", statusFinal, ss, ss))
+ .addInsertion(id, "ProteinLog", HFactory.createColumn("Protein", protein, ss, ss));
+ mutatorString.addInsertion(protein, "ProteinRow", HFactory.createColumn(id, jnetpred, ss, ss));
+ mutatorLong.addInsertion(dataWork, "ProteinData", HFactory.createColumn(id, protein, ss, ss));
+ }
+
+ public Keyspace GetKeyspace() {
+ return ksp;
+ }
+}
--- /dev/null
+package combio.cassandra;
+
+import java.util.List;
+
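+/**
+ * Simple bean holding one row of report data: the date, the total number of jobs,
+ * the job id, the protein sequence, the jnetpred prediction, the protein sequence
+ * split around a search string (subProt) and the per-day execution-time buckets (timeRez).
+ */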
+public class DataBase {
+ String date;
+ int total;
+ String id;
+ String prot;
+ String jpred;
+ List<String> subProt;
+ List<Integer> timeRez;
+
+
+ public DataBase() {}
+
+ public DataBase(String dat, int total) {
+ this.date = dat;
+ this.total = total;
+ }
+
+ public void setDate(String dat) {
+ this.date = dat;
+ }
+
+ public String getDate() {
+ return date;
+ }
+
+ public void setTotal(int tot) {
+ this.total = tot;
+ }
+ public int getTotal() {
+ return total;
+ }
+
+ public void setProt(String prot) {
+ this.prot = prot;
+ }
+
+ public String getProt() {
+ return prot;
+ }
+
+ public void setJpred(String jpred) {
+ this.jpred = jpred;
+ }
+
+ public String getJpred() {
+ return jpred;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setSubProt(List<String> subProt) {
+ this.subProt = subProt;
+ }
+
+ public List<String> getSubProt() {
+ return subProt;
+ }
+
+ public void setTimeRez(List<Integer> timeRez) {
+ this.timeRez = timeRez;
+ }
+
+ public List<Integer> getTimeRez() {
+ return timeRez;
+ }
+}
--- /dev/null
+package combio.cassandra;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
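+/**
+ * Downloads the JPred usage log (alljobs.dat), selects the jobs submitted today,
+ * fetches each job's *.concise.fasta result, extracts the query sequence and the
+ * jnetpred prediction, and stores them in Cassandra via CassandraCreate.
+ */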
+public class DataParsing {
+ private CassandraCreate cc = new CassandraCreate();
+
+ public void ParsingTest(String input) {
+ System.out.println("Inserting.....");
+ URL url, urltable;
+// int count = 0;
+// int countNotM = 0;
+// int countEr = 0;
+// int countEmp = 0;
+ Calendar cal = Calendar.getInstance();
+ // Calendar.MONTH is zero-based, so add 1; month and day are zero-padded on the assumption
+ // that the log uses the same yyyy/MM/dd format that is parsed further down
+ String date = String.format("%d/%02d/%02d", cal.get(Calendar.YEAR), cal.get(Calendar.MONTH) + 1, cal.get(Calendar.DAY_OF_MONTH));
+ try {
+ url = new URL(input);
+ URLConnection conn = url.openConnection();
+ BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+ String line;
+
+ while ((line = br.readLine()) != null) {
+ if (line.matches(date + "(.*)jp_[^\\s]+")) {
+ String[] table;
+ table = line.split("\\s+");
+ // count++;
+ if (!cc.CheckIP(table[table.length - 1])) {
+ // countNotM++;
+ urltable = new URL("http://www.compbio.dundee.ac.uk/www-jpred/results/" + table[table.length - 1] + "/" + table[table.length - 1] + ".concise.fasta");
+ HttpURLConnection httpConnection = (HttpURLConnection) urltable.openConnection();
+ if (httpConnection.getResponseCode() > 200) {
+// countEr++;
+ continue;
+ }
+ try {
+ BufferedReader br1 = new BufferedReader(new InputStreamReader(httpConnection.getInputStream()));
+ String lineNext;
+ String newProt = "";
+ String jnetpred = "";
+ while ((lineNext = br1.readLine()) != null) {
+ if (lineNext.equals(">QUERY")) {
+ // readLine() may return null at end of file before the next ">" header, so guard against NPE
+ while ((lineNext = br1.readLine()) != null && !lineNext.matches(">[^\\s]+"))
+ newProt += lineNext;
+ } else if (lineNext.equals(">jnetpred")) {
+ while ((lineNext = br1.readLine()) != null && !lineNext.matches(">[^\\s]+"))
+ jnetpred += lineNext;
+ }
+ }
+ br1.close();
+ if (newProt.length() <= 1) {
+ // countEmp++;
+ continue;
+ }
+
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd");
+
+ String dateInString1 = table[0].substring(0, table[0].indexOf(":"));
+
+ long dateWork1 = 0;
+
+ try {
+ Date dat1 = formatter.parse(dateInString1);
+ dateWork1 = dat1.getTime();
+
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ cc.InsertData(dateWork1, table[0], table[1], table[2], table[table.length - 1], "OK", "OK", newProt, jnetpred);
+ } catch (IOException e) {
+ // the result file for this job could not be read; skip it
+ }
+ // }
+ }
+ }
+ }
+ br.close();
+// System.out.println("Match " + count);
+// System.out.println("Not Match " + countNotM);
+// System.out.println("Error " + countEr);
+// System.out.println("No protein " + countEmp);
+ } catch (MalformedURLException e) {
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+}
--- /dev/null
+package combio.listeners;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import javax.servlet.annotation.WebListener;
+
+import combio.cassandra.CassandraCreate;
+
+/**
+ * Application Lifecycle Listener implementation class ContextListener
+ *
+ */
+@WebListener
+public class ContextListener implements ServletContextListener {
+ private ScheduledExecutorService scheduler;
+ CassandraCreate cc = new CassandraCreate();
+ /**
+ * @see ServletContextListener#contextInitialized(ServletContextEvent)
+ */
+ public void contextInitialized(ServletContextEvent arg0) {
+ System.out.println("Session start ...........................................");
+ cc.Connection();
+
+
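+ // poll the JPred usage log every 30 seconds (no initial delay) and load any new jobs into Cassandra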
+ scheduler = Executors.newSingleThreadScheduledExecutor();
+ scheduler.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() { cc.Parsing();}}
+ , 0, 30, TimeUnit.SECONDS);
+ }
+
+ /**
+ * @see ServletContextListener#contextDestroyed(ServletContextEvent)
+ */
+ public void contextDestroyed(ServletContextEvent arg0) {
+ // stop the polling task first so it does not run against a closed cluster
+ scheduler.shutdownNow();
+ cc.Closing();
+ System.out.println("Shut down");
+ }
+
+
+
+
+}
--- /dev/null
+package combio.listeners;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import combio.cassandra.*;
+import combio.statistic.StatisticsProt;
+
+/**
+ * Servlet implementation class DetailList
+ */
+@WebServlet("/DetailList")
+public class DetailList extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @see HttpServlet#HttpServlet()
+ */
+
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ List<DataBase> result;
+ String date1 = request.getParameter("data1");
+ String date2 = request.getParameter("data2");
+ StatisticsProt sp = new StatisticsProt();
+// result = sp.readDetail(date1, date2);
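+ // this servlet is not yet wired to a report page; QueryServlet runs the same query and forwards to ReportNew.jsp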
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ // TODO Auto-generated method stub
+ }
+
+}
--- /dev/null
+package combio.listeners;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import combio.cassandra.DataBase;
+import combio.statistic.StatisticsProt;
+
+/**
+ * Servlet implementation class LengthServlet
+ */
+@WebServlet("/LengthServlet")
+public class LengthServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ List<DataBase> result;
+ String date1 = request.getParameter("data1");
+ String date2 = request.getParameter("data2");
+ StatisticsProt sp = new StatisticsProt();
+ result = sp.readLength(date1, date2);
+ request.setAttribute("data1", date1);
+ request.setAttribute("data2", date2);
+ request.setAttribute("result", result);
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportLength.jsp");
+ rd.forward(request, response);
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+}
--- /dev/null
+package combio.listeners;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import combio.cassandra.DataBase;
+import combio.statistic.StatisticsProt;
+
+/**
+ * Servlet implementation class ProtServlet
+ */
+@WebServlet("/ProtServlet")
+public class ProtServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ List<DataBase> result;
+ String flag = request.getParameter("protein");
+ String prot = request.getParameter("prot");
+ StatisticsProt sp = new StatisticsProt();
+ if (flag.equals("whole")) {
+ result = sp.readProt(prot);
+ } else {
+ result = sp.readPart(prot);
+ }
+ request.setAttribute("prot", prot);
+ request.setAttribute("flag", flag);
+ request.setAttribute("result", result);
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportProt.jsp");
+ rd.forward(request, response);
+ }
+
+ /**
+ * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+
+}
--- /dev/null
+package combio.listeners;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.annotation.WebServlet;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import combio.cassandra.*;
+import combio.statistic.StatisticsProt;
+
+
+@WebServlet("/QueryServlet")
+public class QueryServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
+ */
+ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ List<DataBase> result;
+ String date1 = request.getParameter("data1");
+ String date2 = request.getParameter("data2");
+ StatisticsProt sp = new StatisticsProt();
+ result = sp.readDetail(date1, date2);
+ request.setAttribute("data1", date1);
+ request.setAttribute("data2", date2);
+ request.setAttribute("result", result);
+ RequestDispatcher rd = request.getRequestDispatcher("/ReportNew.jsp");
+ rd.forward(request, response);
+ }
+
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ doGet(request, response);
+ }
+
+}
--- /dev/null
+package combio.statistic;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+
+import me.prettyprint.cassandra.serializers.LongSerializer;
+import me.prettyprint.cassandra.serializers.StringSerializer;
+import me.prettyprint.hector.api.beans.ColumnSlice;
+import me.prettyprint.hector.api.beans.HColumn;
+import me.prettyprint.hector.api.beans.OrderedRows;
+import me.prettyprint.hector.api.beans.Row;
+import me.prettyprint.hector.api.factory.HFactory;
+import me.prettyprint.hector.api.query.QueryResult;
+import me.prettyprint.hector.api.query.RangeSlicesQuery;
+import me.prettyprint.hector.api.query.SliceQuery;
+import combio.cassandra.CassandraCreate;
+import combio.cassandra.DataBase;
+
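+/**
+ * Read-side queries over the ProteinData, ProteinRow and ProteinLog column families:
+ * jobs per day, jobs bucketed by execution time, and lookups by full or partial
+ * protein sequence.
+ */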
+public class StatisticsProt {
+ private final static long MILLISECONDS_PER_DAY = 1000L * 60 * 60 * 24;
+ private CassandraCreate cc = new CassandraCreate();
+ private ArrayList<DataBase> query;
+
+ // count the number of jobs per day for the period from dateInStringSt to dateInStringEnd
+ public List<DataBase> readDetail(String dateInStringSt, String dateInStringEnd) {
+ long dateWorkSt = DateParsing(dateInStringSt);
+ long dateWorkEnd = DateParsing(dateInStringEnd);
+ query = new ArrayList<DataBase>();
+ while (dateWorkSt <= dateWorkEnd) {
+ SliceQuery<Long, String, String> result = HFactory.createSliceQuery(cc.GetKeyspace(), LongSerializer.get(), StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinData");
+ result.setKey(dateWorkSt);
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ QueryResult <ColumnSlice<String, String>> columnSlice = result.execute();
+ DataBase db = new DataBase(DateFormat(dateWorkSt), columnSlice.get().getColumns().size());
+ query.add(db);
+ dateWorkSt += MILLISECONDS_PER_DAY ;
+ }
+ return query;
+ }
+
+ // bucket jobs by execution time for each day in the period from dateInStringSt to dateInStringEnd
+ public List<DataBase> readLength(String dateInStringSt, String dateInStringEnd) {
+ query = new ArrayList<DataBase>();
+ long dateWorkSt = DateParsing(dateInStringSt);
+ long dateWorkEnd = DateParsing(dateInStringEnd);
+ while (dateWorkSt <= dateWorkEnd) {
+ List<Integer> timeResult = new ArrayList<Integer>();
+ SliceQuery<Long, String, String> result = HFactory.createSliceQuery(cc.GetKeyspace(), LongSerializer.get(), StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinData");
+ result.setKey(dateWorkSt);
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ QueryResult <ColumnSlice<String, String>> columnSlice = result.execute();
+ List<HColumn<String, String>> col = columnSlice.get().getColumns();
+ Iterator<HColumn<String, String>> itCol = col.iterator();
+ for (int i = 0; i < 4; i++)
+ timeResult.add(i, 0);
+ while (itCol.hasNext()) {
+ String id = itCol.next().getName();
+ // CountID returns milliseconds; the bucket boundaries below (1, 10, 20) look like minutes, so convert (assumption)
+ long lenResult = CountID(id) / (1000 * 60);
+ if (lenResult <= 1)
+ timeResult.set(0, timeResult.get(0) + 1);
+ else if (lenResult > 1 && lenResult <= 10)
+ timeResult.set(1, timeResult.get(1) + 1);
+ else if (lenResult > 10 && lenResult <= 20)
+ timeResult.set(2, timeResult.get(2) + 1);
+ else
+ timeResult.set(3, timeResult.get(3) + 1);
+ }
+ DataBase db = new DataBase();
+ db.setTimeRez(timeResult);
+ db.setDate(DateFormat(dateWorkSt));
+ query.add(db);
+ dateWorkSt += MILLISECONDS_PER_DAY ;
+ }
+ return query;
+ }
+
+ // query by the full protein sequence (the sequence is the row key in ProteinRow)
+ public List<DataBase> readProt(String protIn) {
+ query = new ArrayList<DataBase>();
+ SliceQuery<String, String, String> result = HFactory.createSliceQuery(cc.GetKeyspace(), StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinRow");
+ result.setKey(protIn);
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ QueryResult <ColumnSlice<String, String>> columnSlice = result.execute();
+ Iterator <HColumn<String, String>> it = columnSlice.get().getColumns().iterator();
+ while (it.hasNext()) {
+ HColumn<String, String> col = it.next();
+ DataBase db = new DataBase();
+ db.setProt(protIn);
+ db.setId(col.getName());
+ db.setJpred(col.getValue());
+ query.add(db);
+ }
+ return query;
+ }
+
+
+ // query by a subsequence: find all proteins whose sequence contains protIn
+ public List<DataBase> readPart(String protIn) {
+ int row_count = 10000;
+ query = new ArrayList<DataBase>();
+ RangeSlicesQuery<String, String, String> result = HFactory.createRangeSlicesQuery(cc.GetKeyspace(), StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
+ result.setColumnFamily("ProteinRow");
+ result.setRange(null, null, false, Integer.MAX_VALUE);
+ result.setRowCount(row_count);
+ String last_key = null;
+ while (true) {
+ result.setKeys(last_key, null);
+ QueryResult<OrderedRows<String, String, String>> columnSlice = result.execute();
+ OrderedRows<String, String, String> rows = columnSlice.get();
+ Iterator<Row<String, String, String>> rowsIterator = rows.iterator();
+ // the start key is inclusive, so on every page after the first the first row
+ // repeats the last row of the previous page and must be skipped
+ if (last_key != null && rowsIterator.hasNext())
+ rowsIterator.next();
+ while (rowsIterator.hasNext()) {
+ Row<String, String, String> row = rowsIterator.next();
+ last_key = row.getKey();
+ if (last_key.matches("(.*)" + protIn + "(.*)")) {
+ Iterator <HColumn<String, String>> it = row.getColumnSlice().getColumns().iterator();
+ while (it.hasNext()) {
+ HColumn<String, String> col = it.next();
+ List<String> subProt = new ArrayList<String>();
+ String subStr = last_key;
+ while (subStr.length() > 0 && subStr.contains(protIn)) {
+ String first = subStr.substring(0, subStr.indexOf(protIn));
+ if (first.length() > 0)
+ subProt.add(first);
+ subProt.add(protIn);
+ subStr = subStr.substring(subStr.indexOf(protIn) + protIn.length(), subStr.length());
+ }
+ if (subStr.length() > 0)
+ subProt.add(subStr);
+ DataBase db = new DataBase();
+ db.setProt(last_key);
+ db.setId(col.getName());
+ db.setJpred(col.getValue());
+ db.setSubProt(subProt);
+ query.add(db);
+ }
+ }
+ }
+ if (rows.getCount() < row_count)
+ break;
+ }
+ return query;
+ }
+
+ // convert a date string (yyyy/MM/dd) to milliseconds since the epoch
+ private static long DateParsing(String datInput) {
+ long dateWorkSt = 0;
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd");
+ try {
+ dateWorkSt = formatter.parse(datInput).getTime();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return dateWorkSt;
+ }
+
+ // convert a date/time string (yyyy/MM/dd:HH:mm:ss) to milliseconds since the epoch
+ private static long TimeConvert(String datInput) {
+ long dateWorkSt = 0;
+ // HH (24-hour clock) is assumed here; the original lowercase hh pattern would misparse afternoon times
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy/MM/dd:HH:mm:ss");
+ try {
+ dateWorkSt = formatter.parse(datInput).getTime();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ // System.out.println("start reverce" + DateFormat1(dateWorkSt));
+ return dateWorkSt;
+ }
+
+ // convert long to date in string format
+ private static String DateFormat(long inDate){
+ SimpleDateFormat datformat = new SimpleDateFormat("dd/MM/yyyy");
+ String dateString = datformat.format(new Date(inDate));
+ return dateString;
+ }
+
+ private static String DateFormat1(long inDate){
+ SimpleDateFormat datformat = new SimpleDateFormat("dd/MM/yyyy:hh:mm:ss");
+ String dateString = datformat.format(new Date(inDate));
+ return dateString;
+ }
+
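+ // execution time of a job in milliseconds: DataEnd minus DataBegin, both read from ProteinLog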
+ public long CountID(String id) {
+ SliceQuery<String, String, String> sliceQuery = HFactory.createSliceQuery(cc.GetKeyspace(), StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
+ sliceQuery.setColumnFamily("ProteinLog").setKey(id).setRange("", "", false, 100);
+ QueryResult<ColumnSlice<String, String>> result = sliceQuery.execute();
+ String datBegin = result.get().getColumnByName("DataBegin").getValue();
+ String datEnd = result.get().getColumnByName("DataEnd").getValue();
+ long datBeginLong = TimeConvert(datBegin);
+ long datEndLong = TimeConvert(datEnd);
+ return datEndLong - datBeginLong;
+ }
+
+}