1 package compbio.cassandra;
3 import java.io.BufferedReader;
4 import java.io.FileNotFoundException;
5 import java.io.IOException;
6 import java.io.InputStream;
7 import java.io.InputStreamReader;
8 import java.net.HttpURLConnection;
9 import java.net.MalformedURLException;
11 import java.net.URLConnection;
12 import java.util.ArrayList;
13 import java.util.Calendar;
14 import java.util.Date;
15 import java.util.List;
17 import compbio.cassandra.JpredParser;
18 import compbio.data.sequence.FastaReader;
19 import compbio.data.sequence.FastaSequence;
20 import compbio.engine.JpredJob;
21 import compbio.engine.ProteoCachePropertyHelperManager;
22 import compbio.engine.archive.Archive;
23 import compbio.engine.archive.ArchivedJob;
24 import compbio.util.PropertyHelper;
25 import compbio.util.Util;
27 public class JpredParserHTTP implements JpredParser {
// Writer used to persist parsed jobs into the Cassandra store.
28 private CassandraWriter cw = new CassandraWriter();
// Archive for job result tarballs; (re)created in Parsing().
29 private static Archive archive;
// URL prefix of the Jpred results directory; job ids are appended to it.
30 private String dirprefix;
// Alignment sequences collected while parsing one job's concise file.
31 private List<FastaSequence> alignment;
// Prediction tracks (jnetpred, JNETCONF, JNETSOL*, ...) from the concise file.
32 private List<FastaSequence> predictions;
// Count of jobs with no *.concise.fasta file (still RUNNING or FAILED).
33 private int countNoData;
// Whether result archiving is enabled ("archive.enable" property); set in Parsing().
34 private static boolean archiving = false;
// Shared ProteoCache property accessor used to read configuration flags.
35 private static final PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
// Default constructor: reads results from the public Jpred results URL.
37 public JpredParserHTTP() {
38 dirprefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
// Creates a parser that reads job results from the given URL prefix.
41 public JpredParserHTTP(String sourceurl) {
42 dirprefix = sourceurl;
// Replaces the results URL prefix (JpredParser interface method).
45 public void setSource(String newsourceprefix) {
46 dirprefix = newsourceprefix;
// Reads a boolean configuration property by key. The branch handling an
// empty/missing property is elided from this view; otherwise the trimmed
// property string is parsed as a boolean.
49 private boolean initBooleanValue(String key) {
51 String status = ph.getProperty(key);
52 if (Util.isEmpty(status)) {
// NOTE(review): new Boolean(...) is deprecated and allocates a boxed value;
// Boolean.parseBoolean(status.trim()) is the equivalent modern form.
55 return new Boolean(status.trim()).booleanValue();
// Parses the last nDays days of job listings from 'source': rewinds the
// calendar nDays back, then walks forward one day at a time, delegating
// each day to ParsingOneDay(). Also refreshes the archiving flag/Archive.
58 public void Parsing(String source, int nDays) throws IOException {
59 Calendar cal = Calendar.getInstance();
60 cal.add(Calendar.DATE, -nDays);
61 archiving = initBooleanValue("archive.enable");
// presumably only created when archiving is enabled — the guard is elided here; TODO confirm
63 archive = new Archive();
65 for (int i = 0; i < nDays; ++i) {
66 cal.add(Calendar.DATE, 1);
// Date rendered as yyyy/M/d (Calendar.MONTH is 0-based, hence the +1).
67 String date = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DATE);
68 ParsingOneDay(source, date);
73 * The method parses the Jpred output concise file in the FASTA format If
74 * there is a record with ID = QUERY or jobid, this is a "one protein" job
75 * otherwise this is an alignment job
// Parses the concise FASTA file: known prediction track ids are collected
// into 'predictions'; a record named QUERY (or matching the job id) marks a
// single-protein job. The branch bodies and the return value are elided here.
77 private String parsePredictions(final InputStream stream, String jobid) throws FileNotFoundException {
78 final FastaReader fr = new FastaReader(stream);
80 while (fr.hasNext()) {
81 final FastaSequence fs = fr.next();
82 String seqid = fs.getId();
// Flatten the sequence onto one line before classifying it.
83 String seq = fs.getSequence().replaceAll("\n", "");
// NOTE(review): "JNETCONF" is tested twice in this condition (once per line);
// the duplicate was likely meant to be another track id — verify against the
// full list of Jpred concise-file tracks.
84 if (seqid.equals("jnetpred") || seqid.equals("Lupas_21") || seqid.equals("Lupas_14") || seqid.equals("Lupas_28")
85 || seqid.equals("JNETSOL25") || seqid.equals("JNETSOL5") || seqid.equals("JNETSOL0") || seqid.equals("JNETCONF")
86 || seqid.equals("JNETHMM") || seqid.equals("JNETPSSM") || seqid.equals("JNETCONF")) {
// QUERY (or the job id itself) identifies the submitted protein sequence.
90 if (seqid.equals("QUERY") || seqid.equals(jobid))
// Reads the job's LOG file line by line from the stream; the accumulation of
// lines and the returned string are in the elided tail of this method.
// NOTE(review): InputStreamReader here uses the platform default charset —
// consider specifying StandardCharsets.UTF_8 explicitly.
97 private String parseLogFile(final InputStream stream) throws IOException {
99 BufferedReader buffer = new BufferedReader(new InputStreamReader(stream));
101 while (null != (line = buffer.readLine())) {
// Analyses one job listed in the daily log: probes the job's result directory
// over HTTP, parses the concise FASTA and LOG files when present, derives the
// execution/final status, writes the job to Cassandra, and (when archiving is
// on) stores the result tarball. The return value (an insertion count, per the
// caller's "countinsertions += analyseJob(job)") is in an elided line.
// jobinfo is one whitespace-split line of the daily listing:
// [0]=start, [1]=end, [2]=client IP, [last]=job id — TODO confirm field order.
107 private int analyseJob(String[] jobinfo) throws IOException {
108 alignment = new ArrayList<FastaSequence>();
109 predictions = new ArrayList<FastaSequence>();
// NOTE(review): 'running' is never referenced in the visible code — candidate
// for removal if the elided lines do not use it.
110 boolean running = true;
111 boolean ConcisefileExists = false;
112 boolean LogfileExists = false;
113 JpredJob job = new JpredJob(jobinfo[jobinfo.length - 1], jobinfo[0], jobinfo[1]);
114 job.setIP(jobinfo[2]);
115 Date currDate = new Date();
116 String maindir = dirprefix + "/" + job.getJobID() + "/";
119 URL dirurl = new URL(maindir);
120 HttpURLConnection httpConnection_dirurl = (HttpURLConnection) dirurl.openConnection();
// Bail out unless the job directory answered with a 2xx status.
// NOTE(review): '< 199' lets status 199 through; '< 200' matches the 2xx checks below.
121 if (httpConnection_dirurl.getResponseCode() < 199 || 300 <= httpConnection_dirurl.getResponseCode()) {
124 URL conciseurl = new URL(maindir + job.getJobID() + ".concise.fasta");
125 URL logurl = new URL(maindir + "LOG");
126 HttpURLConnection httpConnection_conciseurl = (HttpURLConnection) conciseurl.openConnection();
127 HttpURLConnection httpConnection_logurl = (HttpURLConnection) logurl.openConnection();
// 2xx on the concise file means the job produced predictions.
128 if (199 < httpConnection_conciseurl.getResponseCode() && httpConnection_conciseurl.getResponseCode() < 300) {
129 ConcisefileExists = true;
132 job.setProtein(parsePredictions(conciseurl.openStream(), job.getJobID()));
133 } catch (IOException e) {
137 // The job still can be running or failed...
// 2xx on LOG: capture it; otherwise the job never started at all.
140 if (199 < httpConnection_logurl.getResponseCode() && httpConnection_logurl.getResponseCode() < 300) {
141 LogfileExists = true;
142 job.setLog(parseLogFile(logurl.openStream()));
144 // The job has not been started at all...
145 job.setExecutionStatus("FAIL")
146 job.setFinalStatus("STOPPED");
// Classify failures from the LOG contents.
149 if (job.getLog().matches("(.*)TIMEOUT\\syour\\sjob\\stimed\\sout(.*)")) {
150 // blast job was too long (more than 3600 secs by default)...
151 job.setExecutionStatus("FAIL");
152 job.setFinalStatus("TIMEDOUT");
154 } else if (job.getLog().matches("(.*)Jpred\\serror:\\sDied\\sat(.*)")) {
155 // an internal Jpred error...
156 job.setExecutionStatus("FAIL");
157 job.setFinalStatus("JPREDERROR");
// Older than ~1h with a LOG but no concise file: assume it died silently.
159 } else if ((currDate.getTime() - job.getEndTime()) / 1000 > 3601 && LogfileExists && !ConcisefileExists) {
160 // the job was stopped with unknown reason...
161 job.setExecutionStatus("FAIL");
162 job.setFinalStatus("STOPPED");
166 httpConnection_conciseurl.disconnect();
167 httpConnection_logurl.disconnect();
168 } catch (MalformedURLException e) {
// Persist the parsed job into the Cassandra query tables.
173 job.setAlignment(alignment);
174 job.setPredictions(predictions);
175 cw.FormQueryTables(job);
// Archive the result tarball for successful jobs; failed jobs are recorded
// with an "undefined" archive path. (Presumably guarded by 'archiving' in an
// elided line — TODO confirm.)
178 ArchivedJob ajob = new ArchivedJob(job.getJobID());
179 String arlink = archive.createJob(job.getJobID());
180 if (job.getFinalStatus().equals("OK")) {
181 ajob.setArchivePath(arlink);
182 ajob.copyArchiveFromWeb(maindir + job.getJobID() + ".tar.gz");
183 cw.ArchiveData(job, arlink);
185 cw.ArchiveData(job, "undefined");
// Parses the daily job listing at 'input' for one day ('date', yyyy/M/d):
// every line matching the date and a jp_* job id is split, and jobs not yet
// in Cassandra are handed to analyseJob(). Prints a summary of the counters.
// The catch bodies and some counter increments are elided from this view.
194 private void ParsingOneDay(String input, String date) {
196 int countinsertions = 0;
197 int countinserted = 0;
198 int countNotanalyzed = 0;
201 System.out.println("Inserting jobs for " + date);
203 URL url = new URL(input);
204 URLConnection conn = url.openConnection();
// NOTE(review): platform default charset; specify UTF-8 if the listing is UTF-8.
205 BufferedReader alljobs = new BufferedReader(new InputStreamReader(conn.getInputStream()));
208 while ((line = alljobs.readLine()) != null) {
// Only lines for this date that end in a jp_<id> token are jobs.
209 if (line.matches(date + ":(.*)jp_[^\\s]+")) {
211 String[] job = line.split("\\s+");
212 String jobid = job[job.length - 1];
// Skip jobs already inserted; analyseJob returns the number inserted.
213 if (cw.JobisNotInsterted(jobid)) {
214 countinsertions += analyseJob(job);
223 System.out.println("Total number of jobs = " + totalcount);
224 System.out.println(" " + countinserted + " jobs inserted already");
225 System.out.println(" " + countNotanalyzed + " not analysed jobs");
226 System.out.println(" " + countNoData + " jobs without *.concise.fasta file (RUNNING or FAILED)");
227 System.out.println(" " + countinsertions + " new job insertions\n");
228 } catch (MalformedURLException e) {
230 } catch (IOException e) {