import compbio.data.sequence.FastaReader;
import compbio.data.sequence.FastaSequence;
import compbio.engine.JpredJob;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.engine.archive.Archive;
+import compbio.engine.archive.ArchivedJob;
+import compbio.util.PropertyHelper;
+import compbio.util.Util;
// NOTE(review): everything below is unified-diff residue — lines still carry
// "+" (added) and "-" (removed) patch markers and several runs of unchanged
// context were elided by the diff, so this class does NOT compile as-is.
// Comments below state only what the visible lines demonstrate.
public class JpredParserHTTP implements JpredParser {
private CassandraWriter cw = new CassandraWriter();
// new: handle to the tar.gz archive store (created in Parsing() when enabled)
+ private static Archive archive;
private String dirprefix;
private List<FastaSequence> alignment;
private List<FastaSequence> predictions;
private int countNoData;
// new: archiving is off unless the "archive.enable" property enables it
+ private static boolean archiving = false;
+ private static final PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
public JpredParserHTTP() {
dirprefix = "http://www.compbio.dundee.ac.uk/www-jpred/results";
// BUG(review): "newsourceprefix" is not declared anywhere visible — the diff
// most likely dropped a second constructor taking this argument; as shown
// this assignment cannot compile.
dirprefix = newsourceprefix;
}
// Reads a boolean configuration property; a missing/empty value means false.
+ private boolean initBooleanValue(String key) {
+ assert key != null;
+ String status = ph.getProperty(key);
+ if (Util.isEmpty(status)) {
+ return false;
+ }
// NOTE(review): new Boolean(...) is deprecated — Boolean.parseBoolean(status.trim())
// would be the modern equivalent.
+ return new Boolean(status.trim()).booleanValue();
+ }
+
// Walks the last nDays dates (oldest first) and parses each day's jobs.
// Newly added: the archive store is initialised once per run when enabled.
public void Parsing(String source, int nDays) throws IOException {
Calendar cal = Calendar.getInstance();
cal.add(Calendar.DATE, -nDays);
+ archiving = initBooleanValue("archive.enable");
+ if (archiving) {
+ archive = new Archive();
+ }
for (int i = 0; i < nDays; ++i) {
cal.add(Calendar.DATE, 1);
// date formatted as yyyy/M/d to match the Jpred results URL layout
String date = cal.get(Calendar.YEAR) + "/" + (cal.get(Calendar.MONTH) + 1) + "/" + cal.get(Calendar.DATE);
- ParsingForDate(source, date);
+ ParsingOneDay(source, date);
}
}
// NOTE(review): declared to return String yet the visible bodies return the
// ints 0/1 — the diff has clearly dropped context lines inside this method.
private String parsePredictions(final InputStream stream, String jobid) throws FileNotFoundException {
final FastaReader fr = new FastaReader(stream);
String protein = "";
-// alignment = new ArrayList<FastaSequence>();
-// predictions = new ArrayList<FastaSequence>();
while (fr.hasNext()) {
final FastaSequence fs = fr.next();
String seqid = fs.getId();
boolean running = true;
boolean ConcisefileExists = false;
boolean LogfileExists = false;
// NOTE(review): "jobinfo" is never defined in the visible lines (elided context).
- JpredJob job = new JpredJob (jobinfo[jobinfo.length - 1], jobinfo[0], jobinfo[1]);
+ JpredJob job = new JpredJob(jobinfo[jobinfo.length - 1], jobinfo[0], jobinfo[1]);
job.setIP(jobinfo[2]);
Date currDate = new Date();
String maindir = dirprefix + "/" + job.getJobID() + "/";
- //System.out.println("analyzing job " + job.getJobID());
try {
URL dirurl = new URL(maindir);
HttpURLConnection httpConnection_dirurl = (HttpURLConnection) dirurl.openConnection();
// NOTE(review): elided context — a bare "return 0;" followed by "}" cannot
// be the complete try block; the response-code check was likely dropped here.
return 0;
}
URL conciseurl = new URL(maindir + job.getJobID() + ".concise.fasta");
// removed: per-job archive URL probe, superseded by the Archive/ArchivedJob flow below
- URL archiveurl = new URL(maindir + job.getJobID() + ".tar.gz");
URL logurl = new URL(maindir + "LOG");
HttpURLConnection httpConnection_conciseurl = (HttpURLConnection) conciseurl.openConnection();
HttpURLConnection httpConnection_logurl = (HttpURLConnection) logurl.openConnection();
- HttpURLConnection httpConnection_archiveurl = (HttpURLConnection) archiveurl.openConnection();
// any 2xx on the concise file means the job finished
if (199 < httpConnection_conciseurl.getResponseCode() && httpConnection_conciseurl.getResponseCode() < 300) {
ConcisefileExists = true;
running = false;
httpConnection_conciseurl.disconnect();
httpConnection_logurl.disconnect();
- httpConnection_archiveurl.disconnect();
} catch (MalformedURLException e) {
e.printStackTrace();
}
job.setAlignment(alignment);
job.setPredictions(predictions);
cw.FormQueryTables(job);
- cw.ArchiveData(job, "undefined");
+ // archiving the job
+ if (archiving) {
+ ArchivedJob ajob = new ArchivedJob(job.getJobID());
+ String arlink = archive.createJob(job.getJobID());
+ if (job.getFinalStatus().equals("OK")) {
+ ajob.setArchivePath(arlink);
+ ajob.copyArchiveFromWeb(maindir + job.getJobID() + ".tar.gz");
+ cw.ArchiveData(job, arlink);
+ } else {
+ cw.ArchiveData(job, "undefined");
+ }
+ }
return 1;
}
return 0;
}
// renamed from ParsingForDate; only this fragment of the body is visible
- private void ParsingForDate(String input, String date) {
+ private void ParsingOneDay(String input, String date) {
int totalcount = 0;
int countinsertions = 0;
int countinserted = 0;
}
;
}
-}
+};
package compbio.engine.archive;
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.log4j.Logger;
+
+import compbio.cassandra.CassandraNativeConnector;
+import compbio.engine.ProteoCachePropertyHelperManager;
+import compbio.util.PropertyHelper;
+
public class Archive {
+ private static final PropertyHelper ph = ProteoCachePropertyHelperManager.getPropertyHelper();
+ private static Logger log = Logger.getLogger(CassandraNativeConnector.class);
+
+ private String archivepath;
+ private File archive;
+ private boolean archiveexist;
+
+ /*
+ * connect to the cluster and look whether all tables exist
+ */
+ public Archive() throws IOException {
+ String path = ph.getProperty("archive.path");
+ assert (null != path);
+
+ if (isAbsolutePath (path)) {
+ archivepath = path;
+ } else {
+ String abspath = ProteoCachePropertyHelperManager.getLocalPath();
+ archivepath = abspath + "/" + path;
+ }
+ if (!isDirExists(archivepath)) {
+ archiveexist = (new File(archivepath).mkdirs());
+ }
+ }
+
+ private boolean isAbsolutePath (String path) {
+ return (new File(path).isAbsolute());
+ }
+
+ private boolean isDirExists (String path) throws IOException {
+ archive = new File(path);
+ return archive.getCanonicalFile().isDirectory();
+ }
+
+
+ public boolean addArchivedJob (ArchivedJob job) {
+ return true;
+ }
+
+ public String createJob(String jobid) {
+ return archivepath + "/" + jobid + ".tar.gz";
+ }
}
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
+import java.nio.file.Paths;
+import java.nio.file.Files;
import java.util.List;
// NOTE(review): diff residue — "+"/"-" markers remain. The old path-taking
// constructor and getArchiveFromWS()/getArchiveFromWeb() are being removed in
// favour of an id-based constructor plus setArchivePath()/copyArchiveFromWeb().
public class ArchivedJob {
// filesystem location of this job's tar.gz (assigned via setArchivePath)
String path;
+ String id;
int filesize;
List<String> files;
- ArchivedJob (String path) {
- this.path = path;
- }
-
- public boolean getArchiveFromWS() {
- return false;
// new constructor: identify the job first, attach the archive path later
+ public ArchivedJob(String id) {
+ this.id = id;
}
-
-
- public boolean getArchiveFromWeb (String webpath) throws IOException, MalformedURLException {
+
+ public boolean copyArchiveFromWeb(String webpath) throws IOException, MalformedURLException {
URL website = new URL(webpath);
ReadableByteChannel rbc = Channels.newChannel(website.openStream());
FileOutputStream fos = new FileOutputStream(path);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
return true;
}
-
-
+
+ public boolean copyArchiveLocaly(String localpath) throws IOException {
+ Files.copy(Paths.get(localpath), Paths.get(path));
+ return true;
+ }
+
+ public void setArchivePath(String path) {
+ this.path = path;
+ }
+
/** @return the recorded size of the archive file in bytes */
public int getSize() {
	return this.filesize;
}
-
+
public List<String> unpack() {
if (null != files) {
-
+
}
return files;
}