--- /dev/null
+package org.vamsas.test.simpleclient;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.text.DateFormat;
+import java.util.Date;
+import java.util.Hashtable;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.vamsas.client.simpleclient.VamsasArchive;
+import org.vamsas.client.simpleclient.VamsasArchiveReader;
+import org.vamsas.objects.core.ApplicationData;
+import org.vamsas.objects.core.Entry;
+import org.vamsas.objects.core.Provenance;
+import org.vamsas.objects.core.VAMSAS;
+import org.vamsas.objects.core.VamsasDocument;
+
+public class ArchiveWriter {
+
+  /**
+   * Test program for writing Vamsas archive files: creates a new archive at
+   * argv[0] and merges into it the vamsas document (or raw vamsas.xml entry)
+   * of each further archive named on the command line.
+   */
+
+  static Log log = LogFactory.getLog("org.vamsas.test.simpleclient.ArchiveWriter");
+
+  /**
+   * @param user
+   *          text for user entry
+   * @param action
+   *          text for action entry
+   * @return new Provenance entry for ArchiveWriter created docs.
+   */
+  public static Entry newProvenanceEntry(String user, String action) {
+    log.debug("Adding ProvenanceEntry("+user+","+action+")");
+    Entry e = new Entry();
+    e.setAction(action);
+    e.setUser(user);
+    e.setDate(new org.exolab.castor.types.Date(new Date()));
+    return e;
+  }
+
+  /**
+   * @return new Provenance holding a single "Created new Vamsas Document"
+   *         entry attributed to ArchiveWriter.
+   */
+  public static Provenance newProvenance() {
+    Provenance list = new Provenance();
+    list.addEntry(newProvenanceEntry("ArchiveWriter", "Created new Vamsas Document"));
+    return list;
+  }
+
+  /**
+   * Concatenates svec1 followed by svec2 into destvec.
+   * destvec must be at least svec1.length + svec2.length long.
+   */
+  private static void mergeVecs(Object[] destvec, Object[] svec1, Object[] svec2) {
+    // System.arraycopy replaces the original pair of hand-written index loops
+    System.arraycopy(svec1, 0, destvec, 0, svec1.length);
+    System.arraycopy(svec2, 0, destvec, svec1.length, svec2.length);
+  }
+  // Merge appDataReferences require transfer of jar entries, perhaps with a renaming of the entry.
+  // Merge appDatas require eventually unique URNS
+
+  /**
+   * Indexes appdatas by urn in ht (urn -> {ApplicationData -> dataReference}).
+   *
+   * @param ht
+   *          existing index to add to, or null to create a new one
+   * @param appdatas
+   *          entries to index
+   * @return ht (newly allocated when null was passed in)
+   */
+  public static Hashtable hashOfAppDatas(Hashtable ht, ApplicationData[] appdatas) {
+    if (ht==null)
+      ht = new Hashtable();
+    for (int i=0, j=appdatas.length; i<j; i++) {
+      if (!ht.containsKey(appdatas[i].getUrn())) {
+        Hashtable aphash = new Hashtable();
+        ht.put(appdatas[i].getUrn(), aphash);
+        aphash.put(appdatas[i], appdatas[i].getAppDataChoice().getDataReference());
+      } else {
+        // TODO: duplicate urn encountered - ensure urns and references are
+        // made unique before the entry is recorded (currently dropped).
+      }
+    }
+    return ht;
+  }
+
+  /**
+   * Adds entry to dest, renaming clashing urns so they stay unique.
+   * NOTE(review): still a stub - the uniquified urn is computed but the entry
+   * is never copied into dest or its jar data transferred from sarc to darc.
+   *
+   * @param darc
+   *          destination archive (for transferring referenced jar entries)
+   * @param dest
+   *          destination document
+   * @param sarc
+   *          source archive the entry's data references resolve against
+   * @param entry
+   *          the ApplicationData to merge into dest
+   */
+  public static void addAppDataEntry(VamsasArchive darc, VamsasDocument dest, VamsasArchiveReader sarc, ApplicationData entry) {
+    // check uniqueness of entry.urn amongst dest.ApplicationData[].urn
+    // check uniqueness of entry.user[].urn amongst dest.ApplicationData[].user[].urn
+    // check uniqueness of entry.user
+    // entry.getAppDataChoice().getData() or getDataReference is unique
+    for (int i=0, j=dest.getApplicationDataCount(); i<j; i++) {
+      ApplicationData o = dest.getApplicationData()[i];
+      // ensure new urn is really unique
+      String urn = entry.getUrn();
+      int v = 1;
+      while (o.getUrn().equals(urn)) {
+        urn = entry.getUrn()+v++;
+      }
+      // TODO: the uniquified urn is discarded at each iteration - apply it to
+      // a copy of entry, check each user ApplicationData for uniqueness too,
+      // then copy the valid objects (and jar entries) over to dest/darc.
+    }
+  }
+
+  /**
+   * Merges the roots and application data of source into dest.
+   *
+   * @param darc
+   *          archive being written (destination for jar entries)
+   * @param dest
+   *          document being built up; modified in place
+   * @param sarc
+   *          reader for the archive source came from
+   * @param source
+   *          document whose content is merged into dest
+   * @return true on success
+   */
+  public static boolean mergeDocs(VamsasArchive darc, VamsasDocument dest, VamsasArchiveReader sarc, VamsasDocument source) {
+    log.debug("mergeDocs entered.");
+    // concatenate the VAMSAS roots of both documents
+    VAMSAS[] newr = new VAMSAS[dest.getVAMSASCount()+source.getVAMSASCount()];
+    mergeVecs(newr, dest.getVAMSAS(), source.getVAMSAS());
+    dest.setVAMSAS(newr);
+    // TODO: should verify that all ids really are unique in newly merged document. If not then what ?
+
+    if (source.getApplicationDataCount()>0) {
+      ApplicationData[] sappd = source.getApplicationData();
+      // check refs and update/modify if necessary
+      for (int i=0; i<sappd.length; i++) {
+        addAppDataEntry(darc, dest, sarc, sappd[i]);
+      }
+    }
+
+    return true; // success
+  }
+
+  /**
+   * Entry point: argv[0] is the archive to create; any further arguments are
+   * existing archives whose documents are merged into the new one.
+   */
+  public static void main(String argv[]) {
+    if (argv.length<1) {
+      log.fatal("Usage : <archive to create> [(commands)]");
+      return;
+    }
+    File newarch = new File(argv[0]);
+    int argpos = 0;
+    try {
+      // test fully fledged doc construction
+      VamsasArchive varc = new VamsasArchive(newarch, true);
+      VamsasDocument docroot;
+      docroot = new VamsasDocument();
+      docroot.setProvenance(newProvenance());
+      while (++argpos<argv.length) {
+        File archive = new File(argv[argpos]);
+        if (archive.exists()) {
+          VamsasArchiveReader vdoc = new VamsasArchiveReader(archive);
+          if (vdoc.isValid()) {
+            InputStream istream = vdoc.getVamsasDocumentStream();
+            if (istream!=null) {
+              VamsasDocument cdocroot = VamsasDocument.unmarshal(new InputStreamReader(istream));
+              if (cdocroot!=null)
+                mergeDocs(varc, docroot, vdoc, cdocroot);
+            } else {
+              if ((istream = vdoc.getVamsasXmlStream())!=null) {
+                // make a new vamsas document from the vamsas.xml entry
+                VAMSAS root = VAMSAS.unmarshal(new InputStreamReader(istream));
+                // was argv[argpos-1]: off by one - the archive just read is argv[argpos]
+                docroot.getProvenance().addEntry(newProvenanceEntry("user", "added vamsas.xml from "+argv[argpos]));
+                docroot.addVAMSAS(root);
+              }
+            }
+            // NOTE(review): istream is never closed - confirm whether
+            // VamsasArchiveReader manages the underlying stream's lifetime.
+          }
+          // TODO: write the merged docroot out - docwriter is currently
+          // obtained but nothing is marshalled to it.
+          PrintWriter docwriter = varc.getDocumentOutputStream();
+        }
+      }
+    } catch (Exception e) {
+      log.error("Whilst manipulating "+argv[0], e);
+    }
+  }
+}