site

(djk)
2011-06-11: Fixes from other repo.

diff --git a/alien/src/wormarc/ExternalRefs.java b/alien/src/wormarc/ExternalRefs.java
--- a/alien/src/wormarc/ExternalRefs.java
+++ b/alien/src/wormarc/ExternalRefs.java
@@ -129,7 +129,8 @@ public final class ExternalRefs {
         StringBuilder buffer = new StringBuilder();
         buffer.append(String.format("--- %sExternalRefs ---\n", labelWithTrailingSpace));
         for (Reference ref: mRefs) {
-            buffer.append(String.format("   [%d]:%s\n", ref.mKind, ref.mExternalKey));
+            buffer.append(String.format("   [%d]:%s (%s)\n", ref.mKind, ref.mExternalKey,
+                                        IOUtil.getHexDigest(ref.mExternalKey, 8)));
         }
         buffer.append("---");
         return buffer.toString();
diff --git a/alien/src/wormarc/IOUtil.java b/alien/src/wormarc/IOUtil.java
--- a/alien/src/wormarc/IOUtil.java
+++ b/alien/src/wormarc/IOUtil.java
@@ -122,6 +122,17 @@ public class IOUtil {
         }
     }
 
+    public static String getHexDigest(String value, int numBytes) {
+        try {
+            if (value == null) {
+                return "???";
+            }
+            return IOUtil.getFileDigest(IOUtil.toStreamAsUtf8(value)).hexDigest(numBytes);
+        } catch (IOException ioe) {
+            return "???";
+        }
+    }
+
     public final static void copyAndClose(InputStream fromStream, String toFileName) throws IOException {
         copyAndClose(fromStream, new FileOutputStream(toFileName));
     }
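
A minimal usage sketch of the new IOUtil.getHexDigest() helper, mirroring the ExternalRefs.toString() change above. Illustration only: the class name and key value below are made-up placeholders, not code or keys from this repository.

    import wormarc.IOUtil;

    public class DigestExample {
        public static void main(String[] args) {
            // Fake placeholder key, not a real Freenet key.
            String key = "CHK@NotARealKeyJustAnExample";
            // Prints the key plus a short hex fingerprint of its UTF-8 bytes,
            // or "???" if hashing fails.
            System.out.println(key + " (" + IOUtil.getHexDigest(key, 8) + ")");
        }
    }
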
diff --git a/alien/src/wormarc/io/BlobIO.java b/alien/src/wormarc/io/BlobIO.java
--- a/alien/src/wormarc/io/BlobIO.java
+++ b/alien/src/wormarc/io/BlobIO.java
@@ -55,7 +55,7 @@ public class BlobIO implements Archive.I
     // REQUIRES: Length must not change.
     protected final static String VERSION_1 = "BLOB0001";
 
-    // Anything that you can read or write a BLOB to.
+    // Anything that you can read or write a BLOB from/to.
     public interface StreamFactory {
         InputStream getInputStream() throws IOException;
         OutputStream getOutputStream() throws IOException;
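
For reference, a minimal in-memory implementation sketch of the StreamFactory interface that comment describes. Illustration only: this class is made up for the example and is not part of this patch or the repository.

    import java.io.*;
    import wormarc.io.BlobIO;

    // Hypothetical in-memory StreamFactory: writes go to a byte buffer,
    // reads get back whatever was written.
    public class MemoryStreamFactory implements BlobIO.StreamFactory {
        private final ByteArrayOutputStream mBuffer = new ByteArrayOutputStream();

        public OutputStream getOutputStream() throws IOException {
            return mBuffer;
        }

        public InputStream getInputStream() throws IOException {
            return new ByteArrayInputStream(mBuffer.toByteArray());
        }
    }
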
diff --git a/doc/latest_release.txt b/doc/latest_release.txt
--- a/doc/latest_release.txt
+++ b/doc/latest_release.txt
@@ -1,18 +1,6 @@
-Fixed the "Discover" page to draw lines for rebased versions.
+Fixed bug on the "Discover" page that caused lines to be drawn incorrectly
+in some cases for graphs with rebased versions.
 
-Added preliminary support for file diffs on the "Edit" page.
-It needs layout / CSS cleanup but it works.
+Use .zip instead of .tgz for source release.
 
-Added <<<ArchiveVersion>>> macro to print the 16 digit hex version
-of the current archive.
 
-Added <<<ArchiveUri>>> macro to print a link to the archive SSK
-that the current wiki version was loaded from.
-
-These macros where added so that people viewing dumped wiki
-freesites can tell which version / uri the wiki was
-dumped from.
-
-Release notes for previous version:
-USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,AQACAAE/jfniki_releases/2/
-
diff --git a/readme.txt b/readme.txt
--- a/readme.txt
+++ b/readme.txt
@@ -1,4 +1,4 @@
-20110605
+20110611
 djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
 
 WARNING:
@@ -121,7 +121,9 @@ IDEA: Freetalk vs Freenet interop
       Hmmm... not sure if people would use this feature because of the correlation of ids.
 ---
 Fixed bugs:
-8cfb2f3e7c38  BUG: Default FCP port wrong for CLI client. [requested by a real user]
+238c7dcc5ae3: BUG: incorrect drawing of rebased changes on discover page (since b963650876a7).
+              [The plausible commit order code was just wrong. Fixed it, I hope.]
+8cfb2f3e7c38: BUG: Default FCP port wrong for CLI client. [requested by a real user]
               [The default port is set to 9481 now and can be set via Java System properties. See ./script/wa.sh]
 710d700bc7a1: BUG: Header links to discussion pages. [requested by a real user]
 2ce3a4499a2c: BUG: No way to create an empty wiki from the UI. [requested by a real user]
diff --git a/release/cut_release.py b/release/cut_release.py
--- a/release/cut_release.py
+++ b/release/cut_release.py
@@ -25,8 +25,8 @@
 # It is brittle and not fully debugged.
 # It assumes you have hg infocalypse installed and configured.
 #
-# BUG: ANONYMITY ISSUE: This script currently leaks the *nix user id
-#      into the inserted .tgz and .jar files.
+# POSSIBLE BUG: ANONYMITY ISSUE: This script may leak the *nix user id into the .jar file.
+#      Need to audit.
 #      DO NOT RUN IT if this concerns you.
 #
 import os
@@ -43,6 +43,10 @@ from minimalfms import get_connection, s
 
 ############################################################
 
+# For testing this script.
+#JUST_STAGE = True
+JUST_STAGE = False
+
 # CAUTION: This directory is recursively deleted!
 STAGING_DIR = '/tmp/staging'
 
@@ -63,11 +67,11 @@ PUBLIC_SITE = "USK@kRM~jJVREwnN2qnA8R0Vt
               SITE_NAME
 
 ############################################################
-# Indexes of refereneced USK sites
+# Indexes of referenced USK sites
 
-FREENET_DOC_WIKI_IDX = 34
+FREENET_DOC_WIKI_IDX = 40
 FNIKI_IDX = 84
-REPO_IDX = 17
+REPO_IDX = 18
 
 ############################################################
 
@@ -82,6 +86,15 @@ FMS_MESSAGE_TEMPLATE = os.path.abspath(o
 
 ############################################################
 
+def zip_source(staging_dir, source_dir, zip_file_name):
+    subprocess.check_call(['/usr/bin/zip',
+                           '-r',
+                           '-9', # best compression
+                           zip_file_name,
+                           source_dir],
+                          # LATER: Better way to suppress full path in zip?
+                          cwd=staging_dir)
+
 def stage_release():
     # LATER: check for uncommitted changes
     ui_ = ui.ui()
@@ -93,10 +106,10 @@ def stage_release():
     head = heads[0]
 
     jar_name = "jfniki.%s.jar" % head
-    tgz_name = "jfniki.%s.tgz" % head
+    zip_name = "jfniki.%s.zip" % head
     export_dir_name = "jfniki.%s" % head
 
-    tgz_file_name = "%s/%s" % (STAGING_DIR, tgz_name)
+    zip_file_name = "%s/%s" % (STAGING_DIR, zip_name)
     jar_file_name = "%s/%s" % (STAGING_DIR, jar_name)
 
     # scrub staging directory
@@ -120,17 +133,8 @@ def stage_release():
     # remove origin tarballs to save space
     shutil.rmtree("%s/alien/origins/" % dest)
 
-    # tar up source
-    tgz_file = tarfile.open(tgz_file_name, 'w:gz')
-
-    #def reset(tarinfo):
-    #    tarinfo.uid = tarinfo.gid = 0
-    #    tarinfo.uname = tarinfo.gname = "root"
-    #    return tarinfo
-    # LATER: Use line after upgrading python. Keeps uid, gid, uname out of tar.
-    # tgz_file.add("%s/%s" % (STAGING_DIR, export_dir_name), arcname=export_dir_name, filter=reset) # python 2.7
-    tgz_file.add("%s/%s" % (STAGING_DIR, export_dir_name), arcname=export_dir_name)
-    tgz_file.close()
+    # zip up the source.
+    zip_source(STAGING_DIR, export_dir_name, zip_file_name)
 
     # cp freenet.jar required for build
     os.makedirs("%s/%s/%s" % (STAGING_DIR, export_dir_name, "alien/libs"))
@@ -148,9 +152,9 @@ def stage_release():
     print
     print "SUCCESSFULLY STAGED:"
     print jar_file_name
-    print tgz_file_name
+    print zip_file_name
     print
-    return (head, jar_file_name, tgz_file_name)
+    return (head, jar_file_name, zip_file_name)
 
 
 def simple_templating(text, substitutions):
@@ -188,7 +192,7 @@ def html_escape(text):
 
 ############################################################
 
-def update_html(head, jar_chk, tgz_chk):
+def update_html(head, jar_chk, zip_chk):
     ui_ = ui.ui()
     repo = hg.repository(ui_, REPO_DIR)
     site_usk = PUBLIC_SITE % (latest_site_index(repo) + 1)
@@ -196,7 +200,7 @@ def update_html(head, jar_chk, tgz_chk):
     html = simple_templating(open(INDEX_HTML_TEMPLATE).read(),
                              {'__HEAD__':head,
                               '__JAR_CHK__': jar_chk,
-                              '__SRC_CHK__': tgz_chk,
+                              '__SRC_CHK__': zip_chk,
                               '__RELEASE_NOTES__' : html_escape(open(RELEASE_NOTES).read()),
                               '__SITE_USK__': site_usk,
                               '__INDEX_FDW__': FREENET_DOC_WIKI_IDX,
@@ -240,14 +244,14 @@ def insert_freesite():
     # LATER: Do better. Parse request URI from output.
     return PUBLIC_SITE % target_index, target_index
 
-def send_fms_notification(site_uri, target_index, head, jar_chk, tgz_chk):
+def send_fms_notification(site_uri, target_index, head, jar_chk, zip_chk):
 
     connection = get_connection(FMS_HOST, FMS_PORT, FMS_ID)
 
     msg = simple_templating(open(FMS_MESSAGE_TEMPLATE).read(),
                              {'__HEAD__':head,
                               '__JAR_CHK__': jar_chk,
-                              '__SRC_CHK__': tgz_chk,
+                              '__SRC_CHK__': zip_chk,
                               '__SITE_USK__' : site_uri,
                               '__RELEASE_NOTES__' : open(RELEASE_NOTES).read(),
                               })
@@ -270,11 +274,15 @@ def release():
     print
     print "------------------------------------------------------------"
 
-    head, jar_file, tgz_file = stage_release()
-    jar_chk, tgz_chk = insert_files(FCP_HOST, FCP_PORT, [jar_file, tgz_file])
-    update_html(head, jar_chk, tgz_chk)
+    head, jar_file, zip_file = stage_release()
+
+    if JUST_STAGE:
+        return
+
+    jar_chk, zip_chk = insert_files(FCP_HOST, FCP_PORT, [jar_file, zip_file])
+    update_html(head, jar_chk, zip_chk)
     site_uri, target_index = insert_freesite()
-    send_fms_notification(site_uri, target_index, head, jar_chk, tgz_chk)
+    send_fms_notification(site_uri, target_index, head, jar_chk, zip_chk)
 
     print
     print "Success!"
diff --git a/script/graphtest.sh b/script/graphtest.sh
new file mode 100755
--- /dev/null
+++ b/script/graphtest.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+
+# Script to run GraphLog.java code from a text file for debugging.
+
+export SCRIPT_DIR=`dirname $0`
+. "${SCRIPT_DIR}/setup_env.sh"
+
+${JAVA_CMD} -classpath ${JAR_FILE}:${FN_JAR_FILE} fniki.wiki.GraphLog "$@"
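
A likely invocation, for reference (this assumes GraphLog's main() accepts the path of a graph text file as its argument, which is what the test files added below suggest; each non-comment line in those files appears to be a "child parent" edge):

    ./script/graphtest.sh test/graphs/branch0.txt
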
diff --git a/src/fniki/freenet/plugin/Fniki.java b/src/fniki/freenet/plugin/Fniki.java
--- a/src/fniki/freenet/plugin/Fniki.java
+++ b/src/fniki/freenet/plugin/Fniki.java
@@ -105,7 +105,7 @@ public class Fniki implements FredPlugin
                     continue;
                 }
                 mParamTable.put(name, mParent.getParam(name));
-                System.err.println("Set Param: " + name + " : " + mParamTable.get(name));
+                //System.err.println("Set Param: " + name + " : " + mParamTable.get(name));
             }
 
             // Then read multipart params if there are any.
@@ -118,8 +118,8 @@ public class Fniki implements FredPlugin
 
                         String value = new String(mParent.getPartAsBytesFailsafe(part, 64 * 1024), "utf-8");
                         mParamTable.put(part, value);
-                        System.err.println("Set multipart Param: " + part + " : " +
-                                           mParamTable.get(part));
+                        // System.err.println("Set multipart Param: " + part + " : " +
+                        //                    mParamTable.get(part));
                     }
                 } catch (UnsupportedEncodingException ue) {
                     // Shouldn't happen.
@@ -157,7 +157,7 @@ public class Fniki implements FredPlugin
                 throw new RuntimeException("Request doesn't start with: " + containerPrefix);
             }
 
-            System.err.println("Raw path: " + path);
+            //System.err.println("Raw path: " + path);
             path = path.substring(containerPrefix.length());
 
             while(path.startsWith("/")) {
diff --git a/src/fniki/standalone/FnikiContextHandler.java b/src/fniki/standalone/FnikiContextHandler.java
--- a/src/fniki/standalone/FnikiContextHandler.java
+++ b/src/fniki/standalone/FnikiContextHandler.java
@@ -63,7 +63,7 @@ public class FnikiContextHandler impleme
                 }
                 baos.write(oneByte);
             }
-            System.err.println("read part bytes: " +  baos.toByteArray().length);
+            //System.err.println("read part bytes: " +  baos.toByteArray().length);
             return new String(baos.toByteArray(), "utf8");
         }
 
@@ -76,7 +76,7 @@ public class FnikiContextHandler impleme
                 if (!parentParams.containsKey(name)) {
                     continue;
                 }
-                System.err.println("Set Param: " + name + " : " + parentParams.get(name));
+                //System.err.println("Set Param: " + name + " : " + parentParams.get(name));
                 mParamTable.put(name, parentParams.get(name));
             }
 
@@ -90,10 +90,10 @@ public class FnikiContextHandler impleme
                         continue;
                     }
                     mParamTable.put(part.name, readAsUtf8(part));
-                    System.err.println(String.format("Set multipart Param: %s[%d]:\n%s",
-                                                     part.name,
-                                                     mParamTable.get(part.name).length(),
-                                                     mParamTable.get(part.name)));
+                    // System.err.println(String.format("Set multipart Param: %s[%d]:\n%s",
+                    //                                  part.name,
+                    //                                  mParamTable.get(part.name).length(),
+                    //                                  mParamTable.get(part.name)));
                 }
                 mParent.consumeBody();
             }
@@ -127,7 +127,7 @@ public class FnikiContextHandler impleme
                 throw new RuntimeException("Request doesn't start with: " + containerPrefix);
             }
 
-            System.err.println("Raw path: " + path);
+            //System.err.println("Raw path: " + path);
 
             path = path.substring(containerPrefix.length());
             while(path.startsWith("/")) {
diff --git a/src/fniki/wiki/ArchiveManager.java b/src/fniki/wiki/ArchiveManager.java
--- a/src/fniki/wiki/ArchiveManager.java
+++ b/src/fniki/wiki/ArchiveManager.java
@@ -375,9 +375,12 @@ public class ArchiveManager {
     }
 
     public List<FMSUtil.BISSRecord> getRecentWikiVersions(PrintStream out) throws IOException {
+        out.println("Reading version announcements via NNTP...");
+
         List<FMSUtil.BISSRecord> records =
             FMSUtil.getBISSRecords(mFmsHost, mFmsPort, mFmsId, mFmsGroup, mBissName, MAX_VERSIONS);
 
+        out.println("Finished reading. Processing...");
         // LATER: do better.
         for (FMSUtil.BISSRecord record : records) {
             String fields[] = record.mFmsId.split("@");
@@ -394,6 +397,7 @@ public class ArchiveManager {
             mNymLut.put(fields[1].trim(), fields[0].trim());
         }
 
+        out.println("Finished processing.");
         return records;
     }
 
diff --git a/src/fniki/wiki/GraphLog.java b/src/fniki/wiki/GraphLog.java
--- a/src/fniki/wiki/GraphLog.java
+++ b/src/fniki/wiki/GraphLog.java
@@ -374,7 +374,7 @@ public class GraphLog {
     }
 
     // A directed graph node.
-    public static class DAGNode {
+    public static class DAGNode implements Comparable<DAGNode> {
         public final String mTag;
         public final int mId;
         public final List<Integer> mParentIds;
@@ -383,6 +383,9 @@ public class GraphLog {
             mId = id;
             mParentIds = parentIds;
         }
+        public int compareTo(DAGNode other) {
+            return other.mId - mId; // Descending order of integer id.
+        }
 
         public String asString() {
             return "{" + mTag + ", " + mId + ", " + GraphLog.prettyString_(mParentIds) + "}";
@@ -459,13 +462,10 @@ public class GraphLog {
 
     static class ParentInfo {
         public final Set<String> mAllParents;
-        public final Set<String> mUnseenParents;
         public final Set<String> mRootIds;
         public ParentInfo(Set<String> allParents,
-                          Set<String> unseenParents,
                           Set<String> rootIds) {
             mAllParents = allParents;
-            mUnseenParents = unseenParents;
             mRootIds = rootIds;
         }
     }
@@ -492,6 +492,19 @@ public class GraphLog {
         }
     }
 
+    static void remove_edges(List<GraphEdge> edges, String matchingFrom) {
+        top_of_while:
+        while (true) {
+            for (int index = 0; index < edges.size(); index++) {
+                if (edges.get(index).mFrom.equals(matchingFrom)) {
+                    edges.remove(index);
+                    continue top_of_while; // Restart the scan; removal shifts the indexes.
+                }
+            }
+            break; // No match left in a full pass.
+        }
+    }
+
     static Set<String> allDescendants(Map<String, Set<String>> graph, String leaf_id) {
         Set<String> seen = new HashSet<String>();
         List<String> stack = new ArrayList<String>();
@@ -578,39 +591,6 @@ public class GraphLog {
         return graph;
     }
 
-    // IMPORTANT: The "reversed" graph is what you need to traverse to build the DAG.
-    // Take a graph which maps node ids -> set of parents and create a graph
-    // which maps node ids -> set of children from it.
-    static Map<String, Set<String>> get_reversed_graph(Map<String, Set<String>> graph,
-                                                       Set<String> root_ids,
-                                                       OrdinalMapper ordinals) {
-        Map<String, Set<String>> reversed_graph = new HashMap<String, Set<String>>();
-
-        for(Map.Entry<String, Set<String>> entry : graph.entrySet()) {
-            String key = entry.getKey();
-            Set<String> parents = entry.getValue();
-
-            for (String parent : parents) {
-                if (!graph.keySet().contains(parent)) {
-                    continue;
-                }
-
-                if (!reversed_graph.keySet().contains(parent)) {
-                    reversed_graph.put(parent, new HashSet<String>());
-                }
-
-                //# i.e. is a child of the parent
-                reversed_graph.get(parent).add(key);
-            }
-
-            //# For leaf nodes.
-            if (!reversed_graph.keySet().contains(key)) {
-                reversed_graph.put(key, new HashSet<String>());
-            }
-        }
-        return reversed_graph;
-    }
-
     // DCI: fix name
     public static ParentInfo getParentInfo(Map<String, Set<String>> graph) {
         Set<String> allParents = new HashSet<String>();
@@ -645,107 +625,96 @@ public class GraphLog {
             }
         }
 
-        return new ParentInfo(allParents, unseenParents, rootIds);
+        return new ParentInfo(allParents, rootIds);
     }
 
-    // LATER: Grok harder. Possible to get rid of this?
-    //# Breadth first traversal of the graph to force creation of ordinals
-    //# in an order that won't freak out the drawing code.
-    static void traverse_in_plausible_commit_order(Map<String, Set<String>> reversed_graph,
-                                                   Set<String> root_ids,
-                                                   OrdinalMapper ordinals) {
+    // Graph maps from child -> list of parents (i.e. like a commit DAG).
+    static void set_ordinals_in_plausible_commit_order(Map<String, Set<String>> graph,
+                                                       OrdinalMapper ordinals) {
 
-        LinkedList<String> queue = new LinkedList<String>();
-        queue.addAll(root_ids);
-        Collections.sort(queue);
+        // Make a list of all parent-to-child edges.
+        // Each edge represents the constraint "the ordinal of this parent is less
+        // than the ordinal of this child".
+        List<GraphEdge> edges = new ArrayList<GraphEdge>();
+        Set<String> leaf_nodes = new HashSet<String>();
+        for (Map.Entry<String, Set<String>> entry : graph.entrySet()) {
+            String child = entry.getKey();
+            Set<String> parents = entry.getValue();
+            if (parents.size() == 0) {
+                leaf_nodes.add(child);
+                continue;
+            }
 
-        //# Force creation of ordinals for root nodes
-        for (String id_value : queue) {
-            ordinals.ordinal(id_value);
+            for (String parent : parents) {
+                edges.add(new GraphEdge(parent, child));
+            }
         }
 
-        Set<String> seen = new HashSet<String>();
-        while (queue.size() > 0) {
-            String id_value = queue.removeFirst();
-            if (seen.contains(id_value)) {
-                continue;
+        while (!edges.isEmpty()) {
+            // Find the vertices that no remaining edge points to.
+            Set<String> candidates = new HashSet<String>();
+            Set<String> referenced = new HashSet<String>();
+            for (GraphEdge edge : edges) {
+                candidates.add(edge.mFrom);
+                referenced.add(edge.mTo);
             }
-            seen.add(id_value);
+            // i.e. the parents that are not themselves a child in any remaining edge.
+            // They must get lower ordinals than all unprocessed children.
+            candidates.removeAll(referenced);
+            assert_(candidates.size() > 0);
 
-            // Tricky: Sort to force creation of new ordinals in "natural order"
-            //         of child ids.
-            List<String> child_ids = new ArrayList<String>();
-            child_ids.addAll(reversed_graph.get(id_value));
-            Collections.sort(child_ids);
+            List<String> sorted = new ArrayList<String>(candidates);
+            Collections.sort(sorted);
+            for (String candidate : sorted) {
+                // Set the ordinal.
+                ordinals.ordinal(candidate);
+                // Remove any unreferenced edges from the list.
+                // LATER: fix crappy code. do in one pass outside loop.
+                remove_edges(edges, candidate);
+            }
+        }
 
-            for (String child_id : child_ids) {
-                ordinals.ordinal(child_id);
-                queue.add(child_id);
-            }
+        // Set the ordinals for nodes which have no children.
+        List<String> sorted_leaf_nodes = new ArrayList<String>(leaf_nodes);
+        Collections.sort(sorted_leaf_nodes);
+        for (String leaf_node : sorted_leaf_nodes) {
+            ordinals.ordinal(leaf_node);
         }
     }
 
     // Create a list of graph nodes in the correct order so that
     // they can be used to draw the graph with the ascii() function.
     static List<DAGNode> build_dag(Map<String, Set<String>> graph, String null_rev_name) {
-        ParentInfo parentInfo = getParentInfo(graph);
-        Set<String> all_parents = parentInfo.mAllParents;
-        Set<String> unseen_parents = parentInfo.mUnseenParents;
-        Set<String> root_ids = parentInfo.mRootIds;
+        assert_(!graph.keySet().contains(null_rev_name));
 
         OrdinalMapper ordinals = new OrdinalMapper();
 
-        Map<String, Set<String>> reversed_graph = get_reversed_graph(graph, root_ids, ordinals);
+        int null_rev_ordinal = ordinals.ordinal(null_rev_name);
 
         //# Important: This sets the graph ordinals correctly.
-        traverse_in_plausible_commit_order(reversed_graph, root_ids, ordinals);
-
-        int null_rev_ordinal = ordinals.ordinal(null_rev_name);
-
-        LinkedList<String> queue = new LinkedList<String>();
-        queue.addAll(root_ids);
-        Collections.sort(queue);
+        set_ordinals_in_plausible_commit_order(graph, ordinals);
 
         List<DAGNode> dag = new ArrayList<DAGNode>();
-        Set<String> seen = new HashSet<String>();
-        while (queue.size() > 0) {
-            String id_value = queue.removeFirst();
-            if (seen.contains(id_value)) {
-                continue;
-            }
-            seen.add(id_value);
+        for(Map.Entry<String, Set<String>> entry : graph.entrySet()) {
+            String child = entry.getKey();
 
-            List<Integer> parents = new ArrayList<Integer>();
-            for (String parent_id : graph.get(id_value)) {
-                // This accesses ordinals in parent order, but the DAG requires them
-                // to be in child order.  That's why we need the call to
-                // traverse_in_plausible_commit_order above.
-                parents.add(ordinals.ordinal(parent_id));
-            }
-            Collections.sort(parents);
-
-            if (parents.size() == 1 && parents.get(0) == null_rev_ordinal) {
-                // Causes ascii() not to draw a line down from the 'o'.
-                parents = new ArrayList<Integer>();
+            List<Integer> parent_ids = new ArrayList<Integer>();
+            for (String parent : entry.getValue()) {
+                parent_ids.add(ordinals.ordinal(parent));
             }
 
-            dag.add(new DAGNode(id_value, ordinals.ordinal(id_value), parents));
-
-            if (!reversed_graph.keySet().contains(id_value)) {
-                continue;
+            if (parent_ids.size() == 1 && parent_ids.get(0) == null_rev_ordinal) {
+                // Causes ascii() not to draw a line down from the 'o'.
+                parent_ids = new ArrayList<Integer>();
             }
 
-            // Tricky: Must traverse in order or the dag nodes won't be right.
-            List<String> child_ids = new ArrayList<String>();
-            child_ids.addAll(reversed_graph.get(id_value));
-            Collections.sort(child_ids);
-
-            for (String child_id : child_ids) {
-                queue.add(child_id);
-            }
+            dag.add(new DAGNode(child, ordinals.ordinal(child), parent_ids));
         }
 
-        Collections.reverse(dag);
+        //ordinals.dump();
+
+        // IMPORTANT: Sort in descending ordinal order, otherwise the drawing code won't work.
+        Collections.sort(dag);
         return dag;
     }
 
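
The rewritten ordering code above is essentially a topological sort over the parent -> child constraint edges: repeatedly emit, in sorted batches, the nodes whose parents have all been numbered, so every parent ends up with a smaller ordinal than each of its children. A small standalone sketch of that idea follows. Illustration only: the class and variable names are made up, the tiny graph is loosely based on test/graphs/branch0.txt, and this is not the GraphLog API.

    import java.util.*;

    public class OrderSketch {
        public static void main(String[] args) {
            // child -> parents, like the graph handed to build_dag(). "Z" plays
            // the role of the null revision and is deliberately not a key.
            Map<String, Set<String>> parentsOf = new HashMap<String, Set<String>>();
            parentsOf.put("a0", new HashSet<String>(Arrays.asList("Z")));
            parentsOf.put("b0", new HashSet<String>(Arrays.asList("a0")));
            parentsOf.put("c0", new HashSet<String>(Arrays.asList("b0")));
            parentsOf.put("a1", new HashSet<String>(Arrays.asList("Z")));
            parentsOf.put("d",  new HashSet<String>(Arrays.asList("c0", "a1")));

            Map<String, Integer> ordinal = new LinkedHashMap<String, Integer>();
            Map<String, Set<String>> pending = new HashMap<String, Set<String>>();
            for (Map.Entry<String, Set<String>> entry : parentsOf.entrySet()) {
                pending.put(entry.getKey(), new HashSet<String>(entry.getValue()));
            }

            while (!pending.isEmpty()) {
                // Nodes whose parents are all numbered already (or outside the graph).
                List<String> ready = new ArrayList<String>();
                for (Map.Entry<String, Set<String>> entry : pending.entrySet()) {
                    Set<String> unresolved = new HashSet<String>(entry.getValue());
                    unresolved.retainAll(pending.keySet());
                    if (unresolved.isEmpty()) {
                        ready.add(entry.getKey());
                    }
                }
                Collections.sort(ready); // Ties broken in "natural" id order.
                for (String id : ready) {
                    ordinal.put(id, ordinal.size());
                    pending.remove(id);
                }
            }

            // Prints {a0=0, a1=1, b0=2, c0=3, d=4}: every parent has a smaller
            // ordinal than each of its children.
            System.out.println(ordinal);
        }
    }
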
diff --git a/src/fniki/wiki/WikiApp.java b/src/fniki/wiki/WikiApp.java
--- a/src/fniki/wiki/WikiApp.java
+++ b/src/fniki/wiki/WikiApp.java
@@ -160,9 +160,9 @@ public class WikiApp implements ChildCon
             return mState;
         }
 
-        System.err.println(String.format("[%s] => [%s]",
-                                         mState.getClass().getName(),
-                                         container.getClass().getName()));
+        // System.err.println(String.format("[%s] => [%s]",
+        //                                  mState.getClass().getName(),
+        //                                  container.getClass().getName()));
         if (mState != null && mState instanceof ModalContainer) {
             ((ModalContainer)mState).exited();
         }
@@ -200,9 +200,9 @@ public class WikiApp implements ChildCon
             // Handle transitions out of modal UI states.
             ModalContainer state = (ModalContainer)mState;
             if (action.equals("finished")) {
-                System.err.println("finished");
+                //System.err.println("finished");
                 if (!state.isFinished()) {
-                    System.err.println("canceling");
+                    //System.err.println("canceling");
                     state.cancel();
                     try {
                         Thread.sleep(250); // HACK
@@ -212,7 +212,7 @@ public class WikiApp implements ChildCon
                 }
                 // No "else" because it might have finished while sleeping.
                 if (state.isFinished()) {
-                    System.err.println("finished");
+                    //System.err.println("finished");
                     setState(request, mWikiContainer);
                     return mGotoRedirect;
                 }
@@ -229,7 +229,7 @@ public class WikiApp implements ChildCon
         }
 
         // DCI: Fix. Use a hashmap of paths -> instances for static paths
-        System.err.println("WikiApp.routeRequest: " + path);
+        //System.err.println("WikiApp.routeRequest: " + path);
         if (path.equals("fniki/config")) {
             return setState(request, mSettingConfig);
         } else if (path.equals("fniki/submit")) {
@@ -257,7 +257,7 @@ public class WikiApp implements ChildCon
     public synchronized String handle(WikiContext context) throws ChildContainerException {
         try {
             ChildContainer childContainer = routeRequest(context);
-            System.err.println("Request routed to: " + childContainer.getClass().getName());
+            //System.err.println("Request routed to: " + childContainer.getClass().getName());
 
             return mFilter.filter(childContainer.handle(context));
         } catch (ChildContainerException cce) {
@@ -452,15 +452,15 @@ public class WikiApp implements ChildCon
         // Hacks to find bugs
         if (!containerRelativePath.startsWith("/")) {
             containerRelativePath = "/" + containerRelativePath;
-            System.err.println("WikiApp.makeLink -- added leading '/': " +
-                               containerRelativePath);
+            // System.err.println("WikiApp.makeLink -- added leading '/': " +
+            //                    containerRelativePath);
             (new RuntimeException("find missing /")).printStackTrace();
 
         }
         String full = containerPrefix() + containerRelativePath;
         while (full.indexOf("//") != -1) {
-            System.err.println("WikiApp.makeLink -- fixing  '//': " +
-                               full);
+            // System.err.println("WikiApp.makeLink -- fixing  '//': " +
+            //                    full);
             full = full.replace("//", "/");
             (new RuntimeException("find extra /")).printStackTrace();
         }
diff --git a/src/fniki/wiki/child/AsyncTaskContainer.java b/src/fniki/wiki/child/AsyncTaskContainer.java
--- a/src/fniki/wiki/child/AsyncTaskContainer.java
+++ b/src/fniki/wiki/child/AsyncTaskContainer.java
@@ -147,17 +147,17 @@ public abstract class AsyncTaskContainer
     protected void invokeWorkerMethod() {
         boolean failed = true;
         try {
-            System.err.println("Task started: " + mState);
+            //System.err.println("Task started: " + mState);
             PrintStream log = new PrintStream(mBuffer, true);
             mArchiveManager.setDebugOutput(log);
-           failed = !doWork(new PrintStream(mBuffer, true));
+            failed = !doWork(new PrintStream(mBuffer, true));
         } catch (Exception e) {
             e.printStackTrace();
             failed = true;
         } finally {
             synchronized (this) {
                 mState = failed ? STATE_FAILED : STATE_SUCCEEDED;
-                System.err.println("Task finished: " + mState);
+                //System.err.println("Task finished: " + mState);
                 mThread = null;
             }
         }
diff --git a/src/fniki/wiki/child/LoadingVersionList.java b/src/fniki/wiki/child/LoadingVersionList.java
--- a/src/fniki/wiki/child/LoadingVersionList.java
+++ b/src/fniki/wiki/child/LoadingVersionList.java
@@ -1,4 +1,4 @@
-/* A UI subcomponent to load a list of other versions of this wiki via FMS.
+/* A UI subcomponent to load a list of other versions of this wiki via NNTP.
  *
  * Copyright (C) 2010, 2011 Darrell Karbott
  *
@@ -97,19 +97,19 @@ public class LoadingVersionList extends 
             switch (getState()) {
             case STATE_WORKING:
                 showBuffer = true;
-                title = "Loading Wiki Version Info from FMS";
+                title = "Loading Wiki Version Info from Board";
                 cancelTitle = "Cancel";
                 break;
             case STATE_WAITING:
                 // Shouldn't hit this state.
                 showBuffer = false;
-                title = "Load Wiki Version Info from FMS";
+                title = "Load Wiki Version Info from Board";
                 confirmTitle = "Load";
                 cancelTitle = "Cancel";
                 break;
             case STATE_SUCCEEDED:
                 showBuffer = true;
-                title = "Loaded Wiki Version Info from FMS";
+                title = "Loaded Wiki Version Info from Board";
                 confirmTitle = null;
                 cancelTitle = "Done";
                 break;
@@ -132,7 +132,7 @@ public class LoadingVersionList extends 
             body.println("</head><body>\n");
 
             body.println("<h3>" + escapeHTML(title) + "</h3>");
-            body.println(String.format("wikiname:%s<br>FMS group:%s<p>",
+            body.println(String.format("wikiname: %s<br>board: %s<p>",
                                     escapeHTML(context.getString("wikiname", "NOT_SET")),
                                     escapeHTML(context.getString("fms_group", "NOT_SET"))));
 
@@ -267,9 +267,10 @@ public class LoadingVersionList extends 
         }
     }
 
-    public synchronized String getRevisionGraphHtml(List<FMSUtil.BISSRecord> records)
+    public synchronized String getRevisionGraphHtml(PrintStream textLog, List<FMSUtil.BISSRecord> records)
         throws IOException {
 
+        textLog.println("Building graph...");
         // Build a list of revision graph edges from the NNTP notification records.
         List<GraphLog.GraphEdge> edges = new ArrayList<GraphLog.GraphEdge>();
         Map<String, List<FMSUtil.BISSRecord>> lut = new HashMap<String, List<FMSUtil.BISSRecord>>();
@@ -347,8 +348,10 @@ public class LoadingVersionList extends 
                     lines.add(String.format("date: %s", reference.mDate)); // Reliable?
                 }
                 String[] parentsAgain  = getParentVersions(references.get(0));
+
+                lines.add(escapeHTML(String.format("parent: [%s]", parentsAgain[0])));
                 if (parentsAgain.length == 2) {
-                    lines.add(escapeHTML(String.format("rebased: %s (UNVERIFIED!)", parentsAgain[1])));
+                    lines.add(escapeHTML(String.format("rebased: [%s] (UNVERIFIED!)", parentsAgain[1])));
                 }
 
                 lines.add("");
@@ -358,6 +361,8 @@ public class LoadingVersionList extends 
         }
         out.write("</pre>\n");
         out.flush();
+        textLog.println("Finished.");
+
         return out.toString();
     }
 
@@ -366,8 +371,7 @@ public class LoadingVersionList extends 
             mListHtml = new StringBuilder();
         }
         try {
-            out.println("Reading versions from FMS...");
-            String graphHtml = getRevisionGraphHtml(mArchiveManager.getRecentWikiVersions(out));
+            String graphHtml = getRevisionGraphHtml(out, mArchiveManager.getRecentWikiVersions(out));
             synchronized (this) {
                 mListHtml.append(graphHtml);
             }
diff --git a/test/graphs/branch0.txt b/test/graphs/branch0.txt
new file mode 100644
--- /dev/null
+++ b/test/graphs/branch0.txt
@@ -0,0 +1,12 @@
+c0 b0
+b0 a0
+a0 Z
+
+c1 b1
+b1 a1
+a1 Z
+
+d c0
+d c1
+
+
diff --git a/test/graphs/branch1.txt b/test/graphs/branch1.txt
new file mode 100644
--- /dev/null
+++ b/test/graphs/branch1.txt
@@ -0,0 +1,12 @@
+c0 b0
+b0 a0
+a0 Z0
+
+c1 b1
+b1 a1
+a1 Z1
+
+d c0
+d c1
+
+
diff --git a/test/graphs/empty.txt b/test/graphs/empty.txt
new file mode 100644
--- /dev/null
+++ b/test/graphs/empty.txt
@@ -0,0 +1,1 @@
+# empty on purpose
diff --git a/test/graphs/linear.txt b/test/graphs/linear.txt
new file mode 100644
--- /dev/null
+++ b/test/graphs/linear.txt
@@ -0,0 +1,4 @@
+c b
+b a
+a Z
+
diff --git a/test/graphs/one_edge.txt b/test/graphs/one_edge.txt
new file mode 100644
--- /dev/null
+++ b/test/graphs/one_edge.txt
@@ -0,0 +1,1 @@
+b a
diff --git a/test/graphs/sethcg.34194b24d92a.bug.txt b/test/graphs/sethcg.34194b24d92a.bug.txt
new file mode 100644
--- /dev/null
+++ b/test/graphs/sethcg.34194b24d92a.bug.txt
@@ -0,0 +1,40 @@
+# Used to diagnose a bug reported in the wild in version: 34194b24d92a.
+08c26c3ba8625fca 0b492241db429a63
+
+e76f3653fa8464c6 14d0b189ba3c9eb6
+
+0b492241db429a63 64d55b97a35bc07d
+
+14d0b189ba3c9eb6 26c56f1461beae7f
+
+64d55b97a35bc07d e76f3653fa8464c6
+
+64d55b97a35bc07d 7bbb7de37f88f49e
+
+26c56f1461beae7f 551458011a230904
+
+7bbb7de37f88f49e 0bd59cc84b2043ed
+
+551458011a230904 0bd59cc84b2043ed
+
+0bd59cc84b2043ed 0f5a714975290de4
+
+# --- dag ---
+# o  id: 08c26c3ba8625fca (10)
+# |
+# o  id: 0b492241db429a63 (9)
+# |
+# o    id: 64d55b97a35bc07d (8)
+# |\
+# | o  id: e76f3653fa8464c6 (7)
+# | |
+# | o  id: 14d0b189ba3c9eb6 (6)
+# | |
+# | o  id: 26c56f1461beae7f (5)
+# | |
+# o |  id: 7bbb7de37f88f49e (4)
+# | |
+# | o  id: 551458011a230904 (3)
+# |/
+# o  id: 0bd59cc84b2043ed (2)
+# |