site

(djk)
2011-05-14: Wrote script to insert releases into Freenet.

Wrote script to insert releases into Freenet.

diff --git a/doc/latest_release.txt b/doc/latest_release.txt
new file mode 100644
diff --git a/freesite.cfg b/freesite.cfg
new file mode 100644
--- /dev/null
+++ b/freesite.cfg
@@ -0,0 +1,19 @@
+# freesite.cfg used by fn-putsite (called from cut_release.py)
+[default]
+# Human readable site name.
+# MUST match value in cut_release.py
+site_name = jfniki_releases_tst000
+# Directory to insert from relative to the repository root.
+site_dir = release/generated_freesite
+# Optional external file to load the site key from, relative
+# to the directory your .infocalypse/infocalypse.ini file
+# is stored in. This file should contain ONLY the SSK insert
+# key up to the first slash.
+#
+# If this value is not set the insert SSK for the repo is
+# used.
+#site_key_file = example_freesite_key.txt
+#
+# Optional file to display by default.  If this is not
+# set index.html is used.
+#default_file = index.html
diff --git a/release/cut_release.py b/release/cut_release.py
new file mode 100755
--- /dev/null
+++ b/release/cut_release.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python
+
+""" Script to insert jfniki releases into Freenet.
+
+    Copyright (C) 2011 Darrell Karbott
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public
+    License as published by the Free Software Foundation; either
+    version 2.0 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+    Author: djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
+"""
+
+# This script isn't really for public consumption.
+#
+# It assumes you have hg infocalypse installed and configured.
+#
+# BUG: ANONYMITY ISSUE: This script currently leaks the *nix user id
+#      into the inserted .tgz and .jar files.
+#      DO NOT RUN IT if this concerns you.
+#
+import os
+import shutil
+import subprocess
+import tarfile
+
+from binascii import hexlify
+
+from mercurial import ui, hg, commands
+
+from insert_files import insert_files
+from minimalfms import get_connection, send_msgs
+
+############################################################
+
+# CAUTION: This directory is recursively deleted!
+STAGING_DIR = '/tmp/staging'
+
+FCP_HOST = '127.0.0.1'
+FCP_PORT = 19481
+
+FMS_HOST = '127.0.0.1'
+FMS_PORT =  11119
+
+FMS_ID = 'djk'
+FMS_GROUP = 'test'
+
+# REQUIRES: must match name in freesite.cfg. LATER: fix.
+SITE_NAME = 'jfniki_releases_tst000'
+
+PUBLIC_SITE = "USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw," + \
+              "2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,AQACAAE/%s/%%d/" % \
+              SITE_NAME
+
+############################################################
+# Indexes of referenced USK sites
+
+FREENET_DOC_WIKI_IDX = 30
+FNIKI_IDX = 81
+REPO_IDX = 13
+
+############################################################
+
+THIS_FILES_DIR = os.path.abspath(os.path.dirname(__file__))
+
+REPO_DIR = os.path.abspath(os.path.join(THIS_FILES_DIR, '..'))
+FREENET_JAR = os.path.abspath(os.path.join(THIS_FILES_DIR, '../alien/libs/freenet.jar'))
+RELEASE_NOTES = os.path.abspath(os.path.join(THIS_FILES_DIR, '../doc/latest_release.txt'))
+INDEX_HTML = os.path.abspath(os.path.join(THIS_FILES_DIR, 'generated_freesite/index.html'))
+INDEX_HTML_TEMPLATE = os.path.abspath(os.path.join(THIS_FILES_DIR, 'index_template.html'))
+FMS_MESSAGE_TEMPLATE = os.path.abspath(os.path.join(THIS_FILES_DIR, 'fms_message_template.txt'))
+
+############################################################
+
+def stage_release():
+    """ Export a pristine copy of the repo tip into STAGING_DIR, tar up
+        the source, build jfniki.jar with ant, and return a
+        (head, jar_file_name, tgz_file_name) tuple.
+
+        CAUTION: Recursively deletes STAGING_DIR before staging.
+        Asserts that the repository has exactly one head.
+    """
+    # LATER: check for uncommitted changes
+    ui_ = ui.ui()
+    repo = hg.repository(ui_, REPO_DIR)
+
+    # Get current head.
+    heads = [hexlify(repo[head].node())[:12] for head in repo.heads()]
+    assert len(heads) == 1 # Don't try to handle multiple heads
+    head = heads[0]
+
+    # Staged artifact names include the 12 hex digit hg rev.
+    jar_name = "jfniki.%s.jar" % head
+    tgz_name = "jfniki.%s.tgz" % head
+    export_dir_name = "jfniki.%s" % head
+
+    tgz_file_name = "%s/%s" % (STAGING_DIR, tgz_name)
+    jar_file_name = "%s/%s" % (STAGING_DIR, jar_name)
+
+    # scrub staging directory
+    try:
+        shutil.rmtree(STAGING_DIR)
+    except:
+        pass # Best effort. The directory may not exist yet.
+
+    os.makedirs(STAGING_DIR)
+
+    # dump clean source to staging
+    dest =  "%s/%s" % (STAGING_DIR, export_dir_name)
+
+    # TRICKY: Had to put a print in the implementation of command.archive to figure
+    #         out required default opts.
+    # {'rev': '', 'no_decode': None, 'prefix': '', 'exclude': [], 'include': [], 'type': ''}
+    commands.archive(ui_, repo, dest,
+                     rev='', no_decode=None, prefix='', exclude=[], include=[], type='')
+
+
+    # remove origin tarballs  to save space
+    shutil.rmtree("%s/alien/origins/" % dest)
+
+    # tar up source
+    tgz_file = tarfile.open(tgz_file_name, 'w:gz')
+
+    #def reset(tarinfo):
+    #    tarinfo.uid = tarinfo.gid = 0
+    #    tarinfo.uname = tarinfo.gname = "root"
+    #    return tarinfo
+    # LATER: Use line after upgrading python. Keeps uid, gid, uname out of tar.
+    # tgz_file.add("%s/%s" % (STAGING_DIR, export_dir_name),  filter=reset) # python 2.7
+    # NOTE: ANONYMITY ISSUE: without the filter above the tar entries
+    #       carry the local uid/gid/uname (see warning at top of file).
+    tgz_file.add("%s/%s" % (STAGING_DIR, export_dir_name))
+    tgz_file.close()
+
+    # cp freenet.jar required for build
+    os.makedirs("%s/%s/%s" % (STAGING_DIR, export_dir_name, "alien/libs"))
+    shutil.copyfile(FREENET_JAR, "%s/%s/%s" % (STAGING_DIR, export_dir_name, "alien/libs/freenet.jar"))
+
+    # build jar
+    # check_call raises CalledProcessError on a non-zero exit code,
+    # so result is always 0 when the print below is reached.
+    result = subprocess.check_call(["/usr/bin/ant",
+                                    "-buildfile",
+                                    "%s/%s/build.xml" % (STAGING_DIR, export_dir_name)])
+    print "ant result code: %d" % result
+
+    # copy jar with name including the hg rev.
+    shutil.copyfile("%s/%s/%s" % (STAGING_DIR, export_dir_name, "build/jar/jfniki.jar"), jar_file_name)
+
+    print
+    print "SUCCESSFULLY STAGED:"
+    print jar_file_name
+    print tgz_file_name
+    print
+    return (head, jar_file_name, tgz_file_name)
+
+
+def simple_templating(text, substitutions):
+    """ Substitute each key of substitutions in text with str(value).
+
+        Asserts that no unresolved '__' variable markers remain.
+    """
+    for variable, value in substitutions.items():
+        text = text.replace(variable, str(value))
+    assert "__" not in text # Catch unresolved variables
+    return text
+
+def latest_site_index(repo): # C&P: wikibot.py
+    """ Read the latest known freesite index out of the hg changelog.
+
+        Returns -1 if no 'I_<num>' site tag exists.
+    """
+    site_tags = [tag for tag, dummy in reversed(repo.tagslist())
+                 if tag.startswith('I_')]
+    if site_tags:
+        return int(site_tags[0].split('_')[1])
+    return -1
+
+def tag_site_index(ui_, repo, index=None): # C&P: wikibot.py
+    """ Tag the local repository with a freesite index.
+
+        If index is None, tags with one past the latest existing
+        'I_<num>' site tag (or with 0 when there is none).
+    """
+    if index is None:
+        index = latest_site_index(repo) + 1
+    commands.tag(ui_, repo, 'I_%i' % index)
+
+############################################################
+# ATTRIBUTION:
+# http://wiki.python.org/moin/EscapingHtml
+# Maps HTML-special characters to their entity escapes.
+# BUG FIX: the table previously mapped every character to *itself*
+# (the entity strings had been entity-decoded away), which made
+# html_escape() a no-op and allowed raw markup from the release notes
+# into the generated index.html.
+HTML_ESCAPE_TABLE = {
+    "&": "&amp;",
+    '"': "&quot;",
+    "'": "&#39;",
+    ">": "&gt;",
+    "<": "&lt;"}
+
+def html_escape(text):
+    """Produce entities within text."""
+    return "".join(HTML_ESCAPE_TABLE.get(c,c) for c in text)
+
+############################################################
+
+def update_html(head, jar_chk, tgz_chk):
+    """ Regenerate generated_freesite/index.html from the template,
+        commit it, and tag the repo with the next 'I_<num>' site index.
+
+        head -- 12 hex digit hg rev of the staged release.
+        jar_chk, tgz_chk -- CHK URIs of the inserted jar and src tarball.
+    """
+    ui_ = ui.ui()
+    repo = hg.repository(ui_, REPO_DIR)
+    # The site hasn't been inserted yet, so point the displayed USK at
+    # the *next* index, which insert_freesite() will insert.
+    site_usk = PUBLIC_SITE % (latest_site_index(repo) + 1)
+
+    html = simple_templating(open(INDEX_HTML_TEMPLATE).read(),
+                             {'__HEAD__':head,
+                              '__JAR_CHK__': jar_chk,
+                              '__SRC_CHK__': tgz_chk,
+                              '__RELEASE_NOTES__' : html_escape(open(RELEASE_NOTES).read()),
+                              '__SITE_USK__': site_usk,
+                              '__INDEX_FDW__': FREENET_DOC_WIKI_IDX,
+                              '__INDEX_FNIKI__': FNIKI_IDX,
+                              '__INDEX_REPO__': REPO_IDX,
+                              })
+
+    updated = open(INDEX_HTML, 'w')
+    updated.write(html)
+    updated.close()
+
+    commit_msg = "index.html:%s" % head
+    # TRICKY: commands.commit() requires all of these keyword opts.
+    commands.commit(ui_, repo, pat = (INDEX_HTML, ),
+                    include = [], addremove = None, close_branch = None,
+                    user = '', date = '',
+                    exclude = [], logfile = '',
+                    message = commit_msg)
+    tag_site_index(ui_, repo)
+
+# Insert the latest tagged freesite version into freenet.
+# NOTE: depends on state set in freesite.cfg
+# REQUIRES: 'I_<num>' tag exists in the repo.
+# REQUIRES: hg infocalypse is installed and configured.
+def insert_freesite():
+    """ Insert the latest tagged freesite version into Freenet via
+        'hg fn-putsite' and return a (public_site_usk, target_index)
+        tuple.
+
+        REQUIRES: 'I_<num>' tag exists in the repo.
+        REQUIRES: hg infocalypse is installed and configured.
+        NOTE: also depends on state set in freesite.cfg.
+    """
+    print REPO_DIR
+    ui_ = ui.ui()
+    repo = hg.repository(ui_, REPO_DIR)
+    target_index = latest_site_index(repo)
+    assert target_index >= 0
+
+    # DCI: Test. does fn-putsite set error code on failure?
+    subprocess.check_call(["/usr/bin/hg",
+                           "-R",
+                           REPO_DIR,
+                           "fn-putsite",
+                           "--index",
+                           str(target_index)])
+
+    # LATER: Do better. Parse request URI from output.
+    return PUBLIC_SITE % target_index, target_index
+
+def send_fms_notification(site_uri, target_index, head, jar_chk, tgz_chk):
+    """ Post a release announcement to FMS_GROUP via the FMS server
+        at FMS_HOST:FMS_PORT.
+
+        site_uri -- public USK of the inserted freesite.
+        target_index -- site index, used in the message subject.
+        head, jar_chk, tgz_chk -- substituted into the
+                                  fms_message_template.txt template.
+    """
+    connection = get_connection(FMS_HOST, FMS_PORT, FMS_ID)
+
+    msg = simple_templating(open(FMS_MESSAGE_TEMPLATE).read(),
+                             {'__HEAD__':head,
+                              '__JAR_CHK__': jar_chk,
+                              '__SRC_CHK__': tgz_chk,
+                              '__SITE_USK__' : site_uri,
+                              '__RELEASE_NOTES__' : open(RELEASE_NOTES).read(),
+                              })
+
+    send_msgs(connection,
+              ((FMS_ID, FMS_GROUP,
+                "jfniki releases #%d" % target_index,
+                msg),))
+
+    print "Sent FMS notification to: %s" % FMS_GROUP
+
+def release():
+    """ Top-level driver: stage the release artifacts, insert them
+        into Freenet, update and insert the freesite, then announce
+        the release on FMS.
+    """
+    print
+    print "If you haven't read the warning about ANONYMITY ISSUES"
+    print "with this script, now might be a good time to hit Ctrl-C."
+    print
+    print
+    head, jar_file, tgz_file = stage_release()
+    jar_chk, tgz_chk = insert_files(FCP_HOST, FCP_PORT, [jar_file, tgz_file])
+    update_html(head, jar_chk, tgz_chk)
+    site_uri, target_index = insert_freesite()
+    send_fms_notification(site_uri, target_index, head, jar_chk, tgz_chk)
+    print
+    print "Success!"
+
+
+if __name__ == "__main__":
+    release()
diff --git a/release/fcpclient.py b/release/fcpclient.py
new file mode 100644
--- /dev/null
+++ b/release/fcpclient.py
@@ -0,0 +1,890 @@
+# REDFLAG: There are changes here that haven't been pushed back into main repo.
+""" Simplified client interface for common FCP request.
+
+    Copyright (C) 2008 Darrell Karbott
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public
+    License as published by the Free Software Foundation; either
+    version 2.0 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+    Author: djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
+"""
+
+import mimetypes, os, re
+
+from fcpconnection import FCPConnection, IDataSource, READ_BLOCK, \
+     MinimalClient, PolledSocket, FCPError, sha1_hexdigest
+
+from fcpmessage import GETNODE_DEF, GENERATE_SSK_DEF, \
+     GET_REQUEST_URI_DEF, GET_DEF, \
+     PUT_FILE_DEF, PUT_REDIRECT_DEF,  PUT_COMPLEX_DIR_DEF
+
+# Defaults for commonly used FCP parameters.
+FCP_PARAM_DEFAULTS = {
+    'ReturnType':'direct',
+    'IgnoreDS':False,
+    'MaxRetries':3,
+    'DontCompress':True, # Hmmmm...
+    'Verbosity':1023 # MUST set this to get progress messages.
+}
+
+#-----------------------------------------------------------#
+# file_info helper functions
+#-----------------------------------------------------------#
+
+def get_file_infos(directory, forced_mime_type=None, accept_regex = None):
+    """ Traverse a directory and return a list of file information
+        tuples which is suitable for use by
+        FCPClient.put_complex_dir().
+
+        forced_mime_type determines the value of
+        the mime_type field in the returned tuples.
+
+        If accept_regex is not None only files whose slash-normalized,
+        directory-relative paths match it are added.
+
+        TUPLE FORMAT:
+        (name, length, mime_type, full_path)
+    """
+
+    def walk_visitor(file_info, dirname, names):
+        """ Function scope visitor implementation passed to os.path.walk.
+
+            file_info is the (base_dir, results, regex) tuple built in
+            the enclosing scope.
+        """
+
+        for name in names:
+            full_name = os.path.join(dirname, name)
+            if os.path.isfile(full_name):
+                base = file_info[0]
+                # Path relative to the traversal root.
+                local_path = full_name.replace(base, '')
+                # REDFLAG: More principled way to do this?
+                # Fix slashes on windows.
+                local_path = local_path.replace('\\', '/') 
+                if file_info[2] and not file_info[2].match(local_path):
+                    # Skip files rejected by the regex
+                    continue
+
+                file_info[1].append((local_path,
+                                     os.path.getsize(full_name),
+                                     forced_mime_type,
+                                     full_name))
+    if directory[-1] != os.path.sep:
+        # Force trailing path separator.
+        directory += os.path.sep
+    file_info = (directory, [], accept_regex) #REDFLAG: bad variable name
+    # NOTE: os.path.walk is Python 2 only (removed in Python 3).
+    os.path.walk(directory, walk_visitor, file_info)
+    return file_info[1]
+
+def total_length(file_infos):
+    """ Returns the sum of the file lengths (field 1) in a
+        file_info tuple list. """
+    # Idiom: sum() over a generator instead of a manual accumulator.
+    return sum(info[1] for info in file_infos)
+
+def set_index_file(file_infos, file_name):
+    """ Move the tuple named file_name to the front of file_infos
+        so that it will be used as the index.
+
+        Raises ValueError if no entry has that name.
+    """
+    for pos, info in enumerate(file_infos): # hmmm... faster search?
+        if info[0] == file_name:
+            file_infos.insert(0, file_infos.pop(pos))
+            return
+
+    raise ValueError("No file named: %s" % file_name)
+
+def sort_file_infos(file_infos):
+    """ Helper function forces file infos into a fixed order.
+
+        Note: Doesn't move the first entry (the index file).
+    """
+    if len(file_infos) < 3:
+        return file_infos
+    return file_infos[:1] + sorted(file_infos[1:])
+
+class FileInfoDataSource(IDataSource):
+    """ IDataSource which concatenates files in a list of
+        file infos into a contiguous data stream.
+
+        Useful for direct ClientPutComplexDir requests.
+    """
+
+    MSG_LENGTH_MISMATCH = "Upload bytes doesn't match sum of " \
+                          + "lengths in file_infos. Did the files " \
+                          + "change during uploading?"
+
+    def __init__(self, file_infos):
+        IDataSource.__init__(self)
+        assert file_infos
+        self.infos = file_infos
+        self.total_length = total_length(file_infos)
+        self.running_total = 0
+        self.chunks = None
+        self.input_file = None
+
+    def data_generator(self, infos):
+        """ INTERNAL: Returns a generator which yields the concatenated
+            data from all the file infos.
+
+            Raises IOError if the number of bytes read doesn't match
+            the lengths recorded in the file infos.
+        """
+
+        for info in infos:
+            self.input_file = open(info[3], 'rb')
+            while True:
+                raised = True
+                try:
+                    data = self.input_file.read(READ_BLOCK)
+                    raised = False
+                finally:
+                    # Note: Wacky control flow because you can't yield
+                    #       from a finally block
+                    if raised or data is None:
+                        self.input_file.close()
+                        self.input_file = None
+                if not data:
+                    break
+                self.running_total += len(data)
+                if self.running_total > self.total_length:
+                    raise IOError(self.MSG_LENGTH_MISMATCH)
+                yield data
+
+        if self.running_total != self.total_length:
+            raise IOError(self.MSG_LENGTH_MISMATCH)
+
+        yield None
+        return
+
+    def initialize(self):
+        """ IDataSource implementation. """
+        assert self.chunks is None
+        self.chunks = self.data_generator(self.infos)
+
+    def release(self):
+        """ IDataSource implementation. """
+        self.chunks = None
+        # BUG FIX: was 'if not self.input_file:', which leaked the open
+        # file when one existed and raised AttributeError (None.close())
+        # when it didn't.
+        if self.input_file is not None:
+            self.input_file.close()
+            self.input_file = None
+
+    def data_length(self):
+        """ IDataSource implementation. """
+        return self.total_length
+
+    def read(self):
+        """ IDataSource implementation.
+
+            Returns the next chunk of data, or None when the stream
+            is exhausted.
+        """
+        assert self.chunks is not None
+        if self.chunks:
+            ret = self.chunks.next()
+            if ret is None:
+                self.chunks = None
+                return None
+            return ret
+        return None
+
+
+
+
+#-----------------------------------------------------------#
+# Key classification and manipulation helper functions
+#-----------------------------------------------------------#
+
+# REDFLAG:  Use a common regex?  Not sure that would cut loc...
+# Matches USK URIs ending in a (possibly negative) version index,
+# e.g. 'USK@.../site/5'.
+USK_FILE_REGEX = re.compile('(freenet:)?(USK).*/((\\-)?[0-9]+[0-9]*)$')
+def is_usk_file(uri):
+    """ Returns True if uri points to a single file, False otherwise. """
+    return bool(USK_FILE_REGEX.match(uri))
+
+# Same as USK_FILE_REGEX but requires a trailing '/'.
+USK_CONTAINER_REGEX = re.compile('(freenet:)?(USK).*/((\\-)?[0-9]+[0-9]*)/$')
+def is_usk_container(uri):
+    """ Return True if uri is USK uri which points to a Freenet
+        Container, False otherwise.
+    """
+    return bool(USK_CONTAINER_REGEX.match(uri))
+
+KEY_TYPE_REGEX = re.compile('(freenet:)?(?P<key_type>CHK|KSK|SSK|USK)@')
+def key_type(uri):
+    """ Returns the key type ('CHK', 'KSK', 'SSK' or 'USK').
+
+        Raises Exception if uri doesn't look like a Freenet URI.
+    """
+
+    match = KEY_TYPE_REGEX.match(uri)
+    if not match:
+        raise Exception("Doesn't look like a Freenet URI: %s" % uri)
+    return match.groupdict()['key_type']
+
+def is_chk(uri):
+    """ Returns True if the URI is a CHK key, False otherwise. """
+    return key_type(uri) == 'CHK'
+
+def is_ksk(uri):
+    """ Returns True if the URI is a KSK key, False otherwise. """
+    return key_type(uri) == 'KSK'
+
+def is_ssk(uri):
+    """ Returns True if the URI is a SSK key, False otherwise. """
+    return key_type(uri) == 'SSK'
+
+def is_usk(uri):
+    """ Returns True if the URI is a USK key, False otherwise. """
+    return key_type(uri) == 'USK'
+
+# LATER: fix regex to work for SSKs too.
+VERSION_REGEX = re.compile('(?P<usk>USK)@(.*)/(?P<version>'
+                           + '(\\-)?[0-9]+[0-9]*)(/.*)?')
+def get_version(uri):
+    """ Return the version index of USK.
+
+        Raises ValueError if no version could be extracted.
+    """
+
+    try:
+        version = int(VERSION_REGEX.match(uri).
+                      groupdict()['version'])
+    except:
+        raise ValueError("Couldn't parse a USK or SSK version from: %s" % uri)
+    return version
+
+def get_ssk_for_usk_version(usk_uri, version):
+    """ Return an SSK for a specific version of a USK.
+
+        NOTE:
+        The version in usk_uri is ignored.
+    """
+    match = VERSION_REGEX.match(usk_uri)
+    if not match:
+        raise Exception("Couldn't parse version from USK: %s" % usk_uri)
+
+    # Splice: 'SSK' + everything between the key type and the slash
+    # before the version, then the SSK-style '-<version>' suffix.
+    return 'SSK' + usk_uri[match.end('usk') : match.start('version') - 1] \
+           + '-' + str(version) + usk_uri[match.end('version'):]
+
+def get_usk_for_usk_version(usk_uri, version, negative = False):
+    """ Return an USK for a specific version of a USK.
+
+        NOTE:
+        The version in usk_uri is ignored.
+        Works for both containers and files.
+
+        If negative is True the returned USK carries a negative
+        version index.
+    """
+    match = VERSION_REGEX.match(usk_uri)
+    if not match:
+        raise Exception("Couldn't parse version from USK: %s" % usk_uri)
+    if negative and version > 0:
+        version = -1 * version
+    version_str = str(version)
+    if version == 0 and negative:
+        # '-0' can't be represented by an int, so special case it.
+        version_str = '-0'
+        # BITCH:
+        # They should have picked some other symbol ('*'?) which doesn't
+        # encourage implementers to jam the version into an integer.
+        # i.e. because you can't represent the version with an integer
+        # because -0 == 0.
+    assert not negative or version_str.find('-') > -1
+
+    return usk_uri[0 : match.start('version')] \
+           + version_str + usk_uri[match.end('version'):]
+
+def is_negative_usk(usk_uri):
+    """ Returns True if usk_uri has a negative version index,
+        False otherwise.
+
+        REQUIRES: usk_uri is a USK key.
+    """
+    match = VERSION_REGEX.match(usk_uri)
+    if match is None:
+        raise Exception("Couldn't parse version from USK: %s" % usk_uri)
+    return '-' in match.groupdict()['version']
+
+def get_negative_usk(usk_uri):
+    """ Return an USK with a negative version index.
+
+        NOTE:
+        Using a negative index causes the FCP server to search
+        harder for later versions in ClientGet requests.
+
+        NOTE:
+        This is a NOP if usk_uri is already negative.
+    """
+    version = get_version(usk_uri)
+    return usk_uri if is_negative_usk(usk_uri) else \
+           get_usk_for_usk_version(usk_uri, version, True)
+
+def prefetch_usk(client, usk_uri, allowed_redirects = 3,
+                 message_callback = None):
+    """ Force the FCP server to explicitly search for updates
+        to the USK.
+
+        Returns the latest version as an integer or None if
+        no version could be determined.
+
+        This works by sending a negative index value for the USK.
+
+        Note that this can return a version LESS THAN the version
+        in usk_uri.
+
+        REQUIRES: client is in synchronous mode.
+    """
+
+    if client.in_params.async:
+        raise ValueError("This function only works synchronously.")
+
+    usk_uri = get_negative_usk(usk_uri)
+    client.reset()
+    # Save state that is temporarily overridden below.
+    callback = client.message_callback
+    return_type = client.in_params.default_fcp_params.get('ReturnType')
+    version = None
+    try:
+        if message_callback:
+            # Install a custom message callback
+            client.message_callback = message_callback
+        # Discard the data; only the discovered URI matters.
+        client.in_params.default_fcp_params['ReturnType'] = 'none'
+        try:
+            # BUG: HANGS
+            version = get_version(client.get(usk_uri,
+                                             allowed_redirects)[1]['URI'])
+        except FCPError:
+            version = None
+    finally:
+        # Restore the saved callback and ReturnType.
+        client.message_callback = callback
+        if return_type:
+            client.in_params.default_fcp_params['ReturnType'] = return_type
+
+    return version
+
+def latest_usk_index(client, usk_uri, allowed_redirects = 1,
+                     message_callback = None):
+    """ Determines the version index of a USK key.
+
+        Returns a (version, data_found) tuple where version
+        is the integer version and data_found is the data_found
+        message for the latest index.
+
+        NOTE:
+        This fetches the key and discards the data.
+        It may take a very long time if you call it for
+        a key which points to a large block of data.
+
+        REQUIRES: client is in synchronous mode.
+    """
+
+    if client.in_params.async:
+        raise ValueError("This function only works synchronously.")
+
+    client.reset()
+    # Save state that is temporarily overridden below.
+    callback = client.message_callback
+    return_type = client.in_params.default_fcp_params.get('ReturnType')
+    try:
+        if message_callback:
+            # Install a custom message callback
+            client.message_callback = message_callback
+        client.in_params.default_fcp_params['ReturnType'] = 'none'
+        prev = None
+        while True:
+            # Hmmmm... Make sure that the USK has 'settled'.
+            # IDIOM FIX: renamed local 'next' -> 'found'; 'next'
+            # shadows the Python builtin.
+            found = client.get(usk_uri, allowed_redirects)
+            if prev and found[1]['URI'] == prev[1]['URI']:
+                break
+            prev = found
+    finally:
+        client.message_callback = callback
+        if return_type:
+            client.in_params.default_fcp_params['ReturnType'] = return_type
+
+    return (get_version(prev[1]['URI']), prev)
+
+def get_insert_chk_filename(uri):
+    """ Returns the file name part of CHK@/file_part.ext style
+    CHK insert uris, or None for a bare 'CHK@'.
+
+    Raises ValueError for other text after the '@'. """
+    assert uri.startswith('CHK@')
+    if not uri.startswith('CHK@/'):
+        if uri != 'CHK@':
+            raise ValueError("Unexpected data after '@'. Maybe you forgot the "
+                             + "'/' before the filename part?")
+        return None
+    return uri[5:]
+
+def set_insert_uri(params, uri):
+    """ INTERNAL: Set the 'URI' and 'TargetFilename' in params,
+    correctly handling CHK@/filename.ext style insert URIs. """
+
+    if is_chk(uri):
+        params['URI'] = 'CHK@'
+        filename = get_insert_chk_filename(uri)
+        if not filename is None:
+            params['TargetFilename'] = filename
+    else:
+        params['URI'] = uri
+
+def get_usk_hash(usk):
+    """ Returns a 12 hex digit hash for a USK which is independent
+    of version. """
+    return sha1_hexdigest(get_usk_for_usk_version(usk, 0))[:12]
+
+def check_usk_hash(usk, hash_value):
+    """ Returns True if the hash matches, False otherwise. """
+    return (sha1_hexdigest(get_usk_for_usk_version(usk, 0))[:12]
+           == hash_value)
+
+def show_progress(dummy, msg):
+    """ Default message callback implementation.
+
+        Prints a one-line progress update for each FCP message.
+        dummy -- unused (kept for the callback signature).
+    """
+
+    if msg[0] == 'SimpleProgress':
+        print "Progress: (%s/%s/%s)" % (msg[1]['Succeeded'],
+                                        msg[1]['Required'],
+                                        msg[1]['Total'])
+    else:
+        print "Progress: %s" % msg[0]
+
+def parse_progress(msg):
+    """ Parse a SimpleProgress message into a
+        (succeeded, required, total, failed, fatally_failed,
+        finalized_total) tuple. """
+    assert msg[0] == 'SimpleProgress'
+
+    return (int(msg[1]['Succeeded']),
+            int(msg[1]['Required']),
+            int(msg[1]['Total']),
+            int(msg[1]['Failed']),
+            int(msg[1]['FatallyFailed']),
+            bool(msg[1]['FinalizedTotal'].lower() == 'true'))
+
+class FCPClient(MinimalClient):
+    """ A class to execute common FCP requests.
+
+        This class provides a simplified interface for common FCP commands.
+        Calls are blocking by default.  Set FCPClient.in_params.async = True
+        to run asynchronously.
+
+        You can set FCP parameters using the
+        FCPClient.in_params.default_fcp_params dictionary.
+
+        GOTCHA:
+        Don't set FCPClient.in_params.fcp_params directly. It is reset
+        before most calls so changes to it probably won't have any effect.
+    """
+    def __init__(self, conn):
+        MinimalClient.__init__(self)
+        self.conn = conn
+        self.message_callback = show_progress
+        self.in_params.default_fcp_params = FCP_PARAM_DEFAULTS.copy()
+
+    @classmethod
+    def connect(cls, host, port, socket_class = PolledSocket,
+                state_callback = None):
+        """ Create an FCPClient which owns a new FCPConnection.
+
+            NOTE: If you need multiple FCPClient instances it is
+                  better to explictly create an FCPConnection and
+                  use the FCPClient.__init__() method so that all
+                  instances are multiplexed over the same connection.
+            """
+        sock = None
+        conn = None
+        raised = True
+        try:
+            sock = socket_class(host, port)
+            conn = FCPConnection(sock, True, state_callback)
+            raised = False
+        finally:
+            if raised:
+                if conn:
+                    conn.close()
+                if sock:
+                    sock.close()
+
+        return FCPClient(conn)
+
+
+    def wait_until_finished(self):
+        """ Wait for the current request to finish. """
+        assert self.conn
+        self.conn.wait_for_terminal(self)
+
+    def close(self):
+        """ Close the underlying FCPConnection. """
+        if self.conn:
+            self.conn.close()
+
+    def get_node(self, opennet = False, private = False, volatile = True):
+        """ Query node information by sending an FCP GetNode message. """
+
+        # Hmmmm... I added an 'Identifier' value to request message
+        # even though there's None in the doc. See GETNODE_DEF.
+        # It seems to work.
+        self.reset()
+        self.in_params.definition = GETNODE_DEF
+        self.in_params.fcp_params = {'GiveOpennetRef': opennet,
+                                     'WithPrivate': private,
+                                     'WithVolatile': volatile }
+
+        return self.conn.start_request(self)
+
+    def generate_ssk(self):
+        """ Generate an SSK key pair.
+
+        Returns the SSKKeyPair message.
+        """
+        self.reset()
+        self.in_params.definition = GENERATE_SSK_DEF
+        return self.conn.start_request(self)
+
+    def get_request_uri(self, insert_uri):
+        """ Return the request URI corresponding to the insert URI.
+
+            REQUIRES: insert_uri is a private SSK or USK.
+        """
+
+        if self.in_params.async:
+            raise ValueError("This function only works synchronously.")
+
+        assert is_usk(insert_uri) or is_ssk(insert_uri)
+
+        if is_usk(insert_uri):
+            target = get_ssk_for_usk_version(insert_uri, 0)
+        else:
+            target = insert_uri
+
+        self.reset()
+        self.in_params.definition = GET_REQUEST_URI_DEF
+        self.in_params.fcp_params = {'URI': target,
+                                     'MaxRetries': 1,
+                                     'PriorityClass':1,
+                                     'UploadFrom':'direct',
+                                                'DataLength':9,
+                                     'GetCHKOnly':True}
+        self.in_params.send_data = '012345678' # 9 bytes of data
+        inverted = self.conn.start_request(self)[1]['URI']
+        public = inverted[inverted.find('@') + 1: inverted.find('/')]
+        return insert_uri[:insert_uri.find('@') + 1] + public \
+               + insert_uri[insert_uri.find('/'):]
+
+    def get(self, uri, allowed_redirects = 0, output_file = None):
+        """ Requests the data corresponding to the URI from the
+        FCP server.
+
+        Returns an AllData or DataFound (when
+        self.default_fcp_params['ReturnType'] == 'none') message
+        on success.
+
+        If output_file or self.output_file is not None, write the
+        raw data to file instead of returning it as a string.
+
+        Raises an FCPError on failure.
+
+        An extra 'URI' entry is added to the returned message
+        containing the final URI the data was requested
+        from after redirecting.
+
+        An extra 'Metadata.ContentType' entry is added to the
+        returned AllData message containing the mime type
+        information extracted from the last DataFound.
+        """
+        self.reset()
+        self.in_params.definition = GET_DEF
+        self.in_params.fcp_params = {'URI':uri }
+        self.in_params.allowed_redirects = allowed_redirects
+        self.in_params.file_name = output_file
+        # REDFLAG: fix
+        self.in_params.send_data = False
+        return self.conn.start_request(self)
+
+
+    def put(self, uri, bytes, mime_type=None):
+        """ Insert a string into Freenet.
+
+            Returns a PutSuccessful message on success.
+            Raises an FCPError on failure.
+        """
+        self.reset()
+        self.in_params.definition = PUT_FILE_DEF
+        set_insert_uri(self.in_params.fcp_params, uri)
+        if mime_type:
+            self.in_params.fcp_params['Metadata.ContentType'] = mime_type
+
+        self.in_params.send_data = bytes
+        return self.conn.start_request(self)
+
+    def put_file(self, uri, path, mime_type=None):
+        """ Insert a single file into Freenet.
+
+        Returns a PutSuccessful message on success.
+        Raises an FCPError on failure.
+
+        REQUIRES: The size of the file can't change during this
+                  call.
+        """
+
+        self.reset()
+        self.in_params.definition = PUT_FILE_DEF
+        set_insert_uri(self.in_params.fcp_params, uri)
+
+        if mime_type:
+            self.in_params.fcp_params['Metadata.ContentType'] = mime_type
+
+        # REDFLAG: test. not sure this ever worked in previous version
+        #if 'UploadFrom' in params and params['UploadFrom'] == 'disk':
+        #    # REDFLAG: test this code path!
+        #    params['FileName'] = path
+        #    path = None
+
+        self.in_params.file_name = path
+        # REDFLAG: fix
+        self.in_params.send_data = True
+        return self.conn.start_request(self)
+
+    def put_redirect(self, uri, target_uri, mime_type=None):
+        """ Insert a redirect into freenet.
+
+        Returns a PutSuccessful message on success.
+        Raises an FCPError on failure.
+        """
+        self.reset()
+        self.in_params.definition = PUT_REDIRECT_DEF
+        self.in_params.fcp_params = {'URI':uri,
+                                     'TargetURI':target_uri,
+                                     'UploadFrom':'redirect'}
+        if mime_type:
+            self.in_params.fcp_params['Metadata.ContentType'] = mime_type
+        return self.conn.start_request(self)
+
+    def put_complex_dir(self, uri, file_infos,
+                        default_mime_type = 'text/plain'):
+        """ Insert a collection of files into a Freenet Container.
+
+            file_infos must be a list of
+            (name, length, mime_type, full_path) tuples.
+
+            file_infos[0] is inserted as the default document.
+
+            mime types:
+            If the mime_type value in the file_infos tuple for the
+            file is not None, it is used.  Otherwise the mime type
+            is guessed from the file extension. Finally, if guessing
+            fails, default_mime_type is used.
+        """
+
+        assert default_mime_type
+        assert file_infos
+
+        self.reset()
+        self.in_params.definition = PUT_COMPLEX_DIR_DEF
+        self.in_params.fcp_params = {'URI': uri}
+
+        # IMPORTANT: Don't set the data length.
+        return self.conn.start_request(self,
+                                       dir_data_source(file_infos,
+                                                       self.in_params,
+                                                       default_mime_type),
+                                       False)
+
+
+# Break out implementation helper so I can use it elsewhere.
+def dir_data_source(file_infos, in_params, default_mime_type):
+    """ Return an IDataSource for a list of file_infos.
+
+        NOTE: Also sets up Files.* fields in in_params as a
+              side effect. """
+
+    for field in in_params.default_fcp_params:
+        if field.startswith("Files"):
+            raise ValueError("You can't set file entries via "
+                             + " default_fcp_params.")
+    if 'DefaultName' in in_params.default_fcp_params:
+        raise ValueError("You can't set 'DefaultName' via "
+                         + "default_fcp_params.")
+
+    # IMPORTANT: Sort the file infos so that the same set of
+    #            file_infos always yields the same inserted data blob.
+    #            file_infos[0] isn't moved.
+    file_infos = sort_file_infos(file_infos)
+
+    files = {}
+    index = 0
+    for info in file_infos:
+        mime_type = info[2]
+        if not mime_type:
+            # First try to guess from the extension.
+            type_tuple = mimetypes.guess_type(info[0])
+            if type_tuple:
+                mime_type = type_tuple[0]
+        if not mime_type:
+            # Fall back to the default.
+            mime_type = default_mime_type
+
+        files['Files.%i.Name' % index] = info[0]
+        files['Files.%i.UploadFrom' % index] = 'direct'
+        files['Files.%i.DataLength' % index] = info[1]
+        files['Files.%i.Metadata.ContentType' % index] = mime_type
+
+        index += 1
+
+    in_params.fcp_params['Files'] = files
+    in_params.fcp_params['DefaultName'] = file_infos[0][0]
+
+    #REDFLAG: Fix
+    in_params.send_data = True
+
+    return FileInfoDataSource(file_infos)
+
+############################################################
+# Helper function for hg changeset bundle handling.
+############################################################
+
+# Saw here:
+# http://sage.math.washington.edu/home/robertwb/trac-bundle/test \
+#       /sage_trac/log/trac.log
HG_MIME_TYPE = 'application/mercurial-bundle'

def package_metadata(metadata):
    """ Package the bundle contents metadata into a string which
        can be inserted into the Metadata.ContentType field
        of the Freenet key.

        All args must be full 40 digit hex keys.
    """
    # Only the first three entries are encoded.
    return '%s;%s' % (HG_MIME_TYPE, ','.join(metadata[:3]))
+
CHANGESET_REGEX = re.compile('.*;\s*([0-9a-fA-F]{40,40})\s*,'
                              + '\s*([0-9a-fA-F]{40,40})\s*,'
                              + '\s*([0-9a-fA-F]{40,40})')
def parse_metadata(msg):
    """ INTERNAL: Parse the (base_rev, first_rev, tip) info out of the
        Metadata.ContentType field of msg.

        FCP2.0 doesn't have support for user defined metadata, so we
        jam the metadata we need into the mime type field.

        Raises ValueError when the field has no parsable changeset
        info (e.g. bundles inserted with older versions of hg2fn.py).
    """
    content_type = msg[1]['Metadata.ContentType']
    match = CHANGESET_REGEX.match(content_type)
    if match is None or len(match.groups()) != 3:
        raise ValueError("Couldn't parse changeset info from [%s]." \
                         % content_type)
    return match.groups()
+
def make_rollup_filename(rollup_info, request_uri):
    """ Return a filename containing info for a rollup bundle.

        Format:
        <human_name>_<end_index>_<start_index>_<tip>_<parent>_ID<repoid>
    """
    if not is_usk_file(request_uri):
        raise ValueError("request_uri is not a USK file uri.")

    # Hmmmm.... get rid of symbolic names?
    tip, parent, start_index, end_index = rollup_info[0][:4]
    assert len(tip) == 40 # LATER: is_changset_id_str() func?
    assert len(parent) == 40
    assert 0 <= start_index <= end_index

    human_readable = request_uri.split('/')[1]
    # hmmmm... always suppress a trailing .hg
    if human_readable.lower().endswith('.hg'):
        human_readable = human_readable[:-3]
    return "%s_%i_%i_%s_%s_ID%s" % (human_readable, end_index, start_index,
                                    tip[:12], parent[:12],
                                    get_usk_hash(request_uri))
+
def parse_rollup_filename(filename):
    """ Parse a filename created with make_rollup_filename into a
        (human_readable, start_index, end_index, tip, parent, repo_id)
        tuple.

        Raises ValueError if the filename is malformed.
    """
    fields = filename.split('_')
    repo_id = fields[-1]
    if not repo_id.startswith("ID") or len(repo_id) != 14:
        raise ValueError("Couldn't parse repo usk hash.")
    repo_id = repo_id[2:]
    parent = fields[-2]
    if len(parent) != 12:
        raise ValueError("Couldn't parse parent.")
    tip = fields[-3]
    if len(tip) != 12:
        raise ValueError("Couldn't parse tip.")
    start_index = int(fields[-4])
    end_index = int(fields[-5])
    # BUG FIX: the trailing metadata is exactly 5 '_' separated fields
    # (end, start, tip, parent, ID<hash>), so the human readable name
    # is everything before them.  The original sliced [:-6] (the
    # 'REDFLAG: dci obo?' off-by-one), silently dropping the last
    # component of names containing '_', e.g. 'my_repo' -> 'my'.
    human_readable = '_'.join(fields[:-5])
    return (human_readable, start_index, end_index, tip, parent, repo_id)
+
+
+############################################################
+# Stuff moved from updatesm.py, cleanup.
+############################################################
+
+# Hmmm... do better?
# IFF the first ssk ends with .R1, the second ssk ends with .R0.
+# Makes it easy for paranoid people to disable redundant
+# top key fetching. ie. just request *R0 instead of *R1.
+# Also could intuitively be expanded to higher levels of
+# redundancy.
def make_redundant_ssk(usk, version):
    """ Returns a redundant ssk pair for the USK version IFF the file
        part of usk ends with '.R1', otherwise a single
        ssk for the usk specified version. """
    primary = get_ssk_for_usk_version(usk, version)
    fields = primary.split('-')
    if not fields[-2].endswith('.R1'):
        return (primary, )
    # Redundant insert: flip the '.R1' suffix to '.R0'.
    fields[-2] = fields[-2][:-2] + 'R0'
    return (primary, '-'.join(fields))
+
+# For search
def make_search_uris(uri):
    """ Returns a redundant USK pair if the file part of uri ends
        with '.R1', otherwise a tuple containing only uri. """
    if not is_usk_file(uri):
        return (uri, )
    segments = uri.split('/')
    if not segments[-2].endswith('.R1'):
        return (uri, )
    # Redundant USK: flip the '.R1' suffix to '.R0'.
    segments[-2] = segments[-2][:-2] + 'R0'
    return (uri, '/'.join(segments))
+
def make_frozen_uris(uri, increment=True):
    """ Returns a possibly redundant SSK tuple for the 'frozen'
        version of file USK uris, a tuple containing uri for other uris.

        NOTE: This increments the version by 1 if uri is a USK
              and increment is True.
    """
    if uri == 'CHK@':
        return (uri, )
    assert is_usk_file(uri)
    version = get_version(uri)
    if increment:
        version += 1
    return make_redundant_ssk(uri, version)
+
def ssk_to_usk(ssk):
    """ Convert an SSK for a file USK back into a file USK. """
    pieces = ssk.split('-')
    # Rejoin the trailing '<name>-<version>' pair with a '/'.
    tail = '/'.join(pieces[-2:])
    rebuilt = '-'.join(pieces[:-2] + [tail])
    return 'USK' + rebuilt[3:]
+
+
diff --git a/release/fcpconnection.py b/release/fcpconnection.py
new file mode 100644
--- /dev/null
+++ b/release/fcpconnection.py
@@ -0,0 +1,999 @@
+""" Classes to create a multiplexed asynchronous connection to an
+    FCP server.
+
+    Copyright (C) 2008 Darrell Karbott
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public
+    License as published by the Free Software Foundation; either
+    version 2.0 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+    Author: djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
+
+    OVERVIEW:
+    IAsyncSocket is an abstract interface to an asynchronous
+    socket.  The intent is that client code can plug in a
+    framework appropriate implementation. i.e. for Twisted,
+    asyncore, Tkinter, pyQt, pyGtk, etc.  A platform agnostic
+    implementation, PolledSocket is supplied.
+
+    FCPConnection uses an IAsyncSocket delegate to run the
+    FCP 2.0 protocol over a single socket connection to an FCP server.
+
+    FCPConnection fully(*) supports multiplexing multiple requests.
+    Client code runs requests by passing an instance of MinimalClient
+    into FCPConnection.start_request().  The FCPClient MinimalClient
+    subclass provides convenience wrapper functions for common requests.
+
+    Both blocking and non-blocking client requests are supported.
    If MinimalClient.in_params.async == True, FCPConnection.start_request()
    returns a request id string immediately.  This is the same request
    id which appears in the 'Identifier' field of subsequent incoming
    FCP messages. The MinimalClient.message_callback(client, msg)
+    callback function is called for every incoming client message for
+    the request. Async client code can detect the request has finished
+    by checking client.is_finished() from this callback.
+
+    (*) GOTCHA: If you start a request which writes trailing data
+    to the FCP server, the FCPConnection will transition into the
+    UPLOADING state and you won't be able to start new requests until
+    uploading finishes and it transitions back to the CONNECTED state.
+    It is recommended that you use a dedicated FCPConnection instance
+    for file uploads. You don't have to worry about this if you use
+    blocking requests exclusively.
+"""
+# REDFLAG: get pylint to acknowledge inherited doc strings from ABCs?
+
+import os, os.path, random, select, socket, time
+
# Pick a SHA1 implementation: hashlib (Python 2.5+), with a fallback
# to the deprecated 'sha' module for older interpreters.
try:
    from hashlib import sha1
    def sha1_hexdigest(bytes):
        """ Return the SHA1 hexdigest of bytes using the hashlib module. """
        return sha1(bytes).hexdigest()
except ImportError:
    # Fall back so that code still runs on pre 2.6 systems.
    import sha
    def sha1_hexdigest(bytes):
        """ Return the SHA1 hexdigest of bytes using the sha module. """
        return sha.new(bytes).hexdigest()
+
+from fcpmessage import make_request, FCPParser, HELLO_DEF, REMOVE_REQUEST_DEF
+
FCP_VERSION = '2.0' # Expected version value sent in ClientHello

RECV_BLOCK = 4096 # socket recv
SEND_BLOCK = 4096 # socket send
READ_BLOCK = 16 * 1024  # disk read

MAX_SOCKET_READ = 33 * 1024 # approx. max bytes read during IAsyncSocket.poll()

POLL_TIME_SECS = 0.25 # hmmmm... sleep between polls in blocking waits

# FCPConnection states.
CONNECTING = 1
CONNECTED  = 2
CLOSED     = 3
UPLOADING  = 4

# Human readable names for the connection states above.
CONNECTION_STATES = {CONNECTING:'CONNECTING',
                     CONNECTED:'CONNECTED',
                     CLOSED:'CLOSED',
                     UPLOADING:'UPLOADING'}
+
+def example_state_callback(dummy, state):
+    """ Example FCPConnection.state_callback function. """
+
+    value = CONNECTION_STATES.get(state)
+    if not value:
+        value = "UNKNOWN"
+    print "FCPConnection State -> [%s]" % value
+
def make_id():
    """ INTERNAL: Make a unique id string. """
    # Hash randomness plus the current time.
    seed = str(random.random()) + str(time.time())
    return sha1_hexdigest(seed)
+
+#-----------------------------------------------------------#
+# Byte level socket handling
+#-----------------------------------------------------------#
+
class IAsyncSocket:
    """ Abstract interface for an asynchronous socket.

        Client code assigns the three callback attributes; concrete
        implementations invoke them from poll().
    """
    def __init__(self):
        # lambda's prevent pylint E1102 warning

        # Data arrived on socket
        self.recv_callback = lambda x:None
        # Socket closed
        self.closed_callback = lambda :None
        # Socket wants data to write. This can be None.
        self.writable_callback = None

    def write_bytes(self, bytes):
        """ Write bytes to the socket. """
        pass

    def close(self):
        """ Release all resources associated with the socket. """
        pass

    # HACK to implement waiting on messages.
    def poll(self):
        """ Do whatever is required to check for new activity
            on the socket.

            e.g. run gui framework message pump, explicitly poll, etc.
            MUST call recv_callback, writable_callback
        """
        pass
+
class NonBlockingSocket(IAsyncSocket):
    """ Base class used for IAsyncSocket implementations based on
        non-blocking BSD style sockets.
    """
    def __init__(self, connected_socket):
        """ REQUIRES: connected_socket is non-blocking and fully connected. """
        IAsyncSocket.__init__(self)
        # Bytes queued by write_bytes() but not yet sent on the wire.
        self.buffer = ""
        self.socket = connected_socket

    def write_bytes(self, bytes):
        """ IAsyncSocket implementation.

            Only queues the bytes; the actual send happens in do_write().
        """
        assert bytes
        self.buffer += bytes
        #print "write_bytes: ", self.buffer

    def close(self):
        """ IAsyncSocket implementation. """
        if self.socket:
            self.socket.close() # sync?
            self.closed_callback()
        self.socket = None

    def do_write(self):
        """ INTERNAL: Write to the socket.

            Returns True if data was written, false otherwise.

            REQUIRES: buffer has data or the writable_callback is set.
        """

        assert self.buffer or self.writable_callback
        if not self.buffer:
            # Ask the client for more data to send.
            # pylint doesn't infer that this must be set.
            # pylint: disable-msg=E1102
            self.writable_callback()
        if self.buffer:
            # Send at most SEND_BLOCK bytes per call; keep the rest queued.
            chunk = self.buffer[:SEND_BLOCK]
            sent = self.socket.send(chunk)
            #print "WRITING:", self.buffer[:sent]
            assert sent >= 0
            #print "TO_WIRE:"
            #print repr(self.buffer[:sent])
            self.buffer = self.buffer[sent:]
            return True
        assert not self.writable_callback # Hmmmm... This is a client error.
        return False

    def do_read(self):
        """ INTERNAL: Read from the socket.

            Returns the data read from the socket or None
            on EOF.

            NOTE(review): no close actually happens here on EOF --
            callers handle the None return; the original docstring's
            "Closes on EOF as a side effect" claim looks stale.
        """
        data = self.socket.recv(RECV_BLOCK)
        if not data:
            return None

        #print "FROM_WIRE:"
        #print  repr(data)
        return data
+
+
class PolledSocket(NonBlockingSocket):
    """ Sucky polled IAsyncSocket implementation which should
        work everywhere. i.e. *nix, Windows, OSX. """

    def __init__(self, host, port):
        connected_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # REDFLAG: Can block here.
        connected_socket.connect((host, port))
        connected_socket.setblocking(0)
        NonBlockingSocket.__init__(self, connected_socket)

    def poll(self):
        """ IAsyncSocket implementation.

            Returns False when the peer closed the connection,
            True otherwise.

            Raises IOError if the socket is already closed or
            select() reports an error condition on it.
        """
        if not self.socket:
            raise IOError("The socket is closed")
        # Buffer received bytes and deliver them in a single
        # recv_callback call after the select loop, because we don't
        # want to call the recv_callback while reading...
        # wacky re-entrance issues....
        read = ''
        ret = True
        while len(read) < MAX_SOCKET_READ: # bound read length
            check_writable = []
            if self.buffer or self.writable_callback:
                check_writable = [self.socket]
            readable, writable, errs = \
                      select.select([self.socket], check_writable,
                                    [self.socket], 0)

            stop = True
            if errs:
                # BUG FIX: the original called
                # self.socket.sendall(RECV_BLOCK) to "force" an IO
                # exception, but sendall() requires a buffer argument,
                # so passing the int raised TypeError instead of the
                # intended IOError.  Raise IOError directly.
                raise IOError("Unknown socket error")

            if readable:
                data = self.do_read()
                if not data:
                    # EOF: report the close to the caller via the
                    # return value.
                    ret = False
                    break

                read += data
                stop = False

            if writable:
                if self.do_write():
                    stop = False

            if stop:
                break

        if read:
            self.recv_callback(read)
        return ret
+
+#-----------------------------------------------------------#
+# Message level FCP protocol handling.
+#-----------------------------------------------------------#
+
# NOTE:
# 'DataFound' is sometimes terminal. See msg_is_terminal().
#
# NOTE:
# This list is not complete.  It only lists
# messages generated by supported FCP commands.
# Messages which always indicate that an FCP request ended in success.
SUCCESS_MSGS = frozenset([ \
    'NodeHello', 'SSKKeypair', 'AllData', 'PutSuccessful', 'NodeData',
    ])

# Messages which always indicate that an FCP request ended in failure.
FAILURE_MSGS = frozenset([ \
    'CloseConnectionDuplicateClientName', 'PutFailed', 'GetFailed',
    'ProtocolError', 'IdentifierCollision', 'UnknownNodeIdentifier',
    'UnknownPeerNoteType'
    ])

# Messages which always indicate that an FCP request ended.
# (msg_is_terminal() handles the DataFound special case.)
TERMINAL_MSGS = SUCCESS_MSGS.union(FAILURE_MSGS)
+
def msg_is_terminal(msg, params):
    """ INTERNAL: Return True if the message ends an FCP request,
    False otherwise.
    """

    msg_name = msg[0]
    if msg_name in TERMINAL_MSGS:
        return True

    # Special case: a DataFound is terminal for requests with
    # ReturnType 'none', since no AllData will follow.
    return (msg_name == 'DataFound' and
            params.get('ReturnType') == 'none')
+
def get_code(msg):
    """ Returns integer error code if msg has a 'Code' field
        None otherwise.
    """

    # Hmmmm... does 'Code' ever appear in non-error messages?
    #if not msg[0] in FAILURE_MSGS:
    #    # Message is not an error.
    #    return None

    if not 'Code' in msg[1]:
        # Failure messages are expected to carry a 'Code'; warn so a
        # protocol change doesn't go unnoticed.
        if msg[0] in FAILURE_MSGS:
            print "WARNING: get_code(msg, code) couldn't read 'Code'."
        return None

    return int(msg[1]['Code'])
+
def is_code(msg, error_code):
    """ Returns True if msg has a 'Code' field and it is
        equal to error_code, False, otherwise.
    """

    actual = get_code(msg)
    return actual is not None and actual == error_code
+
def is_fatal_error(msg):
    """ Returns True if msg has a 'Fatal' field and it
        indicates a non-recoverable error, False otherwise.
    """

    flag = msg[1].get('Fatal')
    # Missing field => assume recoverable. hmmm...
    return flag is not None and flag.lower() == 'true'
+
class FCPError(Exception):
    """ An Exception raised when an FCP command fails.

        fcp_msg is the (name, fields) FCP message which triggered
        the error.  last_uri is initialized to None here; it may be
        set by callers after construction.
    """

    def __init__(self, msg):
        Exception.__init__(self, msg[0])
        self.fcp_msg = msg
        self.last_uri = None

    def __str__(self):
        text = "FCPError: " + self.fcp_msg[0]
        if self.fcp_msg[1].has_key('CodeDescription'):
            text += " -- " + self.fcp_msg[1]['CodeDescription']
        return text

    def is_code(self, error_code):
        """ Returns True if the 'Code' field in the FCP error message
            is equal to error_code, False, otherwise.
        """

        if not self.fcp_msg or not 'Code' in self.fcp_msg[1]:
            # YES. This does happen.
            # Hmmmm... just assert?  Can this really happen.
            print "WARNING: FCPError.is_code() couldn't read 'Code'."
            return False

        return is_code(self.fcp_msg, error_code)
+
def raise_on_error(msg):
    """ INTERNAL: raise an FCPError if msg indicates an error. """

    assert msg
    if not msg[0] in FAILURE_MSGS:
        return
    raise FCPError(msg)
+
class IDataSource:
    """ Abstract interface which provides the data written up to
        the FCP Server as part of an FCP request. """
    def __init__(self):
        pass

    def initialize(self):
        """ Prepare the source for reading. """
        raise NotImplementedError()

    def data_length(self):
        """ Return the total number of bytes that read() will
            eventually yield. """
        raise NotImplementedError()

    def release(self):
        """ Free all resources held by the IDataSource
            implementation. """
        raise NotImplementedError()

    def read(self):
        """ Return the next raw byte block, or None when no more
            data is available. """
        raise NotImplementedError()
+
class FileDataSource(IDataSource):
    """ IDataSource implementation which reads its data from a
        single file.
    """
    def __init__(self, file_name):
        IDataSource.__init__(self)
        self.file_name = file_name
        self.file = None

    def initialize(self):
        """ IDataSource implementation. """
        self.file = open(self.file_name, 'rb')

    def data_length(self):
        """ IDataSource implementation. """
        return os.path.getsize(self.file_name)

    def release(self):
        """ IDataSource implementation. """
        if not self.file:
            return
        self.file.close()
        self.file = None

    def read(self):
        """ IDataSource implementation. """
        assert self.file
        return self.file.read(READ_BLOCK)
+
+# MESSAGE LEVEL
+
+class FCPConnection:
+    """Class for a single persistent socket connection
+       to an FCP server.
+
+       Socket level IO is handled by the IAsyncSocket delegate.
+
+       The connection is multiplexed (i.e. it can handle multiple
+       concurrent client requests).
+
+    """
+
    def __init__(self, socket_, wait_for_connect = False,
                 state_callback = None):
        """ Create an FCPConnection from an open IAsyncSocket instance.

            socket_ -- the IAsyncSocket delegate.
            wait_for_connect -- when True, block until the server's
                                hello arrives (raises IOError if the
                                socket drops first).
            state_callback -- optional f(connection, state) called on
                              state transitions (see CONNECTION_STATES).

            REQUIRES: socket_ ready for writing.
        """
        # Maps request Identifier -> client for in-flight requests.
        self.running_clients = {}
        # Delegate handles parsing FCP protocol off the wire.
        self.parser = FCPParser()
        self.parser.msg_callback = self.msg_handler
        self.parser.context_callback = self.get_context

        self.socket = socket_
        if state_callback:
            self.state_callback = state_callback
        else:
            self.state_callback = lambda x, y: None
        self.socket.recv_callback = self.parser.parse_bytes
        self.socket.closed_callback = self.closed_handler

        # None until the server's hello completes the handshake.
        self.node_hello = None

        # Only used for uploads.
        self.data_source = None

        # Tell the client code that we are trying to connect.
        self.state_callback(self, CONNECTING)

        # Send a ClientHello
        params = {'Name':'FCPConnection[%s]' % make_id(),
                  'ExpectedVersion': FCP_VERSION}
        self.socket.write_bytes(make_request(HELLO_DEF, params))
        if wait_for_connect:
            # Wait for the reply
            while not self.is_connected():
                if not self.socket.poll():
                    raise IOError("Socket closed")
                time.sleep(POLL_TIME_SECS)
+
+    def is_connected(self):
+        """ Returns True if the instance is fully connected to the
+            FCP Server and ready to process requests, False otherwise.
+        """
+        return not self.node_hello is None
+
    def is_uploading(self):
        """ Returns True if the instance is uploading data, False
            otherwise.

            NOTE: Actually returns a truthy value (the data source or
                  the writable callback), not necessarily a bool.
        """
        return (self.data_source or
                self.socket.writable_callback)
+
+    def close(self):
+        """ Close the connection and the underlying IAsyncSocket
+            delegate.
+        """
+        if self.socket:
+            self.socket.close()
+
+    # set_data_length only applies if data_source is set
+    def start_request(self, client, data_source = None, set_data_length = True):
+        """ Start an FCP request.
+
+            If in_params.async is True this returns immediately, otherwise
+            it blocks until the request finishes.
+
+            If client.in_params.send_data is set, trailing data is sent
+            after the request message.  If data_source is not None, then
+            the data in it is sent.  Otherwise if client.in_params.file is
+            not None, the data in the file is sent. Finally if neither of
+            the other sources are not None the contents of
+            client.in_params.send_data are sent.
+
+            If set_data_length is True the 'DataLength' field is set in the
+            requests FCP message.
+
+            If in_params.async it True, this method returns the identifier
+            for the request, otherwise, returns the FCP message which
+            terminated the request.
+        """
+        assert not self.is_uploading()
+        assert not client.context
+        assert not client.response
+        assert not 'Identifier' in client.in_params.fcp_params
+        identifier = make_id()
+        client.in_params.fcp_params['Identifier'] = identifier
+        write_string = False
+        if client.in_params.send_data:
+            assert not self.data_source
+            if data_source:
+                data_source.initialize()
+                if set_data_length:
+                    client.in_params.fcp_params['DataLength'] = (data_source.
+                                                                 data_length())
+                self.data_source = data_source
+                self.socket.writable_callback = self.writable_handler
+            elif client.in_params.file_name:
+                self.data_source = FileDataSource(client.in_params.file_name)
+                self.data_source.initialize()
+                client.in_params.fcp_params['DataLength'] = (self.
+                                                             data_source.
+                                                             data_length())
+                self.socket.writable_callback = self.writable_handler
+            else:
+                client.in_params.fcp_params['DataLength'] = len(client.
+                                                                in_params.
+                                                                send_data)
+                write_string = True
+
+        self.socket.write_bytes(make_request(client.in_params.definition,
+                                             client.in_params.fcp_params,
+                                             client.in_params.
+                                             default_fcp_params))
+
+        if write_string:
+            self.socket.write_bytes(client.in_params.send_data)
+
+        assert not client.context
+        client.context = RequestContext(client.in_params.allowed_redirects,
+                                        identifier,
+                                        client.in_params.fcp_params.get('URI'))
+        if not client.in_params.send_data:
+            client.context.file_name = client.in_params.file_name
+
+        #print "MAPPED [%s]->[%s]" % (identifier, str(client))
+        self.running_clients[identifier] = client
+
+        if self.data_source:
+            self.state_callback(self, UPLOADING)
+
+        if client.in_params.async:
+            return identifier
+
+        resp = self.wait_for_terminal(client)
+        raise_on_error(resp)
+        return client.response
+
+    def remove_request(self, identifier, is_global = False):
+        """ Cancel a running request.
+            NOT ALLOWED WHILE UPLOADING DATA.
+        """
+        if self.is_uploading():
+            raise Exception("Can't remove while uploading. Sorry :-(")
+
+        if not identifier in self.running_clients:
+            print "FCPConnection.remove_request -- unknown identifier: ", \
+                  identifier
+        params = {'Identifier': identifier,
+                  'Global': is_global}
+        self.socket.write_bytes(make_request(REMOVE_REQUEST_DEF, params))
+
+    def wait_for_terminal(self, client):
+        """ Wait until the request running on client finishes. """
+        while not client.is_finished():
+            if not self.socket.poll():
+                break
+            time.sleep(POLL_TIME_SECS)
+
+        # Doh saw this trip 20080124. Regression from NonBlockingSocket changes?
+        # assert client.response
+        if not client.response:
+            raise IOError("No response. Maybe the socket dropped?")
+
+        return client.response
+
+    def handled_redirect(self, msg, client):
+        """ INTERNAL: Handle code 27 redirects.
+
+            Returns True when msg was a redirect that this method
+            consumed by re-sending the request for the new URI,
+            False when the caller should process msg normally.
+        """
+
+        # BITCH: This is a design flaw in the FCP 2.0 protocol.
+        #        They should have used unique numbers for all error
+        #        codes so that client coders don't need to keep track
+        #        of the initiating request in order to interpret the
+        #        error code.
+        # Code 27 is only treated as a redirect for ClientGet requests.
+        if client.in_params.definition[0] == 'ClientGet' and is_code(msg, 27):
+            #print "Checking for allowed redirect"
+            if client.context.allowed_redirects:
+                #print "Handling redirect"
+                client.context.allowed_redirects -= 1
+                assert client.context.initiating_id
+                assert client.context.initiating_id in self.running_clients
+                assert client.context.running_id
+                if client.context.running_id != client.context.initiating_id:
+                    # Remove the context for the intermediate redirect.
+                    #print "DELETED: ", client.context.running_id
+                    del self.running_clients[client.context.running_id]
+
+                # Re-issue the request under a fresh identifier, pointed
+                # at the URI the node redirected us to.
+                client.context.running_id = make_id()
+                client.context.last_uri = msg[1]['RedirectURI']
+
+                # Copy, don't modify params.
+                params = {}
+                params.update(client.in_params.fcp_params)
+                params['URI'] = client.context.last_uri
+                params['Identifier'] = client.context.running_id
+
+                # Send new request.
+                self.socket.write_bytes(make_request(client.in_params.
+                                                     definition, params))
+
+                #print "MAPPED(1) [%s]->[%s]" % (client.context.running_id,
+                #                                str(client))
+                # Route replies for the new id back to the same client.
+                self.running_clients[client.context.running_id] = client
+
+                # REDFLAG: change callback to include identifier?
+                # Hmmm...fixup identifier in msg?
+                if client.message_callback:
+                    client.message_callback(client, msg)
+                return True
+
+        return False
+
+
+    def handle_unexpected_msgs(self, msg):
+        """ INTERNAL: Process unexpected messages. """
+
+        if not self.node_hello:
+            if msg[0] == 'NodeHello':
+                self.node_hello = msg
+                self.state_callback(self, CONNECTED)
+                return True
+
+            raise Exception("Unexpected message before NodeHello: %s"
+                            % msg[0])
+
+        if not 'Identifier' in msg[1]:
+            print "Saw message without 'Identifier': %s" % msg[0]
+            print msg
+            return True
+
+        if not msg[1]['Identifier'] in self.running_clients:
+            #print "No client for identifier: %s" % msg[1]['Identifier']
+            # BITCH: You get a PersistentRequestRemoved msg even for non
+            #        peristent requests AND you get it after the GetFailed.
+            #print msg[0]
+            return True
+
+        return False
+
+    def get_context(self, request_id):
+        """ INTERNAL: Lookup RequestContexts for the FCPParser delegate.
+        """
+
+        client = self.running_clients.get(request_id)
+        if not client:
+            raise Exception("No client for identifier: %s" % request_id)
+        assert client.context
+        return client.context
+
+    def msg_handler(self, msg):
+        """INTERNAL: Process incoming FCP messages from the FCPParser delegate.
+
+           Routes each message to the client registered for its
+           Identifier, follows redirects, and finalizes the client's
+           state when a terminal message arrives.
+        """
+
+        if self.handle_unexpected_msgs(msg):
+            return
+
+        client = self.running_clients[msg[1]['Identifier']]
+        assert client.is_running()
+
+        if msg_is_terminal(msg, client.in_params.fcp_params):
+            # A code 27 redirect restarts the request instead of
+            # terminating it.
+            if self.handled_redirect(msg, client):
+                return
+
+            # Remove running context entries
+            assert msg[1]['Identifier'] == client.context.running_id
+            #print "DELETED: ", client.context.running_id
+            del self.running_clients[client.context.running_id]
+            if client.context.running_id != client.context.initiating_id:
+                # Redirected requests are mapped under both ids.
+                #print "DELETED: ", client.context.initiating_id
+                del self.running_clients[client.context.initiating_id]
+
+            if msg[0] == 'DataFound' or msg[0] == 'AllData':
+                # REDFLAG: Always do this? and fix FCPError.last_uri?
+                # Copy URI into final message. i.e. so client
+                # sees the final redirect not the initial URI.
+                msg[1]['URI'] = client.context.last_uri
+            if msg[0] == 'AllData':
+                # Copy metadata into final message
+                msg[1]['Metadata.ContentType'] = client.context.metadata
+
+                # Add a third entry to the msg tuple containing the raw data,
+                # or a comment saying where it was written.
+                assert len(msg) == 2
+                msg = list(msg)
+                if client.context.data_sink.file_name:
+                    msg.append("Wrote raw data to: %s" \
+                               % client.context.file_name)
+                else:
+                    msg.append(client.context.data_sink.raw_data)
+                msg = tuple(msg)
+
+
+            # So that MinimalClient.request_id() returns the
+            # initiating id correctly even after following
+            # redirects.
+            msg[1]['Identifier'] = client.context.initiating_id
+
+            # Reset the context
+            client.context.release()
+            client.context = None
+
+            # Setting response is what marks the client finished.
+            client.response = msg
+            assert not client.is_running()
+        else:
+            if 'Metadata.ContentType' in msg[1]:
+                # Keep track of metadata as we follow redirects
+                client.context.metadata = msg[1]['Metadata.ContentType']
+
+        # Notify client.
+        if client.message_callback:
+            client.message_callback(client, msg)
+
+    def closed_handler(self):
+        """ INTERNAL: Callback called by the IAsyncSocket delegate when the
+            socket closes. """
+        def dropping(data): # REDFLAG: Harmless but remove eventually.
+            """ INTERNAL: Print warning when data is dropped after close. """
+            print "DROPPING %i BYTES OF DATA AFTER CLOSE!" % len(data)
+
+        self.node_hello = None
+        if not self.socket is None:
+            self.socket.recv_callback = lambda x:None
+            self.socket.recv_callback = dropping # Ignore any subsequent data.
+
+        # Hmmmm... other info, ok to share this?
+        fake_msg = ('ProtocolError', {'CodeDescription':'Socket closed'})
+        #print "NOTIFIED: CLOSED"
+
+        # Hmmmm... iterate over values instead of keys?
+        for identifier in self.running_clients:
+            client = self.running_clients[identifier]
+            # Remove client from list of running clients.
+            #print "CLIENT:", client
+            #print "CLIENT.CONTEXT:", client.context
+            assert client.context
+            assert client.context.running_id
+            # Notify client that it has stopped.
+            if (client.context.initiating_id == client.context.running_id
+                and client.message_callback):
+                client.message_callback(client, fake_msg)
+
+        self.running_clients.clear()
+        self.state_callback(self, CLOSED)
+
+    def writable_handler(self):
+        """ INTERNAL: Callback called by the IAsyncSocket delegate when
+            it needs more data to write.
+        """
+
+        if not self.data_source:
+            return
+        data = self.data_source.read()
+        if not data:
+            self.data_source.release()
+            self.data_source = None
+            self.socket.writable_callback = None
+            if self.is_connected():
+                self.state_callback(self, CONNECTED)
+            return
+        self.socket.write_bytes(data)
+
+# Writes to file if file_name is set, raw_data otherwise
+class DataSink:
+    """ INTERNAL: Helper class used to save trailing data for FCP
+        messages.
+    """
+
+    def __init__(self):
+        self.file_name = None
+        self.file = None
+        self.raw_data = ''
+        self.data_bytes = 0
+
+    def initialize(self, data_length, file_name):
+        """ Initialize the instance.
+            If file_name is not None the data is written into
+            the file, otherwise, it is saved in the raw_data member.
+        """
+        # This should only be called once. You can't reuse the datasink.
+        assert not self.file and not self.raw_data and not self.data_bytes
+        self.data_bytes = data_length
+        self.file_name = file_name
+
+    def write_bytes(self, bytes):
+        """ Write bytes into the instance.
+
+            Multiple calls can be made. The final amount of
+            data written into the instance MUST be equal to
+            the data_length value passed into the initialize()
+            call.
+        """
+
+        #print "WRITE_BYTES called."
+        if self.file_name and not self.file:
+            self.file = open(self.file_name, 'wb')
+
+        if self.file:
+            #print "WRITE_BYTES writing to file"
+            if self.file.closed:
+                print "FileOrStringDataSink -- refusing to write" \
+                      + " to closed file!"
+                return
+            self.file.write(bytes)
+            self.data_bytes -= len(bytes)
+            assert self.data_bytes >= 0
+            if self.data_bytes == 0:
+                self.file.close()
+            return
+
+        self.raw_data += bytes
+        self.data_bytes -= len(bytes)
+        assert self.data_bytes >= 0
+
+    def release(self):
+        """ Release all resources associated with the instance. """
+
+        if self.data_bytes != 0:
+            print "DataSink.release -- DIDN'T FINISH PREVIOUS READ!", \
+                  self.data_bytes
+        if self.file:
+            self.file.close()
+        self.file_name = None
+        self.file = None
+        self.raw_data = ''
+        self.data_bytes = 0
+
+class RequestContext:
+    """ INTERNAL: 'Live' context information which an FCPConnection needs
+        to keep about a single FCP request.
+    """
+    def __init__(self, allowed_redirects, identifier, uri):
+        self.initiating_id = identifier
+        self.running_id = identifier
+
+        # Redirect handling
+        self.allowed_redirects = allowed_redirects
+        self.last_uri = uri
+        self.metadata = "" # Hmmm...
+
+        # Incoming data handling
+        self.data_sink = DataSink()
+
+    def writable(self):
+        """ Returns the number of additional bytes which can be written
+            into the data_sink member.
+        """
+
+        return self.data_sink.data_bytes
+
+    def release(self):
+        """ Release all resources associated with the instance. """
+
+        self.data_sink.release()
+
+
+#-----------------------------------------------------------#
+# Client code
+#-----------------------------------------------------------#
+
+# Hmmmm... created separate class because pylint was complaining
+# about too many attributes in MinimalClient and FCPClient
+class ClientParams:
+    """ A helper class to aggregate request parameters. """
+
+    def __init__(self):
+        self.definition = None
+        # These are default values which can be modified by the client code.
+        # THE IMPLEMENTATION CODE i.e. fcp(connection/client/message)
+        # MUST NOT MODIFY THEM.
+        self.default_fcp_params = {}
+        # These are per request values. They can be modified / reset.
+        self.fcp_params = {}
+        self.async = False
+        self.file_name = None
+        self.send_data = None
+        self.allowed_redirects = 0
+
+    def reset(self):
+        """ Reset all members EXCEPT async, allowed_redirects and
+            default_fcp_params to their default values.
+        """
+
+        self.definition = None
+        self.fcp_params = {}
+        self.file_name = None
+        self.send_data = None
+
+    # HACK: Not really required, but supresses pylint R0903
+    def pretty(self):
+        """Returns a human readable rep of the params. """
+
+        return "%s: %s %s %s %s %s %s" % \
+               ( self.definition[0],
+                 str(self.send_data),
+                 str(self.async),
+                 self.file_name,
+                 self.allowed_redirects,
+                 self.fcp_params,
+                 self.default_fcp_params )
+
+class MinimalClient:
+    """ A single FCP request which can be executed via the
+        FCPConnection.start_request() method.
+
+        If in_params.async is True the request runs asynchronously,
+        otherwise it causes FCPConnection.start_request() to block.
+
+        The message_callback notifier function is called for
+        each incoming FCP message during the request. The first
+        argument is the client instance.  Its is_finished()
+        method will return True for the final message. message_callback
+        implementations MUST NOT modify the state of the client
+        instance while is_finished() is False.
+    """
+
+    def __init__(self):
+        # IN parameters.
+        self.in_params = ClientParams()
+
+        # OUT parameter
+        self.response = None
+
+        # Variables used while client request is running.
+        self.context = None
+
+        # Notification
+        self.message_callback = lambda client, msg:None
+
+    def reset(self, reset_params = True):
+        """ Reset all members EXCEPT self.in_params.allowed_redirects,
+            self.in_params.default_fcp_params and
+            self.in_params.async to their default values.
+        """
+        assert not self.is_running()
+        if reset_params:
+            self.in_params.reset()
+        self.response = None
+        self.context = None
+
+    def is_running(self):
+        """ Returns True if a request is running, False otherwise. """
+
+        return self.context
+
+    def is_finished(self):
+        """ Returns True if the request is finished, False otherwise. """
+
+        return not self.response is None
+
+    def request_id(self):
+        """ Returns the request id. """
+        if self.response and not self.context:
+            return self.response[1]['Identifier']
+        elif self.context:
+            return self.context.initiating_id
+        return None
diff --git a/release/fcpmessage.py b/release/fcpmessage.py
new file mode 100644
--- /dev/null
+++ b/release/fcpmessage.py
@@ -0,0 +1,333 @@
+""" Classes and functions for creating and parsing FCP messages.
+
+    Copyright (C) 2008 Darrell Karbott
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public
+    License as published by the Free Software Foundation; either
+    version 2.0 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+    Author: djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
+
+    An FCP message is represented as a
+    (msg_name, msg_values_dict) tuple.
+
+    Some message e.g. AllData may have a third entry
+    which contains the raw data string for the FCP
+    message's trailing data.
+"""
+
+#-----------------------------------------------------------#
+# FCP message creation helper functions
+#-----------------------------------------------------------#
+
+def merge_params(params, allowed, defaults = None):
+    """ Return a new dictionary instance containing only the values
+        which have keys in the allowed field list.
+
+        Values are taken from defaults only if they are not
+        set in params.
+    """
+
+    ret = {}
+    for param in allowed:
+        if param in params:
+            ret[param] = params[param]
+        elif defaults and param in defaults:
+            ret[param] = defaults[param]
+    return ret
+
+def format_params(params, allowed, required):
+    """ INTERNAL: Format params into an FCP message body string. """
+
+    ret = ''
+    for field in params:
+        if not field in allowed:
+            raise ValueError("Illegal field [%s]." % field)
+
+    for field in allowed:
+        if field in params:
+            if field == 'Files':
+                # Special case Files dictionary.
+                assert params['Files']
+                for subfield in params['Files']:
+                    ret += "%s=%s\n" % (subfield, params['Files'][subfield])
+                continue
+            value = str(params[field])
+            if not value:
+                raise ValueError("Illegal value for field [%s]." % field)
+            if value.lower() == 'true' or value.lower() == 'false':
+                value = value.lower()
+            ret += "%s=%s\n" % (field, value)
+        elif field in required:
+            #print "FIELD:", field, required
+            raise ValueError("A required field [%s] was not set." % field)
+    return ret
+
+# REDFLAG: remove trailing_data?
+def make_request(definition, params, defaults = None, trailing_data = None):
+    """ Make a request message string from a definition tuple
+        and params parameters dictionary.
+
+        Values for allowed parameters not specified in params are
+        taken from defaults if they are present and params IS
+        UPDATED to include these values.
+
+        A definition tuple has the following entries:
+        (msg_name, allowed_fields, required_fields, contraint_func)
+
+        msg_name is the FCP message name.
+        allowed_fields is a sequence of field names which are allowed
+           in params.
+        required_fields is a sequence of field names which are required
+           in params. If this is None all the allowed fields are
+           assumed to be required.
+        constraint_func is a function which takes definitions, params
+           arguments and can raise if contraints on the params values
+           are not met. This can be None.
+    """
+
+    #if 'Identifier' in params:
+    #    print "MAKE_REQUEST: ", definition[0], params['Identifier']
+    #else:
+    #    print "MAKE_REQUEST: ", definition[0], "NO_IDENTIFIER"
+
+    #print "DEFINITION:"
+    #print definition
+    #print "PARAMS:"
+    #print params
+    name, allowed, required, constraint_func = definition
+    assert name
+
+    real_params = merge_params(params, allowed, defaults)
+
+    # Don't force repetition if required is the same.
+    if required is None:
+        required = allowed
+
+    ret = name + '\n' + format_params(real_params, allowed, required) \
+          + 'EndMessage\n'
+
+    # Run extra checks on parameter values
+    # Order is important.  Format_params can raise on missing fields.
+    if constraint_func:
+        constraint_func(definition, real_params)
+
+    if trailing_data:
+        ret += trailing_data
+
+    params.clear()
+    params.update(real_params)
+
+    return ret
+
+#-----------------------------------------------------------#
+# FCP request definitions for make_request()
+#-----------------------------------------------------------#
+
+def get_constraint(dummy, params):
+    """ INTERNAL: Check get params. """
+    if 'ReturnType' in params and params['ReturnType'] != 'disk':
+        if 'Filename' in params or 'TempFilename' in params:
+            raise ValueError("'Filename' and 'TempFileName' only allowed" \
+                             + " when 'ReturnType' is disk.")
+
+def put_file_constraint(dummy, params):
+    """ INTERNAL: Check put_file params. """
+    # Hmmmm... this only checks for required arguments, it
+    # doesn't report values that have no effect.
+    upload_from = 'direct'
+    if 'UploadFrom' in params:
+        upload_from = params['UploadFrom']
+    if upload_from == 'direct':
+        if not 'DataLength' in params:
+            raise ValueError("'DataLength' MUST be set, 'UploadFrom =="
+                                 + " 'direct'.")
+    elif upload_from == 'disk':
+        if not 'Filename' in params:
+            raise ValueError("'Filename' MUST be set, 'UploadFrom =="
+                             + " 'disk'.")
+        elif upload_from == 'redirect':
+            if not 'TargetURI' in params:
+                raise ValueError("'TargetURI' MUST be set, 'UploadFrom =="
+                                 + " 'redirect'.")
+    else:
+        raise ValueError("Unknown value, 'UploadFrom' == %s" % upload_from)
+
+
+HELLO_DEF = ('ClientHello', ('Name', 'ExpectedVersion'), None, None)
+
+# Identifier not included in doc?
+GETNODE_DEF = ('GetNode', ('Identifier', 'GiveOpennetRef', 'WithPrivate',
+                           'WithVolatile'),
+               None, None)
+
+#IMPORTANT: One entry tuple MUST have trailing comma or it will evaluate
+#           to a string instead of a tuple.
+GENERATE_SSK_DEF = ('GenerateSSK', ('Identifier',), None, None)
+GET_REQUEST_URI_DEF = ('ClientPut',
+                       ('URI', 'Identifier', 'MaxRetries', 'PriorityClass',
+                        'UploadFrom', 'DataLength', 'GetCHKOnly'),
+                       None, None)
+GET_DEF = ('ClientGet',
+           ('IgnoreDS', 'DSOnly', 'URI', 'Identifier', 'Verbosity',
+            'MaxSize', 'MaxTempSize', 'MaxRetries', 'PriorityClass',
+            'Persistence', 'ClientToken', 'Global', 'ReturnType',
+            'BinaryBlob', 'AllowedMimeTypes', 'FileName', 'TmpFileName'),
+           ('URI', 'Identifier'),
+           get_constraint)
+PUT_FILE_DEF = ('ClientPut',
+                ('URI', 'Metadata.ContentType', 'Identifier', 'Verbosity',
+                 'MaxRetries', 'PriorityClass', 'GetCHKOnly', 'Global',
+                 'DontCompress','ClientToken', 'Persistence',
+                 'TargetFilename', 'EarlyEncode', 'UploadFrom', 'DataLength',
+                 'Filename', 'TargetURI', 'FileHash', 'BinaryBlob'),
+                ('URI', 'Identifier'),
+                put_file_constraint)
+PUT_REDIRECT_DEF = ('ClientPut',
+                    ('URI', 'Metadata.ContentType', 'Identifier', 'Verbosity',
+                     'MaxRetries', 'PriorityClass', 'GetCHKOnly', 'Global',
+                     'ClientToken', 'Persistence', 'UploadFrom',
+                     'TargetURI'),
+                    ('URI', 'Identifier', 'TargetURI'),
+                    None)
+PUT_COMPLEX_DIR_DEF = ('ClientPutComplexDir',
+                       ('URI', 'Identifier', 'Verbosity',
+                        'MaxRetries', 'PriorityClass', 'GetCHKOnly', 'Global',
+                        'DontCompress', 'ClientToken', 'Persistence',
+                        'TargetFileName', 'EarlyEncode', 'DefaultName',
+                        'Files'), #<- one off code in format_params() for this
+                       ('URI', 'Identifier'),
+                       None)
+
+REMOVE_REQUEST_DEF = ('RemoveRequest', ('Identifier', 'Global'), None, None)
+
+# REDFLAG: Shouldn't assert on bad data! raise instead.
+# Hmmmm... I hacked this together by unwinding a "pull" parser
+# to make a "push" parser.  Feels like there's too much code here.
+class FCPParser:
+    """Parse a raw byte stream into FCP messages and trailing data blobs.
+
+       Push bytes into the parser by calling FCPParser.parse_bytes().
+       Set FCPParser.msg_callback to get the resulting FCP messages.
+       Set FCPParser.context_callback to control how trailing data is written.
+       See RequestContext in the fcpconnection module for an example of how
+       contexts are supposed to work.
+
+       NOTE: This only handles byte level presentation. It DOES NOT validate
+             that the incoming messages are correct w.r.t. the FCP 2.0 spec.
+    """
+    def __init__(self):
+        self.msg = None
+        self.prev_chunk = ""
+        self.data_context = None
+
+        # lambda's prevent pylint E1102 warning
+        # Called for each parsed message.
+        self.msg_callback = lambda msg:None
+
+        # MUST set this callback.
+        # Return the RequestContext for the request_id
+        self.context_callback = None #lambda request_id:RequestContext()
+
+    def handle_line(self, line):
+        """ INTERNAL: Process a single line of an FCP message. """
+        if not line:
+            return False
+
+        if not self.msg:
+            # Start of a new message
+            self.msg = [line, {}]
+            return False
+
+        pos = line.find('=')
+        if pos != -1:
+            # name=value pair
+            fields = (line[:pos], line[pos + 1:])
+            # CANNOT just split
+            # fields = line.split('=')
+            # e.g.
+            # ExtraDescription=Invalid precompressed size: 81588 maxlength=10
+            assert len(fields) ==  2
+            self.msg[1][fields[0].strip()] = fields[1].strip()
+        else:
+            # end of message line
+            if line == 'Data':
+                # Handle trailing data
+                assert self.msg
+                # REDFLAG: runtime protocol error (should never happen)
+                assert 'Identifier' in self.msg[1]
+                assert not self.data_context
+                self.data_context = self.context_callback(self.msg[1]
+                                                          ['Identifier'])
+                self.data_context.data_sink.initialize(int(self.msg[1]
+                                                           ['DataLength']),
+                                                       self.data_context.
+                                                       file_name)
+                return True
+
+            assert line == 'End' or line == 'EndMessage'
+            msg = self.msg
+            self.msg = None
+            assert not self.data_context or self.data_context.writable() == 0
+            self.msg_callback(msg)
+
+        return False
+
+    def handle_data(self, data):
+        """ INTERNAL: Handle trailing data following an FCP message. """
+        #print "RECVD: ", len(data), "bytes of data."
+        assert self.data_context
+        self.data_context.data_sink.write_bytes(data)
+        if self.data_context.writable() == 0:
+            assert self.msg
+            msg = self.msg
+            self.msg = None
+            self.data_context = None
+            self.msg_callback(msg)
+
+    def parse_bytes(self, bytes):
+        """ This method drives an FCP Message parser and eventually causes
+            calls into msg_callback().
+        """
+        #print "FCPParser.parse_bytes -- called"
+        if self.data_context and self.data_context.writable():
+            # Expecting raw data.
+            assert not self.prev_chunk
+            data = bytes[:self.data_context.writable()]
+            self.handle_data(data) # MUST handle msg notification!
+            bytes = bytes[len(data):]
+            if bytes:
+                # Hmmm... recursion depth
+                self.parse_bytes(bytes)
+        else:
+            # Expecting a \n terminated line.
+            bytes = self.prev_chunk + bytes
+            self.prev_chunk = ""
+            last_eol = -1
+            pos = bytes.find('\n')
+            while pos != -1:
+                if last_eol <= 0:
+                    last_eol = 0
+
+                line = bytes[last_eol:pos].strip()
+                last_eol = pos
+                if self.handle_line(line):
+                    # Reading trailing data
+                    # Hmmm... recursion depth
+                    self.parse_bytes(bytes[last_eol + 1:])
+                    return
+                pos = bytes.find('\n', last_eol + 1)
+
+            assert not self.data_context or not self.data_context.writable()
+            self.prev_chunk = bytes[last_eol + 1:]
+
diff --git a/release/fms_message_template.txt b/release/fms_message_template.txt
new file mode 100644
--- /dev/null
+++ b/release/fms_message_template.txt
@@ -0,0 +1,13 @@
+jfniki has been updated to version: __HEAD__
+
+freesite:
+__SITE_USK__
+
+plugin jar:
+__JAR_CHK__
+
+source code:
+__SRC_CHK__
+
+summary:
+__RELEASE_NOTES__
diff --git a/release/generated_freesite/activelink.png b/release/generated_freesite/activelink.png
new file mode 100644
index 0000000000000000000000000000000000000000..a3b7c48e68e312742bcc4d65665dd50bac164818
GIT binary patch
literal 2019
zc$@*_2ORi`P)<h;3K|Lk000e1NJLTq003+N001Ni1^@s6Y;E7n00001b5ch_0Itp)
z=>Px#24YJ`L;(K){{a7>y{D4^000SaNLh0L01FcU01FcV0GgZ_000MeNkl<ZXx{Cb
zdvKIj8Hb;}-*>Y~mLx0*4FM65P_#g`B1o|)j<qtqQE2Hn&>1PVwM<hTP$pUjT4tOn
zMJW`e6gw#blVS@qnJN)Yicu~~AtnvEu*rrdySeNw*~|Cs@sGlWkT3y~HVf=;X8$?o
z?D@{wXWz?t&!-SV2vWLfq)hz;DPT#bK#z>SN4_QYqP~?ubq6J0D|K`Bd<dmN!K2?C
z!~I{&NC8VW`9l(K9J)ZWH_D>B?7Z+uE}^i@;~Q&O_?0w_I#mi-vgr870bVX?=Be*y
zv-aWKK5Gj}j7ig=(<oCGmMq$T)I;T2Kd-Kt$k+^RqFs^&zu478(W7~&l!_FvWYAwv
zb#b_=lixiz3B6X?57w{WKF{4#txO$nOl675psN4**<0@6x6k&6B?PSB)x@N6Mt=D1
z>{OOG8tRF}Q7b`vF&0(T_<6IejaQzUfI-&>tHl#x>p?e@bB#Q<V9YJ!_HPPcQ7O(}
zPIUABKRamgM$u^$jLXyk;Pu7W_S_`Q235kUet-A^?^SiO?y+2Ef6;nNB~^-KE=@&a
zab7EJrM9JquTQse&onE}OdXOeXz@l7LQs&SPgq-^SLSCs8<{=X!jid}35Zv<*04-l
zSB(4|eG1rcC4ddPni%WQvhq7Qs8vH}cdzViMypY<?A|Q=JrY8I0#GUyq+8V3OnnZe
zwz-#|?`lM&Qjn3Rp?Jj|*v;w`up8Bdjwr>uo7ndJ7r53$+|wPSq`aN3fW-8C6MCHz
zA>dqV568{~D4byANB29iThu(grH)mLbC^+Jrt*xB4^DJb{N(sl3>%JxDOJ~ZW^$xy
zc;*MrL3+|zwOFRK!plp4bP<yT_s+C)=R`9A+38w*p;*c#4hxuo;<uWpYYp?QFQ@Un
zuV#>8)r`oza$F=_Y<E!N`Pb`t;o)4S<QY@IhN~7&gk#ly{#_d&)23nVgX2)CMy6Wf
z^~d<>&PG-&%wle#EoEUNspj?w&up#Z?R8Tz>PF_gzp<p51v72TxrtK$q)&_7-U#nj
zc2IfN$4?&29Wht{tsP;UnK##=I%zCSk_C+y!nAfoIMWp3oI8wOrz94O^QC+f56yF6
z(vR%5$o``qx<e8x7w4paU8{n<GH;fBLghz(EG9Ke=4MbZ)<BkBi%GBiOsng%EGXV{
zo~}TQ^~=YTW!I*FT}{1_INRQ9#nm2RUZIWocczhMQll7zPqQb&>G~jEzeI;G#wT4-
z6bc1db`2R;4Mv@kaSk05odz7X>(3<rRTJQy@^-8y6;Ixm%^lhL6tF9`sl<iJpkmGU
zodY0AvY@RiN@+z0ryGI<du3)6n3+GrMw(fT#rUaf425Mn{Sv1eL)5qS;%bXxHL5V`
zRX8&>%$Z`PV5|YNUWHMo95TE7@T`w5rEX?Vw(`V%S!CKYDPWiM;u{UTylTRL_Cr-2
zyi@L>V61`pGi(&*o3NVHLzbq*MHeT@g6@!nt1V1>SBz>`4`&)fC=@U`*T}5N7UmV&
z67|O`6_w(gstZzbq#Z|^mS>kZhpb877Qoj1p@|HOn!Bf3QK>cj{mA?LsjLo_&O+(y
zYsj%*f9yJN-~h8`%_1)^Z$PZ-2uRepdN^{b3tvdaWKc0@iiMd4X2v;m1Llgy1^bVB
zDLdZDibYv`qcAO5Dhi{cNK6+0ITsLr`){|{y1PtVS_&Wz9XfPFa{^-b?%hL<ljOMY
zbVtPpC%VKVFI9`89rdC!5F2!?KO~8#U#%0H_qs$f=<^5mX?3XI=h(4h31FKyZx#&=
z4MR>7i^as1|F@xlWn^TCg$oxBfcKA&y2az0YDC%b&OyH?%W<*u@CC7E%f})dm2Wd(
zmxB{Rc)eaxT3RXq;rII!-O*@NxZQ5i+uJMLZnua;B8k3WFeuz^w+IG<R|V$rctmGs
zAAsiOX5sVsL^vECbUC43Nj$N!M(jJ<F5*{Na%qJ}ytt!&RDq59I~}d9t+?H80BUM#
z6745Xp2X>NQdCq#b#*lsizU(L^ZBT)t;K9M59Z<t27@>p4x-U$qP@Ppo@vvjv3&XR
zLDyl_tJtx25;d+KN{+M*I_80UGKtA>xT^E^i(%rjM^{u-3~+B%RaO1kmMvQ*^7Hfi
zT$v;d>Jp2Kiv=LU;joCu``gO&>C?s1rAvp5Z$&#A#IN==2}vGkYn}d>_|3kS+rYvU
z!>d??!(r@p`;g-{Y}f!mdwV;I8+fKlsbJH}d;|)L-*OG;x0zHJ^`rN0<LB!V6(hG%
zm1^*_(Y}5AkR*w`ygbUw%lmIusYIjET+=ewFU!M`rskJ>&U5*}=j6E{zMy;?0K2qn
zg<=3WwOWl<s~ym$R`*>dn9XLaR_nzn6c`K!GBY#L>-7msN>5KmmSt2b6+#FWELeaL
zg4L^6qu1+kxm+APco2iZ!1nFihlKXy2b^T5YgxOko?x#`I2vc$pIccp%RXwrZkqeB
z(NyIbAMbwHL03Rx(Oq^Pn&(IXODP#+SW3gz{{ip2$WdDnSP}pL002ovPDHLkV1nax
B+T#EK

diff --git a/release/generated_freesite/index.html b/release/generated_freesite/index.html
new file mode 100644
diff --git a/release/index_template.html b/release/index_template.html
new file mode 100644
--- /dev/null
+++ b/release/index_template.html
@@ -0,0 +1,48 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<title>jfniki version: __HEAD__</title>
+</head>
+<body>
+<h4>jfniki version: __HEAD__</h4>
+<div>
+<code>jfniki plugin.jar</code>:<br/>
+<pre><code><a href="/__JAR_CHK__">__JAR_CHK__</a></code></pre><p/>
+<code>source code tarball:</code><br/>
+<pre><code><a href="/__SRC_CHK__">__SRC_CHK__</a></code></pre><p/>
+<a href="/USK@Gq-FBhpgvr11VGpapG~y0rGFOAHVfzyW1WoKGwK-fFw,MpzFUh5Rmw6N~aMKwm9h2Uk~6aTRhYaY0shXVotgBUc,AQACAAE/fniki/__INDEX_FNIKI__/HgInfocalypse">hg infocalypse</a> repo:<br/>
+<code>
+USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,AQACAAE/jfniki.R1/__INDEX_REPO__
+</code>
+</div>
+<div>
+<h4>release notes</h4>
+<pre><code>__RELEASE_NOTES__</code></pre>
+<h4>about jfniki</h4>
+<ul>
+  <li>The <a href = "/USK@UB0RPdoXvA61bfDbpvIVFyft1JiqUPhTLONHsWVGU0k,gFG9I3679g-1dUZvOorUuudr~JvSgRemmMdfiPxxcY8,AQACAAE/freenetdocwiki_mirror/__INDEX_FDW__/">Freenet Doc Wiki Mirror</a>
+has good documentation on jfniki. Activelink: <br/>
+  <img src = "/USK@UB0RPdoXvA61bfDbpvIVFyft1JiqUPhTLONHsWVGU0k,gFG9I3679g-1dUZvOorUuudr~JvSgRemmMdfiPxxcY8,AQACAAE/freenetdocwiki_mirror/__INDEX_FDW__/activelink.png" alt = "[FWDM activelink]"
+       width="108" height="36"/>
+  </li>
+  <li> The
+  <a href="/USK@Gq-FBhpgvr11VGpapG~y0rGFOAHVfzyW1WoKGwK-fFw,MpzFUh5Rmw6N~aMKwm9h2Uk~6aTRhYaY0shXVotgBUc,AQACAAE/fniki/__INDEX_FNIKI__/JfnikiProtoType">JfnikiProtoType</a> page in the old wikibot based wiki
+  has some technical info.
+  </li>
+  <li> author: <code>djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks</code> </li>
+</ul>
+</div>
+<div>
+<br/>
+<br/>
+<br/>
+</div>
+<div>
+<h4>site info</h4>
+<img src="activelink.png" alt="[jfniki releases activelink.png]" width="108" height="36"/>
+<p>
+<a href="/?newbookmark=__SITE_USK__&amp;desc=jfniki releases">[bookmark!]</a>
+</div>
+</body>
+</html>
diff --git a/release/insert_files.py b/release/insert_files.py
new file mode 100644
--- /dev/null
+++ b/release/insert_files.py
@@ -0,0 +1,52 @@
+import time
+from fcpconnection import FCPConnection, PolledSocket
+from fcpclient import FCPClient
+
def insert_files(host, port, names):
    """ Insert each named file into Freenet as a CHK.

    host, port -- address of the FCP server on the Freenet node.
    names      -- sequence of local file paths to insert.

    Returns a list parallel to names: the inserted CHK URI for each
    file, or None for any insert that did not finish with
    'PutSuccessful'.

    NOTE(review): the connection/socket are never explicitly closed;
    presumably they are released when the objects are GC'd -- confirm
    against fcpconnection.
    """
    socket = PolledSocket(host, port)
    connection = FCPConnection(socket, True)

    inserts = []
    for name in names:
        client = FCPClient(connection)
        client.in_params.default_fcp_params['MaxRetries'] = 3
        client.in_params.default_fcp_params['PriorityClass'] = 1

        inserts.append(client)
        # 'async' became a reserved word in Python 3.7.  setattr() sets
        # the same attribute with identical behavior on Python 2 while
        # keeping this file parseable by modern interpreters/tools.
        setattr(client.in_params, 'async', True)
        parts = name.split('/')

        # Insert under the file's base name so the CHK carries a
        # human-readable filename hint.
        client.put_file('CHK@' + '/' + parts[-1], name)
        # Hmmmm... Ugly. Add FCPConnection.wait_until_upload_finishes() ?
        while connection.is_uploading():
            socket.poll()
            time.sleep(.25)

    # Pump the socket until every queued insert reports finished, or
    # the socket dies.
    while not all(insert.is_finished() for insert in inserts):
        if not socket.poll():
            break
        time.sleep(.25)

    uris = []
    for insert in inserts:
        response = insert.response
        if response is None or len(response) < 2 or response[0] != 'PutSuccessful':
            # Failed, still running, or malformed response.
            uris.append(None)
            continue

        uris.append(response[1]['URI'])

    return uris
+
# FCP server address; adjust to match the local Freenet node.
FCP_HOST = '127.0.0.1'
FCP_PORT = 19481
# Example files to insert when running this module directly.
FILES = ['/tmp/0.txt', '/tmp/1.txt', '/tmp/2.txt' ]

def main():
    """ Insert FILES via FCP and print the resulting URI (or None)
    for each file, separated by '---' markers.
    """
    uris = insert_files(FCP_HOST, FCP_PORT, FILES)
    for name, uri in zip(FILES, uris):
        # Single-argument parenthesized print behaves identically on
        # Python 2 and keeps the file parseable on Python 3.
        print("---")
        print(name)
        print(uri)
    print("---")

# Deliberately disabled: uncomment to run the example insert.
#main()
diff --git a/release/minimalfms.py b/release/minimalfms.py
new file mode 100644
--- /dev/null
+++ b/release/minimalfms.py
@@ -0,0 +1,66 @@
+""" Code to send fms messages pillaged from hg infocalypse codebase.
+
+    Copyright (C) 2011 Darrell Karbott
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU General Public
+    License as published by the Free Software Foundation; either
+    version 2.0 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+    Author: djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
+"""
+
+import nntplib
+import StringIO
+
def get_connection(fms_host, fms_port, user_name):
    """ Open and return an NNTP connection to the fms server
    at fms_host:fms_port, authenticating as user_name. """
    nntp_server = nntplib.NNTP(fms_host, fms_port, user_name)
    return nntp_server
+
# Usenet-style message skeleton: From/Newsgroups/Subject headers, then
# the mandatory blank line separating headers from the body text.
# Filled via %-formatting with (sender, group, subject, text).
MSG_TEMPLATE = """From: %s
Newsgroups: %s
Subject: %s

%s"""
+
# Please use this function for good and not evil.
def send_msgs(server, msg_tuples, send_quit=False):
    """ Send messages via fms.
    msg_tuple format is: (sender, group, subject, text, send_callback)

    send_callback is optional.

    If it is present and not None send_callback(message_tuple)
    is invoked after each message is sent.

    It is legal to include additional client specific fields.

    If send_quit is True, QUIT is sent once after ALL messages
    have been posted.
    """

    for msg_tuple in msg_tuples:
        raw_msg = MSG_TEMPLATE % (msg_tuple[0],
                                  msg_tuple[1],
                                  msg_tuple[2],
                                  msg_tuple[3])

        in_file = StringIO.StringIO(raw_msg)
        try:
            server.post(in_file)

            if len(msg_tuple) > 4 and msg_tuple[4] is not None:
                # Sent notifier
                msg_tuple[4](msg_tuple)
        finally:
            in_file.close()

    # BUG FIX: quit exactly once, after the loop.  The original called
    # server.quit() inside the loop, closing the connection after the
    # first message and breaking every subsequent post() when more
    # than one message was queued.
    if send_quit:
        server.quit()
+