Support for Freenet bookmarks and multiple bugfixes.

Features:
- Added support for Freenet bookmarks via the [[BookMark]] wikitext macro.
- Allow users to view the source of read-only pages in piki.py.

Fixes:
- Changed wikibot to force a commit of uncommitted files before inserting the freesite.
- Changed fcpclient.dir_data_source() to sort file_infos.
  (The two changes above allow freesite versions to be cleanly re-inserted from the repo.)
- Cleaned up page fork creation to prevent spurious forks.
- Changed wikibot to keep track of submission .zip CHKs and skip ones that have already been applied.
- Changed submission.py to use utf8_sha instead of new_sha in a few places.
- Added somewhat better exception and stdout messages from FMSStub.
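For context, the new [[BookMark]] macro (see the piki.py hunk below) reads its link data from an info.txt file in the wiki data directory: the first three ASCII lines are the bookmark USK, a description, and the link text. A minimal sketch of writing a compatible file, where the path and values are illustrative rather than part of this changeset:

    import codecs
    import os

    data_dir = '/path/to/wiki/data'  # illustrative; piki.py uses its own data_dir

    # _macro_BookMark() parses the first three lines:
    #   usk, desc, link_name = in_file.read().splitlines()[:3]
    out = codecs.open(os.path.join(data_dir, 'info.txt'), 'wb', 'ascii')
    out.write('USK@.../fniki/2/\n')       # line 1: bookmark target USK (key elided)
    out.write('An example fniki wiki\n')  # line 2: bookmark description
    out.write('Bookmark this wiki\n')     # line 3: link text rendered for [[BookMark]]
    out.close()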
diff --git a/fniki/piki.py b/fniki/piki.py
--- a/fniki/piki.py
+++ b/fniki/piki.py
@@ -496,6 +496,39 @@ def _macro_RemoteChanges():
         else:
             buf.write(reject_summary(entry, time_tuple))
     return buf.getvalue()
+
+
+def _macro_BookMark():
+    # REDFLAG: Revisit.
+    # Config file is in the directory above the data_dir directory,
+    # so I don't want to depend on that while running.
+    # Used info.txt file from the head end instead.
+
+    full_path = os.path.join(data_dir, 'info.txt')
+    try:
+        in_file = codecs.open(full_path, 'rb', 'ascii')
+        usk, desc, link_name = in_file.read().splitlines()[:3]
+
+    except ValueError:
+        return "[BookMark macro failed: couldn't parse data from info.txt]"
+    except IOError:
+        return "[BookMark macro failed: couldn't read data from info.txt]"
+    except UnicodeError:
+        # REDFLAG: Untested code path.
+        return "[BookMark macro failed: illegal encoding in info.txt]"
+
+    if (has_illegal_chars(usk) or
+        has_illegal_chars(desc) or
+        has_illegal_chars(link_name)):
+        return "[BookMark macro failed: illegal html characters in info.txt]"
+
+    if not scrub_links:
+        return '<a href="%s">%s</a>' % (LINKS_DISABLED_PAGE, link_name)
+
+    return ('<a href="/?newbookmark=%s&desc=%s">%s</a>'
+            % (usk, desc, link_name))
+
+
 # ----------------------------------------------------------
 # REDFLAG: faster way to do this? does it matter?
@@ -638,7 +671,7 @@ class PageFormatter:
               + r"|(?P<pre>(\{\{\{|\}\}\}))"
               + r"|(?P<macro>\[\[(TitleSearch|FullSearch|WordIndex"
               + r"|TitleIndex|ActiveLink"
-              + r"|LocalChanges|RemoteChanges|GoTo)\]\])"
+              + r"|LocalChanges|RemoteChanges|BookMark|GoTo)\]\])"
              + r")")
     blank_re = re.compile("^\s*$")
     bullet_re = re.compile("^\s+\*")
@@ -743,6 +776,8 @@ class Page:
             print '<hr>'
             if is_read_only(data_dir, self.page_name):
                 print "<em>The bot owner has marked this page read only.</em>"
+                print (('<br><a href="?viewunmodifiedsource=%s">' %
+                        self.page_name) + '[View page source]</a><br>')
                 return
 
         if unmodified:
diff --git a/infocalypse/devnotes.txt b/infocalypse/devnotes.txt
--- a/infocalypse/devnotes.txt
+++ b/infocalypse/devnotes.txt
@@ -1,8 +1,22 @@
 !!! experimental branch for testing wiki over hg idea !!!
 
-See:
-USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,AQACAAE/fniki_demo/3/
+See (note updated uri)
+freenet:USK@Gq-FBhpgvr11VGpapG~y0rGFOAHVfzyW1WoKGwK-fFw,MpzFUh5Rmw6N~aMKwm9h2Uk~6aTRhYaY0shXVotgBUc,AQACAAE/fniki/2/
 
 !!!
+djk20100123
+Saw error reinserting fred staging mirror:
+{4}:008c3b951f:(102, 117, 0):PutSuccessful
+Exiting because of an IO error.
+{3}:4e965d6bb1:(0, 32, 0):ProtocolError:('ProtocolError', {'CodeDescription': 'Socket closed'})
+{3}:24a632ebbe:(32, 102, 0):ProtocolError:('ProtocolError', {'CodeDescription': 'Socket closed'})
+{3}:ec68723c40:(-1, 0, 0):ProtocolError:('ProtocolError', {'CodeDescription': 'Socket closed'})
+FCP connection [CLOSED]
+abort: No such file or directory: /home/dkarbott/infocalypse_tmp/_tmp_895966490831
+
+Not reproducible.
+Bug introduced by changes to wiki_hack branch?
+
+
 djk20091208
 
 Failed attempt to DRY out:
diff --git a/infocalypse/fcpclient.py b/infocalypse/fcpclient.py
--- a/infocalypse/fcpclient.py
+++ b/infocalypse/fcpclient.py
@@ -80,7 +80,7 @@ def get_file_infos(directory, forced_mim
     if directory[-1] != os.path.sep:
         # Force trailing path separator.
         directory += os.path.sep
-    file_info = (directory, [], accept_regex)
+    file_info = (directory, [], accept_regex) #REDFLAG: bad variable name
     os.path.walk(directory, walk_visitor, file_info)
     return file_info[1]
@@ -108,6 +108,19 @@ def set_index_file(file_infos, file_name
         file_infos.remove(index)
     file_infos.insert(0, index)
 
+def sort_file_infos(file_infos):
+    """ Helper function forces file infos into a fixed order.
+
+        Note: Doesn't move the first entry.
+    """
+
+    if len(file_infos) < 3:
+        return file_infos
+    rest = file_infos[1:]
+    rest.sort()
+
+    return file_infos[:1] + rest
+
 class FileInfoDataSource(IDataSource):
     """ IDataSource which concatenates files in a list of
         file infos into a contiguous data stream.
@@ -705,6 +718,11 @@ def dir_data_source(file_infos, in_param
             raise ValueError("You can't set 'DefaultName' via "
                              + "default_fcp_params.")
 
+    # IMPORTANT: Sort the file infos so that the same set of
+    # file_infos always yields the same inserted data blob.
+    # file_infos[0] isn't moved.
+    file_infos = sort_file_infos(file_infos)
+
     files = {}
     index = 0
     for info in file_infos:
diff --git a/infocalypse/fmsstub.py b/infocalypse/fmsstub.py
--- a/infocalypse/fmsstub.py
+++ b/infocalypse/fmsstub.py
@@ -73,16 +73,23 @@ class FMSStub:
         return NNTPStub()
 
     def send_msgs(self, dummy_server, msg_tuples, send_quit=False):
+        if not os.path.exists(self.base_dir):
+            print "FMSStub.send_msg -- THE MESSAGE SPOOL DIR DOESN'T EXIST!"
+            raise IOError("Message spool directory doesn't exist.")
+
         for msg_tuple in msg_tuples:
             # HACK: use lut to map partial -> full fms ids.
-            #print "msg_tuple[0]: ", msg_tuple[0]
-            #print "sender_lut: ", self.sender_lut
+            # print "msg_tuple[0]: ", msg_tuple[0]
+            # print "sender_lut: ", self.sender_lut
             sender = self.sender_lut.get(msg_tuple[0].split('@')[0],
                                          msg_tuple[0])
             print "sender: ", sender
             if sender != msg_tuple[0]:
                 print "fmsstub: FIXED UP %s->%s" % (msg_tuple[0], sender)
-                assert sender.find('@') != -1
+
+            if sender.find('@') == -1:
+                raise IOError("Couldn't fixup fms_id: %s. Add it to the LUT."
+                              % sender)
 
             full_path = os.path.join(self.base_dir,
                                      'out_going_%s.txt' % make_id())
@@ -110,6 +117,11 @@ class FMSStub:
             #print name
             mod_time = os.stat(os.path.join(self.base_dir,
                                             name))[stat.ST_MTIME]
+
+            if mod_time in by_mtime:
+                print "The msg ID hack in FMSStub failed!!!"
+                print "MANUALLY DELETE MSG FILE: ", name
+
             assert not mod_time in by_mtime
             by_mtime[mod_time] = name
diff --git a/infocalypse/run_wikibot.py b/infocalypse/run_wikibot.py
--- a/infocalypse/run_wikibot.py
+++ b/infocalypse/run_wikibot.py
@@ -181,8 +181,13 @@ def run_wikibot(params):
 
     if 'MSG_SPOOL_DIR' in params:
         print "READING MESSAGES FROM SPOOL DIR INSTEAD OF FMS!"
+        # This table MUST map all short names to full fms_ids for
+        # all message senders. MUST contain the bot fms_id.
+        lut = {'djk':'djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks'}
+        assert params['FMS_ID'] in lut
         bot_runner.nntp = FMSStub(params['MSG_SPOOL_DIR'],
-                                  params['FMS_GROUP'])
+                                  params['FMS_GROUP'],
+                                  lut)
 
     # Install a single WikiBot instance.
     wiki_bot = WikiBot('wikibot_' + params['USK_HASH'],
diff --git a/infocalypse/submission.py b/infocalypse/submission.py
--- a/infocalypse/submission.py
+++ b/infocalypse/submission.py
@@ -1,3 +1,4 @@
+# DCI: Clean up pylint errors in this file.
 """ Functions to bundle and unbundle wiki submission zip files.
 
     Copyright (C) 2009 Darrell Karbott
@@ -132,8 +133,8 @@ class SubmitError(Exception):
 class NoChangesError(SubmitError):
     """ Exception to indicate that there are no local changes
         to be submitted.
""" - def __init__(self): - SubmitError. __init__(self, "No changes found." , False) + def __init__(self, is_illegal=False): + SubmitError. __init__(self, "No changes found." , is_illegal) def pack_info(version, submitter): """ INTERNAL: Validate and pack __INFO__ contents into 7-bit ASCII. """ @@ -349,12 +350,24 @@ def raise_if_not_merging(is_merging, msg def handle_conflict(head, full_path, name, bytes, updated_sha): """ INTERNAL: Helper to deal with conflicting merges. """ assert full_path.endswith(name) + assert utf8_sha(bytes).digest() == updated_sha + + prev_sha = EMPTY_FILE_SHA + if head.exists(full_path): + prev_sha = utf8_sha(head.read(full_path)).digest() + + if updated_sha == prev_sha: + # Catch case where we patched to the same final file + # starting from a different base rev. + return 3, name # Already applied + versioned_name = "%s_%s" % (name, hexlify(updated_sha)) # REDFLAG: LATER: explict hg copy to minimize repo size? head.write(os.path.join(os.path.split(full_path)[0], versioned_name), bytes, 'wb') - return versioned_name + + return 4, versioned_name def checked_read_delta(arch, name): """ INTERNAL: Read a raw delta from an archive.""" @@ -362,8 +375,7 @@ def checked_read_delta(arch, name): if len(raw_delta) < 40: raise SubmitError("<40 bytes: %s" % name, True) return raw_delta -# DCI: BUG: Don't fork if final version == current version. i.e. already applied -# bug from a different base version. + def forking_extract_wikitext(arch, overlay, head, name): """ Helper function used by merge_wikitext() to merge a single file. """ @@ -394,9 +406,11 @@ def forking_extract_wikitext(arch, overl # Create a versioned conflict file because the file the # submitter wants to create already exists in the repo. raw_file = unicode_apply_patch('', raw_delta, updated_sha, name) - # Wrote conflicting version. - return 4, handle_conflict(head, full_path, - name, raw_file, updated_sha) + + # Note: This nops (3) instead of forking if the new file + # matches the existing one. + return handle_conflict(head, full_path, + name, raw_file, updated_sha) raw_a = '' ret = 0 @@ -419,8 +433,8 @@ def forking_extract_wikitext(arch, overl # Patch against the SUBMITTER'S version! raw_file = unicode_apply_patch(overlay.read(full_path, 'rb'), raw_delta, updated_sha, name) - return 4, handle_conflict(head, full_path, - name, raw_file, updated_sha) + return handle_conflict(head, full_path, + name, raw_file, updated_sha) raw_a = overlay.read(full_path, 'rb') tmp_sha = utf8_sha(raw_a).digest() @@ -435,8 +449,8 @@ def forking_extract_wikitext(arch, overl # submitter wants to modify already was modified in the repo. # Patch against the SUBMITTER'S version! raw_file = unicode_apply_patch(raw_a, raw_delta, updated_sha, name) - return 4, handle_conflict(head, full_path, name, - raw_file, updated_sha) + return handle_conflict(head, full_path, name, + raw_file, updated_sha) if tmp_sha == updated_sha: return 3, name # Already patched. @@ -640,6 +654,10 @@ class OverlayHasher: def check_merges(submitted_pages, all_pages, hexdigest_func): """ INTERNAL: Raises a SubmitError if the merge constraints aren't met. """ + + if len(submitted_pages) == 0: + raise NoChangesError(True) + #print "SUBMITTED_PAGES: ", submitted_pages conflicts = conflict_table(all_pages) resolved = conflict_table(submitted_pages) @@ -738,7 +756,8 @@ class ForkingSubmissionHandler: def apply_submission(self, msg_id, submission_tuple, raw_zip_bytes, tmp_file): """ Apply a submission zip bundle. 
""" - code = REJECT_CONFLICT + # REJECT_CONFLICT isn't used anymore. We just fork. + code = REJECT_ILLEGAL try: self.commit_results(msg_id, submission_tuple, merge_wikitext(self.ui_, @@ -749,6 +768,13 @@ class ForkingSubmissionHandler: raw_zip_bytes))) return True + except NoChangesError, err: + self.logger.debug("apply_submission -- no changes, illegal: %s" % + str(err.illegal)) + if not err.illegal: + # i.e. zip contained legal changes that were already applied. + code = REJECT_APPLIED + except SubmitError, err: self.logger.debug("apply_submission -- err: %s" % str(err)) @@ -762,7 +788,7 @@ class ForkingSubmissionHandler: str(submission_tuple)) raise # DCI self.update_change_log(msg_id, submission_tuple, - code, False) + code, False) return False # Sets needs commit on failure, but not success. Hmmm... @@ -859,7 +885,14 @@ class ForkingSubmissionHandler: # IMPLIES SUCCESS. def commit_results(self, msg_id, submission_tuple, results): """ INTERNAL: Commit the results of a submission to the local repo. """ - assert len(results[3]) == 0 + + print "RESULTS: ", results + if len(results[3]) > 0 and sum([len(results[index]) for index in + (0, 1, 2, 4)]) == 0: #HACK, fix order! + raise NoChangesError() + + assert sum([len(results[index]) for index in (0, 1, 2, 4)]) > 0 + wikitext_dir = os.path.join(self.full_base_path(), 'wikitext') raised = True # grrr pylint gives spurious @@ -902,12 +935,13 @@ class ForkingSubmissionHandler: self.logger.debug("commit_results -- popped log:\n%s" % text) - def force_commit(self): + def force_commit(self, msg='F', notify=True): # F -> Failed """ Force a commit to the repository after failure. """ self.logger.trace("force_commit -- Commit local changes " + "after failure.") commands.commit(self.ui_, self.repo, logfile=None, addremove=None, user=None, date=None, - message='F') # Must have failed. - self.notify_committed(False) + message=msg) + if notify: + self.notify_committed(False) diff --git a/infocalypse/test_merging.py b/infocalypse/test_merging.py --- a/infocalypse/test_merging.py +++ b/infocalypse/test_merging.py @@ -158,6 +158,13 @@ def committed(result): DEFAULT_WIKI_ROOT = 'wiki_root' DEFAULT_SUBMITTER = 'freenetizen@this_is_not_a_real_fms_id' +# hard coded path assumptions +def has_forks(overlay): + for name in overlay.list_pages(os.path.join(overlay.base_path, 'wikitext')): + if name.find('_') != -1: + return True + return False + class NoConflictTests(RepoTests): def testrepo(self): repo = self.make_repo('foobar') @@ -404,6 +411,157 @@ class NoConflictTests(RepoTests): + def test_nop_modify_file(self): + # setup the server repository + server_repo = self.make_repo('server') + original_page_bytes = 'This the default front page.\n' + final_page_bytes = 'This the final front page.\n' + self.commit_revision(server_repo, + (('wiki_root/wikitext/FrontPage', + original_page_bytes),), + 'Initial checkin of server repo.') + + self.commit_revision(server_repo, + (('wiki_root/wikitext/FrontPage', + final_page_bytes),), + 'Second commit of server repo.') + + # pull the client repo + client_repo = self.clone_repo(server_repo, 'client', '0') + + # get a write overlay for the client repo + overlay = self.get_write_overlay(client_repo) + + page_path = 'wiki_root/wikitext/FrontPage' + page_bytes = final_page_bytes + + # write the updated file into it. 
+        overlay.write(os.path.join(client_repo.root,
+                                   page_path),
+                      page_bytes)
+        # make a submission bundle
+        raw_zip_bytes = self.make_submission_zip(client_repo)
+
+        #(fms_id, usk_hash, base_version, chk, length)
+        msg_id = 'fake_msg_id_000'
+        submission_tuple = (DEFAULT_SUBMITTER,
+                            '000000000000',
+                            hex_version(server_repo)[:12],
+                            'CHK@fakechk',
+                            len(raw_zip_bytes))
+
+        server_overlay = self.get_hg_overlay(server_repo)
+        server_overlay.version = hex_version(server_repo) # tip
+        server_page_path = os.path.join(server_repo.root, page_path)
+
+        self.assertTrue(server_overlay.exists(server_page_path))
+        self.assertTrue(server_overlay.read(server_page_path) ==
+                        final_page_bytes)
+
+        # apply the submission bundle to the server repo
+        self.get_applier(server_repo).apply_submission(msg_id,
+                                                       submission_tuple,
+                                                       raw_zip_bytes,
+                                                       os.path.join(
+                                                           self.tmp_dir,
+                                                           '_tmp__applying'))
+        self.assertTrue(server_overlay.exists(server_page_path))
+        self.assertTrue(server_overlay.read(server_page_path) ==
+                        final_page_bytes)
+
+        server_overlay.version = hex_version(server_repo) # new tip
+        self.assertTrue(server_overlay.exists(server_page_path))
+
+        # check that the versions are the same
+        self.assertTrue(server_overlay.read(server_page_path) ==
+                        final_page_bytes)
+        self.assertTrue(not has_forks(server_overlay))
+
+    def test_partial_nop_apply_file(self):
+        # setup the server repository
+        server_repo = self.make_repo('server')
+        original_page_bytes = 'This the default front page.\n'
+        final_page_bytes = 'This the final front page.\n'
+        self.commit_revision(server_repo,
+                             (('wiki_root/wikitext/FrontPage',
+                               original_page_bytes),),
+                             'Initial checkin of server repo.')
+
+        self.commit_revision(server_repo,
+                             (('wiki_root/wikitext/FrontPage',
+                               final_page_bytes),),
+                             'Second commit of server repo.')
+
+        # pull the client repo
+        client_repo = self.clone_repo(server_repo, 'client', '0')
+
+        # get a write overlay for the client repo
+        overlay = self.get_write_overlay(client_repo)
+
+        page_path = 'wiki_root/wikitext/FrontPage'
+        page_bytes = final_page_bytes
+
+        new_page_path = 'wiki_root/wikitext/NewPage'
+        new_page_bytes = 'this is a new page\n'
+
+        # write the updated file into it.
+        overlay.write(os.path.join(client_repo.root,
+                                   page_path),
+                      page_bytes)
+
+        # write a new page
+        overlay.write(os.path.join(client_repo.root,
+                                   new_page_path),
+                      new_page_bytes)
+
+
+        # make a submission bundle
+        raw_zip_bytes = self.make_submission_zip(client_repo)
+
+        #(fms_id, usk_hash, base_version, chk, length)
+        msg_id = 'fake_msg_id_000'
+        submission_tuple = (DEFAULT_SUBMITTER,
+                            '000000000000',
+                            hex_version(server_repo)[:12],
+                            'CHK@fakechk',
+                            len(raw_zip_bytes))
+
+        server_overlay = self.get_hg_overlay(server_repo)
+        server_overlay.version = hex_version(server_repo) # tip
+        server_page_path = os.path.join(server_repo.root, page_path)
+        server_new_page_path = os.path.join(server_repo.root,
+                                            new_page_path)
+
+        self.assertTrue(server_overlay.exists(server_page_path))
+        self.assertTrue(server_overlay.read(server_page_path) ==
+                        final_page_bytes)
+        self.assertTrue(not server_overlay.exists(server_new_page_path))
+
+        # apply the submission bundle to the server repo
+        self.get_applier(server_repo).apply_submission(msg_id,
+                                                       submission_tuple,
+                                                       raw_zip_bytes,
+                                                       os.path.join(
+                                                           self.tmp_dir,
+                                                           '_tmp__applying'))
+        self.assertTrue(server_overlay.exists(server_page_path))
+        self.assertTrue(server_overlay.read(server_page_path) ==
+                        final_page_bytes)
+        self.assertTrue(not server_overlay.exists(server_new_page_path))
+
+        server_overlay.version = hex_version(server_repo) # new tip
+        self.assertTrue(server_overlay.exists(server_page_path))
+
+        # check that the versions are the same
+        self.assertTrue(server_overlay.read(server_page_path) ==
+                        final_page_bytes)
+
+        self.assertTrue(server_overlay.exists(server_new_page_path))
+        self.assertTrue(server_overlay.read(server_new_page_path) ==
+                        new_page_bytes)
+
+        self.assertTrue(not has_forks(server_overlay))
+
 class ConflictTests(RepoTests):
     ############################################################
     # Smoketest create, remove, modify with conflict
@@ -420,6 +578,10 @@ class ConflictTests(RepoTests):
             if new_sha(overlay.read(versioned_path)).hexdigest() != sha_value:
                 print "SHA FAILS: ", versioned_path
                 self.assertTrue(False)
+
+        # quick and dirty test for has forks
+        self.assertTrue(has_forks(overlay))
+
         return True
 
     def test_create_file_conflict(self):
@@ -443,13 +605,14 @@ class ConflictTests(RepoTests):
         overlay = self.get_write_overlay(client_repo)
 
         page_path = 'wiki_root/wikitext/NewPage'
+        client_page_path = os.path.join(client_repo.root, page_path)
         page_bytes = 'Conflicting client side changes.\n\n'
+        self.assertTrue(not overlay.exists(client_page_path))
 
         # write a new file into it.
-        overlay.write(os.path.join(client_repo.root,
-                                   page_path),
-                      page_bytes)
+        overlay.write(client_page_path, page_bytes)
 
         # make a submission bundle
+        self.assertTrue(overlay.exists(client_page_path))
         raw_zip_bytes = self.make_submission_zip(client_repo)
 
         #(fms_id, usk_hash, base_version, chk, length)
diff --git a/infocalypse/wikibot.py b/infocalypse/wikibot.py
--- a/infocalypse/wikibot.py
+++ b/infocalypse/wikibot.py
@@ -36,7 +36,8 @@ from validate import is_hex_string
 from chk import ENCODED_CHK_SIZE
 from fms import TrustCache, to_msg_string
 from fmsbot import FMSBot
-from submission import ForkingSubmissionHandler, REJECT_NOTRUST, REJECT_FCPFAIL
+from submission import ForkingSubmissionHandler, \
+     REJECT_NOTRUST, REJECT_FCPFAIL, REJECT_APPLIED
 from bundlecache import BundleCache, is_writable, make_temp_file
 from updatesm import UpdateContext, UpdateStateMachine, QUIESCENT, FINISHING
@@ -249,6 +250,7 @@ class WikiBot(FMSBot, RequestQueue):
             self._send_update_notification()
 
         if not self.update_sm is None:
+            # Wait until fn-push or freesite insert finishes.
             return
 
         # DCI: Is this working as expected?
@@ -268,6 +270,16 @@ class WikiBot(FMSBot, RequestQueue):
         if self.ctx.should_insert_site():
             self.trace("Starting freesite insertion.")
+
+            # LATER: fix freesite insert to insert from hg instead of
+            #        local file system.
+            # HACK: Force commit, so that we can re-insert reliably.
+            #       Tricky, False keeps applier from calling the
+            #       ctx.commited callback. i.e. so we don't force
+            #       a second fn-push.
+            # S = Site insert, force commit, spurious commit ok.
+            self.applier.force_commit('S', False)
+
             self.ctx.clear_timeout('COMMIT_COALESCE_SECS')
             self._start_freesite_insert()
 
     # Handle a single message
@@ -285,15 +297,22 @@ class WikiBot(FMSBot, RequestQueue):
             self.trace("recv_fms_msg -- couldn't parse submission: %s" %
                        msg_id)
             return
+        self.trace("recv_fms_msg -- parsed: %s" % str(submission))
+
         if not self._has_enough_trust(msg_id, submission,
                                       self.params['NONE_TRUST']):
             self.trace("recv_fms_msg -- not enough trust: %s" % msg_id)
             return
 
-        self.trace("recv_fms_msg -- parsed: %s" % str(submission))
+        # Skip CHK's which have already been applied
+        if self._already_applied(msg_id, submission):
+            self.debug("recv_fms_msg -- skipping, already applied CHK")
+            self.trace("recv_fms_msg -- %s" % submission[3])
+            return
+
+        # Will get picked up by next_runnable.
         self.ctx.queue_submission(msg_id, submission)
-        # Will get picked up by next_runnable.
+
     #----------------------------------------------------------#
 
     def _cleanup_temp_files(self):
@@ -311,6 +330,22 @@ class WikiBot(FMSBot, RequestQueue):
             self.update_sm.cancel()
             self.update_sm.ctx.bundle_cache.remove_files()
 
+    # I keep a separate db instead of using accepted.txt and
+    # rejected.txt because those are only for UI. i.e. can
+    # be pruned. may not be compleat.
+    def _already_applied(self, msg_id, submission):
+        """ Return True and write failure message into rejected.txt if
+            the CHK has already been applied, False otherwise. """
+
+        if not submission[3] in self.ctx.store_applied_requests:
+            return False
+
+        # Store timestamp? how will I prune these values?
+        self.ctx.store_applied_requests[submission[3]] = ''
+        self.applier.update_change_log(msg_id, submission,
+                                       REJECT_APPLIED, False)
+        return True
+
     def _has_enough_trust(self, msg_id, submission, none_trust=0):
         """ INTERNAL: Returns True if the sender is trusted enough
            to commit to the wiki.
diff --git a/infocalypse/wikibotctx.py b/infocalypse/wikibotctx.py
--- a/infocalypse/wikibotctx.py
+++ b/infocalypse/wikibotctx.py
@@ -90,6 +90,8 @@ class WikiBotContext:
             self.store_handled_ids.sync()
         if not self.store_running_requests is None:
             self.store_running_requests.sync()
+        if not self.store_applied_requests is None:
+            self.store_applied_requests.sync()
 
     def setup_dbs(self, params):
         """ Initialize the databases used for persistent storage. """
@@ -106,7 +108,9 @@ class WikiBotContext:
         self.store_running_requests = shelve.open(
             self.parent.parent.get_path(self.parent,
                                         'store_running_requests'))
-
+        self.store_applied_requests = shelve.open(
+            self.parent.parent.get_path(self.parent,
+                                        'store_applied_requests'))
         self.store_info = shelve.open(
             self.parent.parent.get_path(self.parent, 'store_info'))
@@ -147,6 +151,8 @@ class WikiBotContext:
             self.store_handled_ids.close()
         if not self.store_running_requests is None:
             self.store_running_requests.close()
+        if not self.store_applied_requests is None:
+            self.store_applied_requests.close()
         if not self.store_info is None:
             self.store_info.close()
@@ -245,3 +251,15 @@ class WikiBotContext:
         return (len(self.store_running_requests['running']) > 0 or
                 len(self.store_running_requests['queued']) > 0)
+# REDFLAG: revisit during code cleanup
+# pylint error about too many public methods. grrrr...
+#     def already_applied(self, submission):
+#         """ Return True if the submissions CHK has already been applied,
+#             False otherwise.
+
+#             SIDE EFFECT: Adds submission's CHK to the applied list.
+#         """
+#         chk = submission[3]
+#         if chk in self.store_applied_requests:
+#             return True
+#         self.store_applied_requests[chk] = '' # Dummy value.
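For reference, the duplicate-submission guard added in wikibot.py amounts to a membership test against the new store_applied_requests shelve db, keyed by the submission zip's CHK (submission[3]). A standalone sketch of the pattern, with an illustrative shelf path and CHK value:

    import shelve

    store = shelve.open('/tmp/store_applied_requests')  # illustrative path
    try:
        chk = 'CHK@fakechk'  # submission[3]: the CHK of the submission zip
        if chk in store:
            # Duplicate: the bot logs a REJECT_APPLIED entry and skips it.
            print("skipping already applied CHK: %s" % chk)
        else:
            # ... apply the submission, then record its CHK ...
            store[chk] = ''  # dummy value; only key membership matters
            store.sync()
    finally:
        store.close()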