User:Robert Ullmann/Rat Patrol
intro
This is a patrol program for the English Wiktionary.
It must be run from a sysop/admin account.
It requires Python + the Python Wikipedia framework. It also requires Tkinter, which usually comes with Python.
Updated September 2009, re-written version.
code
Copy everything after the source tag to the end of the page and save it as ratpatrol.py in the pywikipedia directory/folder.
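Then run it from a command shell in that folder; with a typical Python 2.6 install, something like "python ratpatrol.py" starts it (the usual way pywikipedia scripts are run).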
<source lang="python">
#!/usr/bin/python
# -*- coding: utf-8 -*-
""" this application patrols the en.wikt
requires some version of the "pywikipedia framework", preferably recent, but need not be updated all the time (;-), most code is within this file
requires Python 2.6
requires Tkinter (which you should have with Python)
in user-config.py, make sure one line reads:
sysopnames['wiktionary']['en'] = "(your username)"
then run! will exit more cleanly if you only use the "quit" button, rather than closing the window
the "console" (on Windoze) or the command shell you started from will display lots of log messages to tell you what it is doing
Buttons:
Edit page -- open a new tab (or window) in default browser in page edit Show diffs -- open a new tab etc showing diffs for revision Skip user -- skip edits by this user for 24 hours Whitelist -- mark all edits seen by by this user for 24 hours (new or old) Skip -- skip this edit for 72 hours (someone else ought to look at it) Mark -- mark this edit patrolled (;-)
large edits won't fit in the differences boxes, use the "show diffs" button to see more the revision list at the bottom will often tell you if a later editor fixed/reverted a bad edit, which you can then mark
"""
import wikipedia
import sys
import re
import pickle
import time
import xmlreader
import socket
from math import floor
from Tkinter import *
import Queue
import threading
from webbrowser import open_new_tab
from difflib import ndiff, unified_diff
import urllib
import config
def srep(s):
    return repr(u'' + s)[2:-1]
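# Example (illustrative only): srep(u'caf\xe9') returns 'caf\\xe9', i.e. the
# repr() escape spelled out in plain ASCII, safe to print on consoles that
# can't handle Unicode.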
plock = threading.Lock()
def log(s):
    with plock: print srep(s)
# slightly odd string syntax in the following is so that it can be saved in wikitext
def unescape(s):
    if '&' not in s: return s
    s = s.replace("&" "lt;", "<")
    s = s.replace("&" "gt;", ">")
    s = s.replace("&" "apos;", "'")
    s = s.replace("&" "quot;", '"')
    s = s.replace("&" "#039;", "'")
    s = s.replace("&" "amp;", "&")  # Must be last
    return s
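# Example (illustrative only):
#   unescape(u'&lt;b&gt;&amp;amp;&lt;/b&gt;') -> u'<b>&amp;</b>'
# doing &amp; last keeps already-unescaped entities from being expanded twice.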
site = None
Quit = False
# ------------------------------------------------------------------------------------------------
class Task():
    def __init__(t, revid = '', pid = '', title = '', user = '', oldid = '',
                 rcid = '', ts = '', summary = ''):
        t.revid = revid
        t.pid = pid
        t.title = title
        t.urlname = urllib.quote(title.encode(site.encoding()))
        t.user = user
        t.oldid = oldid
        t.rcid = rcid
        t.ts = ts
        t.summary = summary
        t.allrevs = None
        t.oldlines = t.newlines = t.revlines = t.reason = u''
        t.done = False
    def __cmp__(s, o):
        # inverse, decreasing [ temp back to increase ]
        if s.ts < o.ts: return -1
        if s.ts > o.ts: return 1
        return 0
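    # (note: this timestamp ordering is what makes the PriorityQueue used for
    # tasq below hand out the oldest unpatrolled edit first)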
# timeout set: a set that elements magically disappear from after a time
# [may need to add thread locks]
from weakref import WeakValueDictionary
from heapq import heappush, heappop
class tmo(float): pass
class timeoutset():

    def __init__(s, timeout):
        s.timeout = timeout
        s.wdict = WeakValueDictionary()
        s.theap = []

    def add(s, key):
        t = tmo(time.clock())
        s.wdict[key] = t
        heappush(s.theap, t)

    def __contains__(s, key):
        while s.theap and s.theap[0] < time.clock() - s.timeout:
            heappop(s.theap)
        return key in s.wdict

    def __len__(s):
        return len(s.theap)
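# Illustrative sketch (comments only, so it doesn't run with the program):
#   s = timeoutset(3600)   # entries expire after an hour
#   s.add(u'7654321')
#   u'7654321' in s        # -> True while the hour lasts
#   ...an hour later...
#   u'7654321' in s        # -> False: the heap entry is popped, and the weak
#                          #    reference in wdict disappears with it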
# all active tasks, not in skip list, key is revid
active = WeakValueDictionary()
# tasks skipped for a few days, values are revids
skipped = timeoutset(96 * 3600)
# whitelisted users
whitelist = timeoutset(24 * 3600)
# users to skip for a while
skipusers = timeoutset(24 * 3600)
# queues: tasks, ready to present, ready to mark
tasq = Queue.PriorityQueue()
readyq = Queue.Queue()
patrolq = Queue.Queue()
- prompt "queue": one task rechecked and ready to display
promptq = Queue.Queue(1)
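# Sketch of how a task moves through the queues (as implemented below):
#   readrc() -> tasq -> preload() -> readyq -> recheck() -> promptq -> UI
# and, once the user (or a whitelist rule) approves it:
#   UI / checktask() -> patrolq -> patrol() -> marked on the wiki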
# check a task to see where it should go, used at several steps
def checktask(task):
    if task.done: return None  # drops task from active presumably

    if task.user in whitelist:
        task.reason = 'user ' + task.user + ' in whitelist'
        patrolq.put(task)
        return None

    if task.title in whitelistpages:
        task.reason = 'page in whitelist'
        patrolq.put(task)
        return None

    if task.user in skipusers:
        skipped.add(task.revid)
        stat("Skipped", len(skipped))
        return None

    return task  # next step as normal
# fairly static list of whitelisted pages (some are also annoying to load lots of revs for, very big ;-)
whitelistpages = [ 'Wiktionary:Requests for verification',
    'Wiktionary:Requests for deletion', 'Wiktionary:Requests for cleanup',
    'Wiktionary:Tea room', 'Wiktionary:Beer parlour', 'Wiktionary:Grease pit',
    'Wiktionary:Requested entries:English', 'Wiktionary:Requested entries:Spanish',
    'Wiktionary:List of protologisms', 'Wiktionary:Translation requests',
    'Wiktionary:Featured word candidates', 'Wiktionary:Feedback',
    'Wiktionary:Information desk', 'Wiktionary:Sandbox', 'Wiktionary talk:Sandbox' ]
# ------------------------------------------------------------------------------------------------
# mwapi interface, a few mods here
from StringIO import StringIO
from gzip import GzipFile
# first, our own read url routine, so we can accept gzip, and be much faster:
class MyURLopener(urllib.FancyURLopener):
    version = "PythonWikipediaBot/1.0"
# since we aren't using the framework 'throttle', do something better
# this is a "tick-tock" timer, shared on all threads
# ticked down on each success, up on each network failure of any type
ticktocklock = threading.Lock()
ticktock = 1.0

def getticktock():
    global ticktock
    return ticktock
relagged = re.compile(r'<error.*"maxlag".* (\d+) seconds')
def readapi(site, request, sysop = True, nomaxlag = False):

    global ticktocklock, ticktock

    url = "http://" + site.hostname() + "/w/api.php?" + request

    done = False
    nap = 5
    maxl = 5
    maxlag = "&maxlag=%d" % maxl
    if nomaxlag: maxlag = ''

    with ticktocklock:
        ticktock *= 0.95  # is -0.025 if 5 seconds, -1.0 at 20 seconds
        ticktock = max(ticktock, 0.1)
        ticktock = min(ticktock, 20.0)
        if ticktock >= 10.0:
            with plock: print "(mwapi readapi: tick tock is %.1f)" % ticktock
    time.sleep(ticktock)
    ticktock -= 1.0  # undo first increment in loop

    while not done:
        ticktock += 1.0  # done w/o lock, race condition is rare, not a serious problem, ignored!
        try:
            uo = MyURLopener()
            uo.addheader('Cookie', site.cookies(sysop = sysop) or '')
            uo.addheader('Accept-Encoding', 'gzip')
            f = uo.open(url + maxlag)
            text = f.read()
            try:
                if 'gzip' in f.info()['Content-Encoding']:
                    text = GzipFile(fileobj=StringIO(text)).read()
            except KeyError: pass
            text = unicode(text, 'UTF-8', errors = 'ignore')
            done = True
        except Exception, e:
            """ report all errors for now:
            if '10054' in repr(e) and nap < 15:
                time.sleep(nap)
                continue # quietly
            """
            with plock: print "(%s: exception reading API: %s)" % \
                 (threading.currentThread().name, repr(e))
            text = ''
            time.sleep(nap)
            nap = min(nap + nap/2, 300)
            continue

        # following is specific to the net I am on; won't break anything for anyone else
        # so don't worry about it; but you can remove this block if desired
        if '<api' not in text and 'NdxICC' in text:
            # silently ignore bad return from Nomadix box
            time.sleep(5)
            done = False
            continue

        # use the MW "server lag" feature to slow down when servers are under heavy load:
        mo = relagged.search(text)
        if mo:
            replag = int(mo.group(1))
            with plock: print "(%s: server lagged %s seconds)" % \
                 (threading.currentThread().name, replag)
            # allow more lag the next time
            maxl += max(maxl/4, replag/20)
            maxlag = "&maxlag=%d" % maxl
            # make some progress even when the server is crocked ...
            if maxl > 600: maxlag = ""
            if maxlag and maxl > 60:
                with plock: print "(mwapi readapi: next with %s)" % maxlag
            # sleep replag if not more than 70
            time.sleep(min(replag, 70))
            done = False
            continue

    return text
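# Example call (illustrative; any well-formed api.php query string would do):
#   xml = readapi(site, "action=query&meta=siteinfo&format=xml")
# readapi adds "&maxlag=" throttling, accepts gzip, and retries on errors.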
# ------------------------------------------------------------------------------------------------
# recent changes, stuff into task queue
def readrc():
    # keep track of recently seen revids, they can appear to be un-patrolled in the reply
    # but we've just now done them ...
    seen = timeoutset(7200)

    # use a regex. this will break sometimes if the API is changed, so we will fix it (:-)
    rerev = re.compile(r'<rc [^>]*title="([^"]*)" rcid="([^"]*)" ' +
            r'pageid="([^"]*)" revid="([^"]*)" old_revid="([^"]*)" user="([^"]*)"' +
            r'[^>]*timestamp="([^"]*)" comment="([^"]*)"')
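    # For reference, a made-up sketch of the kind of <rc> element this regex
    # scrapes (attribute values invented for illustration, not real API output):
    #   <rc type="edit" ns="0" title="water" rcid="12345678" pageid="1234"
    #       revid="7654321" old_revid="7654000" user="SomeUser"
    #       timestamp="2009-09-19T12:34:56Z" comment="tweak" />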
    restar = re.compile(r'rcstart="([^"]*)"')

    nap = 0
    rcstart = ''  # start at maxage
    # temp:
    # rcstart = "&rcstart=2009-09-19T00:00:00Z"
    lastr = time.time()

    while not Quit:
        nf = 0

        with plock: print "reading rc"
        rcs = readapi(site,
             "action=query&list=recentchanges&rcprop=title|ids|user|timestamp|comment" +
             "&rclimit=max&rcshow=!patrolled&rcdir=newer&format=xml" + rcstart)

        for mo in rerev.finditer(rcs):
            title = unescape(mo.group(1))
            rcid = mo.group(2)
            pid = mo.group(3)
            revid = mo.group(4)
            oldid = mo.group(5)
            if oldid == "0": oldid = ""
            user = unescape(mo.group(6))
            ts = mo.group(7)
            summary = unescape(mo.group(8))

            if revid in active: continue
            if revid in skipped: continue
            if revid in seen: continue
            seen.add(revid)

            # probably a good task
            nf += 1
            task = Task(title=title, user=user, revid=revid, oldid=oldid,
                        rcid=rcid, pid=pid, ts=ts, summary=summary)
            active[revid] = task
            stat("Unpatrolled", len(active))

            task = checktask(task)
            if task: tasq.put(task)

            task = None

        # next timestamp forward (not given when we catch up to current)
        mo = restar.search(rcs)
        if mo:
            rcstart = "&rcstart=" + mo.group(1)
        else:
            # 40 minutes ago (must be >> max naptime)
            rcstart = '&rcstart=' + time.strftime('%Y-%m-%dT%H:%M:%SZ',
                                                  time.gmtime(time.time() - 2400))

        # housekeeping (things that otherwise don't get updated when timed out)
        stat("Whitelist users", len(whitelist))
        stat("Skipped users", len(skipusers))
        stat("Unpatrolled", len(active))

        # every few hours or so, start at the beginning again (pick up skips, anything missed)
        if lastr < time.time() - (4 * 3600):
            rcstart = ''
            lastr = time.time()

        # nap time ...
        if nf > 2: nap /= 2
        else: nap = min(nap+20, 350)
        with plock: print "rc found", nf, "next in", nap
        for i in range(nap/5):
            time.sleep(5)
            if Quit: break

    with plock: print "recent changes thread ends"
# ------------------------------------------------------------------------------------------------
# read patrol log
def readpl():
    markedothers = 0

    repat = re.compile(r'<item [^>]*title="([^"]*)"[^>]*user="([^"]*)"[^>]*>' +
            r'\s*<patrol[^>]*prev="([^"]*)" cur="([^"]*)"')

    restar = re.compile(r'lestart="([^"]*)"')

    # start a while ago
    lestart = '&lestart=' + time.strftime('%Y-%m-%dT%H:%M:%SZ',
                                          time.gmtime(time.time() - 2400))

    nap = 10

    while not Quit:

        nf = 0

        if len(active):
            with plock: print "reading patrol log"
            pats = readapi(site,
                 "action=query&list=logevents&letype=patrol&lelimit=max&format=xml" +
                 "&ledir=newer" + lestart)
        else:
            pats = ''  # little point, eh? sleep some more ...

        for mo in repat.finditer(pats):
            title = unescape(mo.group(1))
            user = unescape(mo.group(2))
            prev = mo.group(3)  # oldid
            cur = mo.group(4)   # revid

            task = None
            if cur in active:
                try: task = active[cur]
                except KeyError: pass  # race with GC
            if not task or task.done: continue

            with plock: print "rev %s of %s patrolled by %s" % (srep(cur), srep(title), srep(user))
            task.done = True  # we have no idea where it is ... (:-)
            markedothers += 1
            stat("Marked by others", markedothers)
            nf += 1

            task = None

        # next timestamp forward (not given when we catch up to current)
        mo = restar.search(pats)
        if mo:
            lestart = "&lestart=" + mo.group(1)
        else:
            # 40 minutes ago (must be >> max naptime)
            lestart = '&lestart=' + time.strftime('%Y-%m-%dT%H:%M:%SZ',
                                                  time.gmtime(time.time() - 2400))

        if nf: nap /= 2
        else: nap = min(nap+20, 700)
        for i in range(0, nap/5):
            if Quit: break
            time.sleep(5)

    with plock: print "read patrol log thread ends"
# ------------------------------------------------------------------------------------------------
# mark edits
def patrol():
    markedbyme = 0
    whitelisted = 0

    while not Quit:
        try: task = patrolq.get(timeout=20)
        except Queue.Empty: continue

        # can be marked and patrolled by someone else
        if task.done:
            task = None
            continue

        # set here, race with read log, that often picks it up before we get the reply!
        task.done = True

        resp = site.getUrl("/w/index.php?title=%s&action=markpatrolled&rcid=%s" % \
                (task.urlname, task.rcid), sysop = True)

        if "Marked as patrolled" not in resp:
            log("failed to patrol %s of %s" % (task.rcid, task.title))
            for line in resp.splitlines(): print srep(line)
            task = None
            time.sleep(30)
            # will presumably pick it up again eventually, if not marked by another
            # if it did in fact succeed, no matter ...
            continue

        if not task.reason:
            markedbyme += 1
            stat("Marked by me", markedbyme)
            with plock: print "patrolled %s of %s, my mark" % (srep(task.revid), srep(task.title))
        else:
            # presume it was marked by whitelisting the user or page
            whitelisted += 1
            stat("Whitelisted", whitelisted)
            with plock: print "patrolled %s of %s, %s" % \
                 (srep(task.revid), srep(task.title), srep(task.reason))

        task = None
        stat("Unpatrolled", len(active))

        time.sleep(5)  # no hurry

    with plock: print "patrol thread ends"
# ------------------------------------------------------------------------------------------------
# preload, read from task q, write to ready q
def preload():
    rever = re.compile(r'<rev revid="([^"]*)"[^>]*user="([^"]*)"' +
            r'[^>]*timestamp="([^"]*)"\s*(comment="[^"]*"|)[^>]*>(.*?)</rev>', re.S)

    # cache of previously read revisions, kept as long as some other task has them
    # key is title, value is the other task (can't just be a unicode string, must be an object)
    revcache = WeakValueDictionary()

    while not Quit:
        try: task = tasq.get(timeout=20)
        except Queue.Empty: continue
        task = checktask(task)
        if not task: continue

        log('preload for ' + task.title)

        # see if we have revs already
        revs = None
        if task.title in revcache:
            try:
                ot = revcache[task.title]
                revs = ot.allrevs  # from other task
                ot = None
            except KeyError: pass

        if not revs:
            with plock: print "reading 20 revs for", srep(task.title)
            revs = readapi(site,
                "action=query&prop=revisions|info&rvprop=timestamp|user|comment|content|ids&format=xml"
                "&titles=" + task.urlname + "&rvlimit=20")

        # now we want to see if we have enough; maybe not, and maybe stale
        if 'revid="' + task.revid + '"' not in revs: revs = ''
        if task.oldid and 'revid="' + task.oldid + '"' not in revs: revs = ''

        # if not, do it again at 200
        if not revs:
            with plock: print "reading 200 revs for", srep(task.title)
            revs = readapi(site,
                "action=query&prop=revisions|info&rvprop=timestamp|user|comment|content|ids&format=xml"
                "&titles=" + task.urlname + "&rvlimit=200")

        # check again!
        if 'revid="' + task.revid + '"' not in revs: revs = ''
        if task.oldid and 'revid="' + task.oldid + '"' not in revs: revs = ''

        if not revs:
            # can happen on page deletes, moves, stuff
            with plock: print "can't find needed old revs for %s, skipping task!" % srep(task.title)
            skipped.add(task.revid)
            stat("Skipped", len(skipped))
            task = None
            stat("Unpatrolled", len(active))
            continue

        task.allrevs = revs
        revcache[task.title] = task  # now available for other tasks on the same title

        # now find the revs we want, and make a list ...
        oldrevtext = newrevtext = ''
        replinelist = [ ]

        for mo in rever.finditer(revs):
            revid = mo.group(1)
            if revid == task.revid: mark = '*'
            else: mark = ' '
            user = unescape(mo.group(2))
            ts = mo.group(3)
            ts = ts.replace('T', ' ')
            ts = ts.replace('Z', ' ')
            comment = unescape(mo.group(4)[9:-1])
            text = unescape(mo.group(5))

            if len(replinelist) < 10:
                replinelist.append("%s %s %s: (%s)" % (ts, mark, user, comment))
                # with plock: print "debug match rev", srep(ts), srep(user), srep(comment)
            elif revid == task.revid:
                # add to list with ellipsis (;-)
                replinelist = replinelist[:-2]
                replinelist.append(" [ ... ]")
                replinelist.append("%s %s %s: (%s)" % (ts, mark, user, comment))

            if revid == task.revid: newrevtext = text
            if revid == task.oldid: oldrevtext = text

        # should have always been found?
        if not newrevtext:
            with plock: print "what? can't match new revtext in revs?"
            continue  # discard task?

        task.revlines = u'\n'.join(replinelist)

        # differences
        """ old
        for delta in ndiff(oldrevtext.splitlines(), newrevtext.splitlines()):
            delta = unescape(delta)
            if delta.startswith('- '): task.oldlines += delta[2:] + '\n'
            elif delta.startswith('+ '): task.newlines += delta[2:] + '\n'
            # ignore ' ' and '? ' lines, might do something with context later?
        """
        for delta in unified_diff(oldrevtext.splitlines(), newrevtext.splitlines(), n=1):
            delta = unescape(delta)
            if delta.startswith('--'): pass
            elif delta.startswith('++'): pass
            elif delta.startswith('-'): task.oldlines += delta[1:] + '\n'
            elif delta.startswith('+'): task.newlines += delta[1:] + '\n'
            elif delta.startswith(' '):
                task.oldlines += delta[1:] + '\n'
                task.newlines += delta[1:] + '\n'
            # ignore other lines
        readyq.put(task)
        task = None

        nap = readyq.qsize() / 20
        for i in range(nap/5):
            time.sleep(5)
            if Quit: break

    with plock: print "preload thread ends"
# ------------------------------------------------------------------------------------------------
# recheck task, just before presenting it to the user
# reverted edits don't show as patrolled in the log
# and the page may be (often is) deleted
# so we look for one row in the RC table to tell us if the edit is still there
# has to run as a thread or it hangs the UI
# takes a task off of the readyq and stuffs it in the prompt "queue" (placeholder for one entry)
# note we can't select on rcid (which would be cool, and obvious), so we look for everything
# with the exact timestamp
reclines = re.compile(r'<rc[^>]*rcid="(\d*)"[^>]*>')
def recheck():
    while not Quit:

        # get a new task:
        try: task = readyq.get_nowait()
        except Queue.Empty:
            time.sleep(5)
            continue

        task = checktask(task)
        if not task or task.done:
            task = None
            continue

        rcs = readapi(site, "action=query&list=recentchanges&rcprop=ids|patrolled" +
              "&rclimit=20&format=xml&rcstart=%s&rcend=%s" % (task.ts, task.ts), nomaxlag = True)
        # print srep(rcs)

        for mo in reclines.finditer(rcs):
            if mo.group(1) != task.rcid: continue
            if "patrolled" in mo.group(0):
                with plock: print "rev %s of %s patrolled by someone" % (srep(task.revid), srep(task.title))
                task = None
            else:
                promptq.put(task)
                task = None
                while promptq.qsize() > 0: time.sleep(1)  # wait for it to be eaten
            break

        # not found?
        if task:
            with plock: print "rev %s of %s apparently deleted" % (srep(task.revid), srep(task.title))
            task = None
            continue

    with plock: print "recheck thread ends"
# ------------------------------------------------------------------------------------------------
# now the tkinter stuff:
# could do a fancy class with attributes and methods, but instead keep it simple, just share some stuff
root = None
status = None
statboxes = { }
tkmessages = Queue.Queue()
def stat(n, v): tkmessages.put( (n, v) )
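# Example (illustrative): stat("Skipped", 12) just queues the update; tkmess()
# below applies it to the matching Label from the Tk thread, since Tk widgets
# are best touched only from the thread running the mainloop.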
# messages to update stats from other threads
def tkmess():
    while tkmessages.qsize():
        try:
            lab, val = tkmessages.get()
            statboxes[lab].config(text=val)
            statboxes[lab].update_idletasks()
        except Queue.Empty: pass

    root.after(200, tkmess)
# oldest (?) unpatrolled page data
# shared things, just easier this way, all belong to the tk run thread
oldboxes = { }
oldedit = None  # current task being presented
oldlines = None
newlines = None
revlines = None
showdiffb = None
oldlineslabel = None
newlineslabel = None
def get_next_oldpage():
    global oldboxes, oldedit
    global showdiffb, oldlineslabel, newlineslabel

    oldedit = None

    oldboxes['Title'].config(text='')
    oldboxes['User'].config(text='')
    oldboxes['Summary'].config(text='')
    oldlines.config(text='')
    newlines.config(text='')
    revlines.config(text='')

    try: oldedit = promptq.get_nowait()
    except Queue.Empty:
        root.after(5000, get_next_oldpage)
        return

    oldedit = checktask(oldedit)
    if not oldedit:
        # recall immediately:
        root.after(20, get_next_oldpage)
        return

    if oldedit.oldid:
        oldlineslabel.config(text='Old lines')
        newlineslabel.config(text='New lines')
        showdiffb.config(text='Show diffs')
    else:
        oldlineslabel.config(text='')
        newlineslabel.config(text='Page text')
        showdiffb.config(text='Show page')

    oldboxes['Title'].config(text=oldedit.title)
    oldboxes['User'].config(text=oldedit.user)
    oldboxes['Summary'].config(text=oldedit.summary)
    oldlines.config(text=oldedit.oldlines)
    newlines.config(text=oldedit.newlines)
    revlines.config(text=oldedit.revlines)
    newlines.update_idletasks()

    return
def mark_edit():
    if not oldedit: return

    patrolq.put(oldedit)

    get_next_oldpage()
    return
def show_diff():
    if not oldedit: return

    if oldedit.oldid:
        open_new_tab("http://en.wiktionary.org/w/index.php?title=%s&diff=next&oldid=%s"
                     % (oldedit.urlname, oldedit.oldid))
    else:
        open_new_tab("http://en.wiktionary.org/w/index.php?title=%s" % oldedit.urlname)

    return
def edit_page():
    if not oldedit: return

    open_new_tab("http://en.wiktionary.org/w/index.php?title=%s&action=edit" % oldedit.urlname)

    return
def skip_edit():
    global oldedit

    if not oldedit: return

    skipped.add(oldedit.revid)
    stat("Skipped", len(skipped))
    oldedit = None
    stat("Unpatrolled", len(active))

    get_next_oldpage()
    return
def skip_user():
    global oldedit
    if not oldedit: return

    skipusers.add(oldedit.user)
    stat("Skipped users", len(skipusers))

    # continue to skip this edit
    skip_edit()
    return
def whitelist_user():
    global oldedit
    if not oldedit: return

    whitelist.add(oldedit.user)
    stat("Whitelist users", len(whitelist))

    # continue to mark this edit
    mark_edit()
    return
def rats_quit():
    global Quit

    Quit = True
    log("Quitting ...")
    time.sleep(20)

    root.quit()
# main program runs the tkinter loop
def main():
    global site

    socket.setdefaulttimeout(140)

    site = wikipedia.getSite("en", "wiktionary")
    site.forceLogin(sysop = True)
    # force this off, sysop is never a bot (framework bug)
    config.notify_unflagged_bot = False

    # things shared with subs
    global root, oldlines, newlines, revlines
    global showdiffb, oldlineslabel, newlineslabel

    rct = threading.Thread(target=readrc)
    rct.daemon = True
    rct.name = 'read recent changes'

    plt = threading.Thread(target=readpl)
    plt.daemon = True
    plt.name = 'read patrol log'

    prt = threading.Thread(target=preload)
    prt.daemon = True
    prt.name = 'preload'

    pat = threading.Thread(target=patrol)
    pat.daemon = True
    pat.name = 'mark patrol'

    rkt = threading.Thread(target=recheck)
    rkt.daemon = True
    rkt.name = 'recheck revision'

    root = Tk()
    root.title('Rat Patrol')

    font = ('Arial', 10)
    fontb = ('Arial', 10, 'bold')

    # pack from bottom, then left to right at top:

    revlines = Label(root, width=97, height=10, justify=LEFT, anchor=W, font=font,
                     bg='#fff', relief=RIDGE)
    revlines.pack(side=BOTTOM, padx=5, pady=5)

    # button bar

    bbox = Frame(root)
    bbox.pack(side=BOTTOM, fill=X, padx=10, pady=5)

    editpageb = Button(bbox, text="Edit page", width=11, font=font, command=edit_page)
    editpageb.pack(side=LEFT)

    showdiffb = Button(bbox, text="Show diffs", width=11, font=font, command=show_diff)
    showdiffb.pack(side=LEFT)

    skipuserb = Button(bbox, text="Skip user", width=11, font=font, command=skip_user)
    skipuserb.pack(side=LEFT)

    wluserb = Button(bbox, text="Whitelist", width=11, font=font, command=whitelist_user)
    wluserb.pack(side=LEFT)

    skipeditb = Button(bbox, text="Skip", width=8, font=font, command=skip_edit)
    skipeditb.pack(side=LEFT)

    markeditb = Button(bbox, text="Mark", width=8, font=font, command=mark_edit)
    markeditb.pack(side=LEFT)

    quitb = Button(bbox, text='Quit', width=10, font=font, command=rats_quit)
    quitb.pack(side=RIGHT)

    # differences

    dbox = Frame(root)
    dbox.pack(side=BOTTOM, padx=10, pady=5)

    obox = Frame(dbox)
    obox.pack(side=LEFT)
    oldlineslabel = Label(obox, text="Old lines", width=24, font=fontb, justify=LEFT, anchor=W)
    oldlineslabel.pack(side=TOP, fill=X)
    oldlines = Label(obox, width=48, height=8, justify=LEFT, anchor=W, font=font,
                     bg='#fff', relief=RIDGE)
    oldlines.config(wraplength = oldlines.winfo_reqwidth() - 8)
    oldlines.pack(side=TOP)

    nbox = Frame(dbox)
    nbox.pack(side=LEFT)
    newlineslabel = Label(nbox, text="New lines", width=24, font=fontb, justify=LEFT, anchor=W)
    newlineslabel.pack(side=TOP, fill=X)
    newlines = Label(nbox, width=48, height=8, justify=LEFT, anchor=W, font=font,
                     bg='#fff', relief=RIDGE)
    newlines.config(wraplength = newlines.winfo_reqwidth() - 8)
    newlines.pack(side=TOP)

    # statistics frame, boxes
    stats = Frame(root)
    stats.pack(side=LEFT, padx=10, pady=5)

    statframes = { }
    statlabels = { }
    for lab in [ 'Unpatrolled', 'Marked by me', 'Marked by others', 'Whitelisted',
                 'Whitelist users', 'Skipped users', 'Skipped' ]:
        statframes[lab] = Frame(stats)
        statframes[lab].pack(side=TOP)
        statlabels[lab] = Label(statframes[lab], text=lab+':', width=15, font=font,
                                justify=LEFT, anchor=W)
        statlabels[lab].pack(side=LEFT)
        statboxes[lab] = Label(statframes[lab], text='0', width=10, font=font,
                               justify=RIGHT, anchor=E, bg='#fff', relief=RIDGE)
        statboxes[lab].pack(side=RIGHT)

    ebox = Frame(root)
    ebox.pack(side=LEFT, padx=10, pady=5, fill=X)

    oldframes = { }
    oldlabels = { }
    oldtoplabel = Label(ebox, text="Oldest unpatrolled edit", width=24, font=fontb,
                        justify=LEFT, anchor=W)
    oldtoplabel.pack(side=TOP, fill=X)

    for lab in [ 'Title', 'User', 'Summary' ]:
        oldframes[lab] = Frame(ebox)
        oldframes[lab].pack(side=TOP)
        oldlabels[lab] = Label(oldframes[lab], text=lab+':', width=15, font=font,
                               justify=LEFT, anchor=NW)
        oldlabels[lab].pack(side=LEFT)
        oldboxes[lab] = Label(oldframes[lab], text='', width=48, font=font,
                              justify=LEFT, anchor=W, bg='#fff', relief=RIDGE)
        oldboxes[lab].pack(side=RIGHT)
    oldlabels['Summary'].config(text='Summary:\n\n\n')  # (hack ;-)
    oldboxes['Summary'].config(height = 4)
    oldboxes['Summary'].config(wraplength = oldboxes['Summary'].winfo_reqwidth() - 8)

    root.after(200, tkmess)
    root.after(200, get_next_oldpage)

    rct.start()
    plt.start()
    prt.start()
    pat.start()
    rkt.start()
    root.mainloop()
if __name__ == "__main__":
    try: main()
    finally: wikipedia.stopme()

</source>