Resync all running ubottu code to the bzr branch; they should now be in sync again
@ -21,7 +21,7 @@ import supybot.world as world
|
|||||||
__version__ = "0.3.1"
|
__version__ = "0.3.1"
|
||||||
__author__ = supybot.Author("Dennis Kaarsemaker","Seveas","dennis@kaarsemaker.net")
|
__author__ = supybot.Author("Dennis Kaarsemaker","Seveas","dennis@kaarsemaker.net")
|
||||||
__contributors__ = {supybot.Author("Terence Simpson", "stdin", "stdin@stdin.me.uk"): ['Allow configurable bantracker URL']}
|
__contributors__ = {supybot.Author("Terence Simpson", "stdin", "stdin@stdin.me.uk"): ['Allow configurable bantracker URL']}
|
||||||
__url__ = 'https://ubotu.ubuntu-nl.org'
|
__url__ = 'http://ubottu.com/'
|
||||||
|
|
||||||
import config
|
import config
|
||||||
reload(config)
|
reload(config)
|
||||||
|
@ -15,15 +15,12 @@
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
# This needs to be set to the location of the commoncgi.py file
|
# This needs to be set to the location of the commoncgi.py file
|
||||||
sys.path.append('/home/ubottu/bot/plugins/')
|
sys.path.append('/var/www/bot')
|
||||||
from commoncgi import *
|
from commoncgi import *
|
||||||
|
|
||||||
### Variables
|
### Variables
|
||||||
# Location of the bans database
|
db = '/home/bot/data/bans.db'
|
||||||
db = '/home/ubottu/data/bans.db'
|
|
||||||
# Number of bans to show per page
|
|
||||||
num_per_page = 100
|
num_per_page = 100
|
||||||
### You shouldn't have to change anything under this line ###
|
|
||||||
|
|
||||||
con = sqlite.connect(db)
|
con = sqlite.connect(db)
|
||||||
cur = con.cursor()
|
cur = con.cursor()
|
||||||
@ -33,18 +30,10 @@ error = ''
|
|||||||
user = None
|
user = None
|
||||||
|
|
||||||
# Delete old sessions
|
# Delete old sessions
|
||||||
# FAIL!
|
|
||||||
try:
|
try:
|
||||||
cur.execute("DELETE FROM sessions WHERE time < %d", int(time.mktime(time.gmtime())) - 2592000 * 3)
|
cur.execute("""DELETE FROM sessions WHERE time < %d""", int(time.time()) - 2592000 * 3)
|
||||||
con.commit()
|
|
||||||
con.close()
|
|
||||||
except:
|
except:
|
||||||
try:
|
pass
|
||||||
con.commit()
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
con.close()
|
|
||||||
|
|
||||||
# Session handling
|
# Session handling
|
||||||
if form.has_key('sess'):
|
if form.has_key('sess'):
|
||||||
@ -52,15 +41,10 @@ if form.has_key('sess'):
|
|||||||
if cookie.has_key('sess'):
|
if cookie.has_key('sess'):
|
||||||
try:
|
try:
|
||||||
sess = cookie['sess'].value
|
sess = cookie['sess'].value
|
||||||
con = sqlite.connect(db)
|
cur.execute("""SELECT user FROM sessions WHERE session_id=%s""",sess)
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute("SELECT user FROM sessions WHERE session_id=%s",sess)
|
|
||||||
user = cur.fetchall()[0][0]
|
user = cur.fetchall()[0][0]
|
||||||
con.commit()
|
|
||||||
con.close()
|
|
||||||
except:
|
except:
|
||||||
con.commit()
|
con.commit()
|
||||||
con.close()
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if not user:
|
if not user:
|
||||||
@ -70,12 +54,9 @@ if not user:
|
|||||||
|
|
||||||
# Log
|
# Log
|
||||||
if form.has_key('log'):
|
if form.has_key('log'):
|
||||||
con = sqlite.connect(db)
|
cur.execute("""SELECT log FROM bans WHERE id=%s""", form['log'].value)
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute("SELECT log FROM bans WHERE id=%s", form['log'].value)
|
|
||||||
log = cur.fetchall()
|
log = cur.fetchall()
|
||||||
con.commit()
|
con.commit()
|
||||||
con.close()
|
|
||||||
if form.has_key('mark'):
|
if form.has_key('mark'):
|
||||||
marked = form['mark'].value
|
marked = form['mark'].value
|
||||||
lines = log[0][0].splitlines()
|
lines = log[0][0].splitlines()
|
||||||
@ -91,19 +72,12 @@ if form.has_key('log'):
|
|||||||
# Main page
|
# Main page
|
||||||
# Process comments
|
# Process comments
|
||||||
if form.has_key('comment') and form.has_key('comment_id') and user:
|
if form.has_key('comment') and form.has_key('comment_id') and user:
|
||||||
con = sqlite.connect(db)
|
cur.execute("""SELECT ban_id FROM comments WHERE ban_id=%s and comment=%s""", (form['comment_id'].value, form['comment'].value))
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute("SELECT ban_id FROM comments WHERE ban_id=%s and comment=%s", (form['comment_id'].value, form['comment'].value))
|
|
||||||
comm = cur.fetchall()
|
comm = cur.fetchall()
|
||||||
con.commit()
|
|
||||||
con.close()
|
|
||||||
if not len(comm):
|
if not len(comm):
|
||||||
con = sqlite.connect(db)
|
cur.execute("""INSERT INTO comments (ban_id, who, comment, time) VALUES (%s, %s, %s, %s)""",
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute("INSERT INTO comments (ban_id, who, comment, time) VALUES (%s, %s, %s, %s)",
|
|
||||||
(form['comment_id'].value,user,form['comment'].value,pickle.dumps(datetime.datetime.now(pytz.UTC))))
|
(form['comment_id'].value,user,form['comment'].value,pickle.dumps(datetime.datetime.now(pytz.UTC))))
|
||||||
con.commit()
|
con.commit()
|
||||||
con.close()
|
|
||||||
|
|
||||||
# Write the page
|
# Write the page
|
||||||
print '<form action="bans.cgi" method="POST">'
|
print '<form action="bans.cgi" method="POST">'
|
||||||
@ -180,14 +154,11 @@ if not form.has_key('query'):
|
|||||||
if form.has_key('sort'):
|
if form.has_key('sort'):
|
||||||
sort='&sort=' + form['sort'].value
|
sort='&sort=' + form['sort'].value
|
||||||
print '<div style="clear: both">·'
|
print '<div style="clear: both">·'
|
||||||
con = sqlite.connect(db)
|
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute('SELECT COUNT(id) FROM bans')
|
cur.execute('SELECT COUNT(id) FROM bans')
|
||||||
nump = math.ceil(int(cur.fetchall()[0][0]) / float(num_per_page))
|
nump = int(math.ceil(int(cur.fetchall()[0][0]) / float(num_per_page)))
|
||||||
for i in range(int(nump)):
|
for i in range(nump):
|
||||||
print '<a href="bans.cgi?page=%d%s">%d</a> ·' % (i, sort, i+1)
|
print '<a href="bans.cgi?page=%d%s">%d</a> ·' % (i, sort, i+1)
|
||||||
print '</div>'
|
print '</div>'
|
||||||
con.close()
|
|
||||||
|
|
||||||
# Empty log div, will be filled with AJAX
|
# Empty log div, will be filled with AJAX
|
||||||
print '<div id="log" class="log"> </div>'
|
print '<div id="log" class="log"> </div>'
|
||||||
@ -206,11 +177,8 @@ for h in [['Channel',0], ['Nick/Mask',1], ['Operator',2], ['Time',6]]:
|
|||||||
print '<th>Log</th></tr>'
|
print '<th>Log</th></tr>'
|
||||||
|
|
||||||
# Select and filter bans
|
# Select and filter bans
|
||||||
con = sqlite.connect(db)
|
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute("SELECT channel,mask,operator,time,removal,removal_op,id FROM bans ORDER BY id DESC")
|
cur.execute("SELECT channel,mask,operator,time,removal,removal_op,id FROM bans ORDER BY id DESC")
|
||||||
bans = cur.fetchall()
|
bans = cur.fetchall()
|
||||||
con.close()
|
|
||||||
|
|
||||||
def myfilter(item, regex, kick, ban, oldban, mute, oldmute, floods, operator, channel):
|
def myfilter(item, regex, kick, ban, oldban, mute, oldmute, floods, operator, channel):
|
||||||
if operator:
|
if operator:
|
||||||
@ -323,11 +291,8 @@ for b in bans[start:end]:
|
|||||||
print ' class="bg2"'
|
print ' class="bg2"'
|
||||||
print '>'
|
print '>'
|
||||||
print '<td colspan="5" class="comment">'
|
print '<td colspan="5" class="comment">'
|
||||||
con = sqlite.connect(db)
|
cur.execute("""SELECT who, comment, time FROM comments WHERE ban_id = %s""" % b[6])
|
||||||
cur = con.cursor()
|
|
||||||
cur.execute("SELECT who, comment, time FROM comments WHERE ban_id = %s" % b[6])
|
|
||||||
comments = cur.fetchall()
|
comments = cur.fetchall()
|
||||||
con.close()
|
|
||||||
if len(comments) == 0:
|
if len(comments) == 0:
|
||||||
print '<span class="removal">(No comments) </span>'
|
print '<span class="removal">(No comments) </span>'
|
||||||
else:
|
else:
|
||||||
@ -336,14 +301,15 @@ for b in bans[start:end]:
|
|||||||
print u' <span class="removal"><br />%s, %s</span><br />' % \
|
print u' <span class="removal"><br />%s, %s</span><br />' % \
|
||||||
(c[0],pickle.loads(c[2]).astimezone(tz).strftime("%b %d %Y %H:%M:%S"))
|
(c[0],pickle.loads(c[2]).astimezone(tz).strftime("%b %d %Y %H:%M:%S"))
|
||||||
if user:
|
if user:
|
||||||
print '<span class="pseudolink" onclick="toggle(\'%s\',\'comment\')">Add comment</span>' % b[6]
|
print """<span class="pseudolink" onclick="toggle('%s','comment')">Add comment</span>""" % b[6]
|
||||||
print '<div class="invisible" id="comment_%s"><br />' % b[6]
|
print """<div class="invisible" id="comment_%s"><br />""" % b[6]
|
||||||
print '<form action="bans.cgi" method="POST"><textarea cols="50" rows="5" class="input" name="comment"></textarea><br />'
|
print """<form action="bans.cgi" method="POST"><textarea cols="50" rows="5" class="input" name="comment"></textarea><br />"""
|
||||||
print '<input type="hidden" name="comment_id" value="%s" />' % b[6]
|
print """<input type="hidden" name="comment_id" value="%s" />""" % b[6]
|
||||||
print '<input class="submit" type="submit" value="Send" /></form>'
|
print """<input class="submit" type="submit" value="Send" /></form>"""
|
||||||
print '</div></td></tr>'
|
print '</td></tr>'
|
||||||
|
|
||||||
print '</table>'
|
print '</table>'
|
||||||
|
|
||||||
if not bans and form.has_key('query'):
|
if not bans and form.has_key('query'):
|
||||||
if chan and oper:
|
if chan and oper:
|
||||||
print "<center><u>No matches for:</u> "%s" in %s by %s;</center>" % (form['query'].value, chan, oper)
|
print "<center><u>No matches for:</u> "%s" in %s by %s;</center>" % (form['query'].value, chan, oper)
|
||||||
|
@ -22,6 +22,6 @@ Bantracker = conf.registerPlugin('Bantracker')
|
|||||||
conf.registerChannelValue(conf.supybot.plugins.Bantracker, 'enabled',
|
conf.registerChannelValue(conf.supybot.plugins.Bantracker, 'enabled',
|
||||||
registry.Boolean(False, """Enable the bantracker"""))
|
registry.Boolean(False, """Enable the bantracker"""))
|
||||||
conf.registerGlobalValue(conf.supybot.plugins.Bantracker, 'database',
|
conf.registerGlobalValue(conf.supybot.plugins.Bantracker, 'database',
|
||||||
registry.String('', "Filename of the bans database",private=True))
|
registry.String('', "Filename of the bans database", private=True))
|
||||||
conf.registerGlobalValue(conf.supybot.plugins.Bantracker, 'bansite',
|
conf.registerGlobalValue(conf.supybot.plugins.Bantracker, 'bansite',
|
||||||
registry.String('', "Web site for the bantracker, without the 'bans.cgi' appended", private=True))
|
registry.String('', "Web site for the bantracker, without the 'bans.cgi' appended", private=True))
|
||||||
|
@ -103,11 +103,11 @@ class MsgQueue(object):
|
|||||||
def clear(self):
|
def clear(self):
|
||||||
self.msgcache = []
|
self.msgcache = []
|
||||||
def dequeue(self, parent, irc):
|
def dequeue(self, parent, irc):
|
||||||
parent.thread_timer = threading.Timer(30.0, dequeue, args=(parent, irc))
|
parent.thread_timer.cancel()
|
||||||
|
parent.thread_timer = threading.Timer(10.0, dequeue, args=(parent, irc))
|
||||||
if len(self.msgcache):
|
if len(self.msgcache):
|
||||||
msg = self.msgcache.pop(0)
|
msg = self.msgcache.pop(0)
|
||||||
if irc:
|
irc.queueMsg(msg)
|
||||||
irc.queueMsg(msg)
|
|
||||||
parent.thread_timer.start()
|
parent.thread_timer.start()
|
||||||
|
|
||||||
queue = MsgQueue()
|
queue = MsgQueue()
|
||||||
@ -153,11 +153,10 @@ class Bantracker(callbacks.Plugin):
|
|||||||
self.replies = {}
|
self.replies = {}
|
||||||
self.logs = {}
|
self.logs = {}
|
||||||
self.nicks = {}
|
self.nicks = {}
|
||||||
self.bans = {}
|
|
||||||
self.nicks = {}
|
|
||||||
self.hosts = {}
|
self.hosts = {}
|
||||||
|
self.bans = {}
|
||||||
|
|
||||||
self.thread_timer = threading.Timer(30.0, dequeue, args=(self,irc))
|
self.thread_timer = threading.Timer(10.0, dequeue, args=(self,irc))
|
||||||
self.thread_timer.start()
|
self.thread_timer.start()
|
||||||
|
|
||||||
db = self.registryValue('database')
|
db = self.registryValue('database')
|
||||||
@ -199,9 +198,7 @@ class Bantracker(callbacks.Plugin):
|
|||||||
"""/whois"""
|
"""/whois"""
|
||||||
nick = msg.args[1].lower()
|
nick = msg.args[1].lower()
|
||||||
mask = "%s!%s@%s" % (nick, msg.args[2].lower(), msg.args[3].lower())
|
mask = "%s!%s@%s" % (nick, msg.args[2].lower(), msg.args[3].lower())
|
||||||
mask = mask.lower()
|
|
||||||
self.nicks[nick] = mask
|
self.nicks[nick] = mask
|
||||||
host = mask.split('@', 1)[1]
|
|
||||||
if nick in self.replies:
|
if nick in self.replies:
|
||||||
f = getattr(self, "real_%s" % self.replies[nick][0])
|
f = getattr(self, "real_%s" % self.replies[nick][0])
|
||||||
args = self.replies[nick][1]
|
args = self.replies[nick][1]
|
||||||
@ -213,7 +210,6 @@ class Bantracker(callbacks.Plugin):
|
|||||||
"""/whowas"""
|
"""/whowas"""
|
||||||
nick = msg.args[1].lower()
|
nick = msg.args[1].lower()
|
||||||
mask = "%s!%s@%s" % (nick, msg.args[2].lower(), msg.args[3].lower())
|
mask = "%s!%s@%s" % (nick, msg.args[2].lower(), msg.args[3].lower())
|
||||||
mask = mask.lower()
|
|
||||||
if not nick in self.nicks:
|
if not nick in self.nicks:
|
||||||
self.nicks[nick] = mask
|
self.nicks[nick] = mask
|
||||||
if nick in self.replies:
|
if nick in self.replies:
|
||||||
@ -253,14 +249,17 @@ class Bantracker(callbacks.Plugin):
|
|||||||
try:
|
try:
|
||||||
return irc.state.nickToHostmask(target)
|
return irc.state.nickToHostmask(target)
|
||||||
except:
|
except:
|
||||||
self.sendWhois(irc, target)
|
|
||||||
if reply_now:
|
if reply_now:
|
||||||
self.sendWhois(irc, target)
|
|
||||||
if with_nick:
|
if with_nick:
|
||||||
return "%s!*@*" % target
|
return "%s!*@*" % target
|
||||||
return "*@*"
|
return "*@*"
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if target in self.nicks:
|
||||||
|
return self.nicks[target]
|
||||||
|
else:
|
||||||
|
return "%s!*@*" % target
|
||||||
|
|
||||||
def die(self):
|
def die(self):
|
||||||
global queue
|
global queue
|
||||||
if self.db:
|
if self.db:
|
||||||
@ -283,9 +282,9 @@ class Bantracker(callbacks.Plugin):
|
|||||||
pass
|
pass
|
||||||
queue.clear()
|
queue.clear()
|
||||||
# self.logs.clear()
|
# self.logs.clear()
|
||||||
# self.nicks.clear()
|
|
||||||
self.lastMsgs.clear()
|
self.lastMsgs.clear()
|
||||||
self.lastStates.clear()
|
self.lastStates.clear()
|
||||||
|
# self.nicks.clear()
|
||||||
|
|
||||||
def __call__(self, irc, msg):
|
def __call__(self, irc, msg):
|
||||||
try:
|
try:
|
||||||
@ -306,13 +305,14 @@ class Bantracker(callbacks.Plugin):
|
|||||||
cur = self.db.cursor()
|
cur = self.db.cursor()
|
||||||
cur.execute(query, parms)
|
cur.execute(query, parms)
|
||||||
except:
|
except:
|
||||||
|
cur = None
|
||||||
if n_tries > 5:
|
if n_tries > 5:
|
||||||
print "Tried more than 5 times, aborting"
|
print "Tried more than 5 times, aborting"
|
||||||
raise
|
raise
|
||||||
n_tries += 1
|
n_tries += 1
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
data = None
|
data = None
|
||||||
if expect_result: data = cur.fetchall()
|
if expect_result and cur: data = cur.fetchall()
|
||||||
if expect_id: data = self.db.insert_id()
|
if expect_id: data = self.db.insert_id()
|
||||||
self.db.commit()
|
self.db.commit()
|
||||||
return data
|
return data
|
||||||
@ -328,16 +328,18 @@ class Bantracker(callbacks.Plugin):
|
|||||||
s = time.strftime(format, time.gmtime()) + " " + ircutils.stripFormatting(s)
|
s = time.strftime(format, time.gmtime()) + " " + ircutils.stripFormatting(s)
|
||||||
self.logs[channel] = self.logs[channel][-199:] + [s.strip()]
|
self.logs[channel] = self.logs[channel][-199:] + [s.strip()]
|
||||||
|
|
||||||
def doKickban(self, irc, channel, nick, target, kickmsg = None, use_time = None):
|
def doKickban(self, irc, channel, nick, target, kickmsg = None, use_time = None, extra_comment = None):
|
||||||
if not self.registryValue('enabled', channel):
|
if not self.registryValue('enabled', channel):
|
||||||
return
|
return
|
||||||
n = now()
|
n = now()
|
||||||
if use_time:
|
if use_time:
|
||||||
n = fromTime(use_time)
|
n = fromTime(use_time)
|
||||||
id = self.db_run("INSERT INTO bans (channel, mask, operator, time, log) values(%s, %s, %s, %s, %s)",
|
id = self.db_run("INSERT INTO bans (channel, mask, operator, time, log) values(%s, %s, %s, %s, %s)",
|
||||||
(channel, target, nick, n, '\n'.join(self.logs[channel])), expect_id=True)
|
(channel, target, nick, n, '\n'.join(self.logs[channel])), expect_id=True)
|
||||||
if kickmsg and id and not (kickmsg == nick):
|
if kickmsg and id and not (kickmsg == nick):
|
||||||
self.db_run("INSERT INTO comments (ban_id, who, comment, time) values(%s,%s,%s,%s)", (id, nick, kickmsg, n))
|
self.db_run("INSERT INTO comments (ban_id, who, comment, time) values(%s,%s,%s,%s)", (id, nick, kickmsg, n))
|
||||||
|
if extra_comment:
|
||||||
|
self.db_run("INSERT INTO comments (ban_id, who, comment, time) values(%s,%s,%s,%s)", (id, nick, extra_comment, n))
|
||||||
if channel not in self.bans:
|
if channel not in self.bans:
|
||||||
self.bans[channel] = []
|
self.bans[channel] = []
|
||||||
self.bans[channel].append(Ban(mask=target, who=nick, when=time.mktime(time.gmtime())))
|
self.bans[channel].append(Ban(mask=target, who=nick, when=time.mktime(time.gmtime())))
|
||||||
@ -383,8 +385,8 @@ class Bantracker(callbacks.Plugin):
|
|||||||
if newNick in c.users:
|
if newNick in c.users:
|
||||||
self.doLog(irc, channel,
|
self.doLog(irc, channel,
|
||||||
'*** %s is now known as %s\n' % (oldNick, newNick))
|
'*** %s is now known as %s\n' % (oldNick, newNick))
|
||||||
if oldNick in self.nicks:
|
if oldNick.lower() in self.nicks:
|
||||||
del self.nicks[oldNick]
|
del self.nicks[oldNick.lower()]
|
||||||
nick = newNick.lower()
|
nick = newNick.lower()
|
||||||
hostmask = nick + "!".join(msg.prefix.lower().split('!')[1:])
|
hostmask = nick + "!".join(msg.prefix.lower().split('!')[1:])
|
||||||
self.nicks[nick] = hostmask
|
self.nicks[nick] = hostmask
|
||||||
@ -392,8 +394,12 @@ class Bantracker(callbacks.Plugin):
|
|||||||
def doJoin(self, irc, msg):
|
def doJoin(self, irc, msg):
|
||||||
global queue
|
global queue
|
||||||
for channel in msg.args[0].split(','):
|
for channel in msg.args[0].split(','):
|
||||||
self.doLog(irc, channel,
|
if msg.nick:
|
||||||
'*** %s has joined %s\n' % (msg.nick or msg.prefix, channel))
|
self.doLog(irc, channel,
|
||||||
|
'*** %s (%s) has joined %s\n' % (msg.nick, msg.prefix.split('!', 1)[1], channel))
|
||||||
|
else:
|
||||||
|
self.doLog(irc, channel,
|
||||||
|
'*** %s has joined %s\n' % (msg.prefix, channel))
|
||||||
if not channel in self.bans.keys():
|
if not channel in self.bans.keys():
|
||||||
self.bans[channel] = []
|
self.bans[channel] = []
|
||||||
if msg.prefix.split('!', 1)[0] == irc.nick:
|
if msg.prefix.split('!', 1)[0] == irc.nick:
|
||||||
@ -407,28 +413,25 @@ class Bantracker(callbacks.Plugin):
|
|||||||
else:
|
else:
|
||||||
(channel, target) = msg.args
|
(channel, target) = msg.args
|
||||||
kickmsg = ''
|
kickmsg = ''
|
||||||
|
host = self.nick_to_host(irc, target, True)
|
||||||
|
if host == "%s!*@*" % host:
|
||||||
|
host = None
|
||||||
if kickmsg:
|
if kickmsg:
|
||||||
self.doLog(irc, channel,
|
self.doLog(irc, channel,
|
||||||
'*** %s was kicked by %s (%s)\n' % (target, msg.nick, kickmsg))
|
'*** %s was kicked by %s (%s)\n' % (target, msg.nick, kickmsg))
|
||||||
else:
|
else:
|
||||||
self.doLog(irc, channel,
|
self.doLog(irc, channel,
|
||||||
'*** %s was kicked by %s\n' % (target, msg.nick))
|
'*** %s was kicked by %s\n' % (target, msg.nick))
|
||||||
self.doKickban(irc, channel, msg.nick, target, kickmsg)
|
self.doKickban(irc, channel, msg.nick, target, kickmsg, extra_comment=host)
|
||||||
|
|
||||||
def doPart(self, irc, msg):
|
def doPart(self, irc, msg):
|
||||||
for channel in msg.args[0].split(','):
|
for channel in msg.args[0].split(','):
|
||||||
self.doLog(irc, channel, '*** %s (%s) has left %s (%s)\n' % (msg.nick, msg.prefix, channel, msg.args[1]))
|
self.doLog(irc, channel, '*** %s (%s) has left %s (%s)\n' % (msg.nick, msg.prefix, channel, msg.args[1]))
|
||||||
if msg.args[1].startswith('requested by'):
|
if msg.args[1].startswith('requested by'):
|
||||||
args = msg.args[1].split()
|
args = msg.args[1].split()
|
||||||
self.doKickban(irc, channel, args[2].replace(':',''), msg.nick, ' '.join(args[3:])[1:-1].strip())
|
self.doKickban(irc, channel, args[2].replace(':',''), msg.nick, ' '.join(args[3:])[1:-1].strip(), extra_comment=msg.prefix)
|
||||||
|
|
||||||
def doMode(self, irc, msg):
|
def doMode(self, irc, msg):
|
||||||
user = None
|
|
||||||
if msg.tagged('identified'):
|
|
||||||
try:
|
|
||||||
user = ircdb.users.getUser(msg.prefix[:msg.prefix.find('!')].lower())
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
channel = msg.args[0]
|
channel = msg.args[0]
|
||||||
if irc.isChannel(channel) and msg.args[1:]:
|
if irc.isChannel(channel) and msg.args[1:]:
|
||||||
self.doLog(irc, channel,
|
self.doLog(irc, channel,
|
||||||
@ -444,8 +447,8 @@ class Bantracker(callbacks.Plugin):
|
|||||||
else:
|
else:
|
||||||
if c == 'b':
|
if c == 'b':
|
||||||
if plusmin:
|
if plusmin:
|
||||||
id = self.doKickban(irc, channel, msg.nick, msg.args[i])
|
comment = self.getHostFromBan(irc, msg, msg.args[i])
|
||||||
msg = ircmsgs.privmsg(msg.nick, "Ban %s (ID: %s)" % (msg.args[i], id))
|
self.doKickban(irc, channel, msg.nick, msg.args[i], extra_comment=comment)
|
||||||
else: self.doUnban(irc,channel, msg.nick, msg.args[i])
|
else: self.doUnban(irc,channel, msg.nick, msg.args[i])
|
||||||
i += 1
|
i += 1
|
||||||
if c == 'd':
|
if c == 'd':
|
||||||
@ -453,6 +456,23 @@ class Bantracker(callbacks.Plugin):
|
|||||||
else: self.doUnban(irc,channel, msg.nick, msg.args[i] + ' (realname)')
|
else: self.doUnban(irc,channel, msg.nick, msg.args[i] + ' (realname)')
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
|
def getHostFromBan(self, irc, msg, mask):
|
||||||
|
if irc not in self.lastStates:
|
||||||
|
self.lastStates[irc] = irc.state.copy()
|
||||||
|
if mask[0] == '%':
|
||||||
|
mask = mask[1:]
|
||||||
|
(nick, ident, host) = ircutils.splitHostmask(mask)
|
||||||
|
channel = None
|
||||||
|
chan = None
|
||||||
|
if mask[0] not in ('*', '?'): # Nick ban
|
||||||
|
if nick in self.nicks:
|
||||||
|
return self.nicks[nick]
|
||||||
|
else: # Host/ident ban
|
||||||
|
for (inick, ihost) in self.nicks.iteritems():
|
||||||
|
if ircutils.hostmaskPatternEqual(mask, ihost):
|
||||||
|
return ihost
|
||||||
|
return None
|
||||||
|
|
||||||
def doTopic(self, irc, msg):
|
def doTopic(self, irc, msg):
|
||||||
if len(msg.args) == 1:
|
if len(msg.args) == 1:
|
||||||
return # It's an empty TOPIC just to get the current topic.
|
return # It's an empty TOPIC just to get the current topic.
|
||||||
@ -461,11 +481,13 @@ class Bantracker(callbacks.Plugin):
|
|||||||
'*** %s changes topic to "%s"\n' % (msg.nick, msg.args[1]))
|
'*** %s changes topic to "%s"\n' % (msg.nick, msg.args[1]))
|
||||||
|
|
||||||
def doQuit(self, irc, msg):
|
def doQuit(self, irc, msg):
|
||||||
|
if irc not in self.lastStates:
|
||||||
|
self.lastStates[irc] = irc.state.copy()
|
||||||
for (channel, chan) in self.lastStates[irc].channels.iteritems():
|
for (channel, chan) in self.lastStates[irc].channels.iteritems():
|
||||||
if msg.nick in chan.users:
|
if msg.nick in chan.users:
|
||||||
self.doLog(irc, channel, '*** %s has quit IRC (%s)\n' % (msg.nick, msg.args[0]))
|
self.doLog(irc, channel, '*** %s (%s) has quit IRC (%s)\n' % (msg.nick, msg.prefix, msg.args[0]))
|
||||||
if msg.nick in self.nicks:
|
# if msg.nick in self.user:
|
||||||
del self.nicks[msg.nick]
|
# del self.user[msg.nick]
|
||||||
|
|
||||||
def outFilter(self, irc, msg):
|
def outFilter(self, irc, msg):
|
||||||
# Gotta catch my own messages *somehow* :)
|
# Gotta catch my own messages *somehow* :)
|
||||||
@ -477,11 +499,7 @@ class Bantracker(callbacks.Plugin):
|
|||||||
return msg
|
return msg
|
||||||
|
|
||||||
def callPrecedence(self, irc):
|
def callPrecedence(self, irc):
|
||||||
before = []
|
return (['IRCLogin'], [])
|
||||||
for cb in irc.callbacks:
|
|
||||||
if cb.name() == 'IRCLogin':
|
|
||||||
before.append(cb)
|
|
||||||
return (before, [])
|
|
||||||
|
|
||||||
def check_auth(self, irc, msg, args, cap='bantracker'):
|
def check_auth(self, irc, msg, args, cap='bantracker'):
|
||||||
hasIRCLogin = False
|
hasIRCLogin = False
|
||||||
@ -511,7 +529,10 @@ class Bantracker(callbacks.Plugin):
|
|||||||
if not user:
|
if not user:
|
||||||
return
|
return
|
||||||
user.addAuth(msg.prefix)
|
user.addAuth(msg.prefix)
|
||||||
ircdb.users.setUser(user, flush=False)
|
try:
|
||||||
|
ircdb.users.setUser(user, flush=False)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if not capab(user, 'bantracker'):
|
if not capab(user, 'bantracker'):
|
||||||
irc.error(conf.supybot.replies.noCapability() % 'bantracker')
|
irc.error(conf.supybot.replies.noCapability() % 'bantracker')
|
||||||
@ -521,7 +542,7 @@ class Bantracker(callbacks.Plugin):
|
|||||||
return
|
return
|
||||||
sessid = md5.new('%s%s%d' % (msg.prefix, time.time(), random.randint(1,100000))).hexdigest()
|
sessid = md5.new('%s%s%d' % (msg.prefix, time.time(), random.randint(1,100000))).hexdigest()
|
||||||
self.db_run("INSERT INTO sessions (session_id, user, time) VALUES (%s, %s, %d);",
|
self.db_run("INSERT INTO sessions (session_id, user, time) VALUES (%s, %s, %d);",
|
||||||
( sessid, msg.prefix[:msg.prefix.find('!')], int(time.mktime(time.gmtime())) ) )
|
( sessid, msg.nick, int(time.mktime(time.gmtime())) ) )
|
||||||
irc.reply('Log in at %s/bans.cgi?sess=%s' % (self.registryValue('bansite'), sessid), private=True)
|
irc.reply('Log in at %s/bans.cgi?sess=%s' % (self.registryValue('bansite'), sessid), private=True)
|
||||||
|
|
||||||
btlogin = wrap(btlogin)
|
btlogin = wrap(btlogin)
|
||||||
@ -536,6 +557,10 @@ class Bantracker(callbacks.Plugin):
|
|||||||
if not user:
|
if not user:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if target == '*' or target[0] == '*':
|
||||||
|
irc.error("Can not create a mark for '%s'" % target)
|
||||||
|
return
|
||||||
|
|
||||||
if not channel:
|
if not channel:
|
||||||
irc.error('<channel> must be given if not in a channel')
|
irc.error('<channel> must be given if not in a channel')
|
||||||
return
|
return
|
||||||
@ -598,12 +623,11 @@ class Bantracker(callbacks.Plugin):
|
|||||||
match.append((e[0], e[1]))
|
match.append((e[0], e[1]))
|
||||||
return match
|
return match
|
||||||
|
|
||||||
|
def bansearch(self, irc, msg, args, target, channel, from_reply=False, reply=None):
|
||||||
def real_bansearch(self, irc, msg, args, target, channel, from_reply=False, reply=None):
|
|
||||||
"""<nick|hostmask> [<channel>]
|
"""<nick|hostmask> [<channel>]
|
||||||
|
|
||||||
Search bans database for a ban on nick/host,
|
Search bans database for a ban on <nick|hostmask>,
|
||||||
if channel is not given search all channel bans.
|
if <channel> is not given search all channel bans.
|
||||||
"""
|
"""
|
||||||
def format_entry(entry):
|
def format_entry(entry):
|
||||||
ret = list(entry[:-1])
|
ret = list(entry[:-1])
|
||||||
@ -629,18 +653,35 @@ class Bantracker(callbacks.Plugin):
|
|||||||
return
|
return
|
||||||
match = self.getBans(hostmask, channel)
|
match = self.getBans(hostmask, channel)
|
||||||
|
|
||||||
if capab(user, 'admin'):
|
if capab(user, 'owner'):
|
||||||
if len(queue.msgcache) > 0:
|
if len(queue.msgcache) > 0:
|
||||||
irc.reply("Warning: still syncing (%i)" % len(queue.msgcache))
|
irc.reply("Warning: still syncing (%i)" % len(queue.msgcache))
|
||||||
|
|
||||||
|
if channel:
|
||||||
|
if not ircutils.isChannel(channel):
|
||||||
|
channel = None
|
||||||
|
|
||||||
|
if '*' in target or '?' in target:
|
||||||
|
irc.error("Can only search for a complete hostmask")
|
||||||
|
return
|
||||||
|
hostmask = target
|
||||||
|
if '!' not in target or '@' not in target:
|
||||||
|
hostmask = self.nick_to_host(irc, target)
|
||||||
|
if '!' not in hostmask:
|
||||||
|
if "n=" in hostmask:
|
||||||
|
hostmask = hostmask.replace("n=", "!n=", 1)
|
||||||
|
elif "i=" in hostmask:
|
||||||
|
hostmask = hostmask.replace("i=", "!i=", 1)
|
||||||
|
match = self.getBans(hostmask, channel)
|
||||||
|
|
||||||
if not match:
|
if not match:
|
||||||
irc.reply("No matches found for %s in %s" % (hostmask, True and channel or "any channel"))
|
irc.reply("No matches found for %s in %s" % (hostmask, True and channel or "any channel"))
|
||||||
return
|
return
|
||||||
ret = []
|
ret = []
|
||||||
|
replies = []
|
||||||
for m in match:
|
for m in match:
|
||||||
if m[1]:
|
if m[1]:
|
||||||
ret.append((format_entry(self.db_run("SELECT mask, operator, channel, time FROM bans WHERE id=%d", m[1], expect_result=True)[0]), m[1]))
|
ret.append((format_entry(self.db_run("SELECT mask, operator, channel, time FROM bans WHERE id=%d", m[1], expect_result=True)[0]), m[1]))
|
||||||
|
|
||||||
if not ret:
|
if not ret:
|
||||||
done = []
|
done = []
|
||||||
for c in self.bans:
|
for c in self.bans:
|
||||||
@ -651,11 +692,21 @@ class Bantracker(callbacks.Plugin):
|
|||||||
irc.reply("Match %s in %s" % (b, c))
|
irc.reply("Match %s in %s" % (b, c))
|
||||||
done.append(c)
|
done.append(c)
|
||||||
return
|
return
|
||||||
|
|
||||||
for i in ret:
|
for i in ret:
|
||||||
irc.reply("Match: %s by %s in %s on %s (ID: %s)" % (i[0] + (i[1],)))
|
if '*' in i[0][0] or '?' in i[0][0]:
|
||||||
|
banstr = "Match: %s by %s in %s on %s (ID: %s)" % (i[0] + (i[1],))
|
||||||
|
else:
|
||||||
|
banstr = "Mark: by %s in %s on %s (ID: %s)" % (i[0][1:] + (i[1],))
|
||||||
|
if (banstr, False) not in replies:
|
||||||
|
replies.append((banstr, False))
|
||||||
|
|
||||||
bansearch = wrap(real_bansearch, ['something', optional('anything', default=None)])
|
if replies:
|
||||||
|
for r in replies:
|
||||||
|
irc.reply(r[0], private=r[1])
|
||||||
|
return
|
||||||
|
irc.error("Something not so good happened, please tell stdin about it")
|
||||||
|
|
||||||
|
bansearch = wrap(bansearch, ['something', optional('something', default=None)])
|
||||||
|
|
||||||
def banlog(self, irc, msg, args, target, channel):
|
def banlog(self, irc, msg, args, target, channel):
|
||||||
"""<nick|hostmask> [<channel>]
|
"""<nick|hostmask> [<channel>]
|
||||||
@ -664,13 +715,12 @@ class Bantracker(callbacks.Plugin):
|
|||||||
the nick/host has to have an active ban/mute against it.
|
the nick/host has to have an active ban/mute against it.
|
||||||
If channel is not given search all channel bans.
|
If channel is not given search all channel bans.
|
||||||
"""
|
"""
|
||||||
user = self.check_auth(irc, msg, args)
|
user = self.check_auth(irc, msg, args)
|
||||||
if not user:
|
if not user:
|
||||||
return
|
return
|
||||||
|
|
||||||
if capab(user, 'admin'):
|
if capab(user, 'owner') and len(queue.msgcache) > 0:
|
||||||
if len(queue.msgcache) > 0:
|
irc.reply("Warning: still syncing (%i)" % len(queue.msgcache))
|
||||||
irc.reply("Warning: still syncing (%i)" % len(queue.msgcache))
|
|
||||||
|
|
||||||
hostmask = self.nick_to_host(irc, target)
|
hostmask = self.nick_to_host(irc, target)
|
||||||
target = target.split('!', 1)[0]
|
target = target.split('!', 1)[0]
|
||||||
@ -682,9 +732,12 @@ class Bantracker(callbacks.Plugin):
|
|||||||
|
|
||||||
ret = []
|
ret = []
|
||||||
for m in match:
|
for m in match:
|
||||||
ret.append((self.db_run("SELECT log, channel FROM bans WHERE id=%d", m[1], expect_result=True), m[1]))
|
if m[1]:
|
||||||
|
ret.append((self.db_run("SELECT log, channel FROM bans WHERE id=%d", m[1], expect_result=True), m[1]))
|
||||||
|
|
||||||
sent = []
|
sent = []
|
||||||
|
if not ret:
|
||||||
|
irc.reply("No matches in tracker")
|
||||||
for logs in ret:
|
for logs in ret:
|
||||||
log = logs[0]
|
log = logs[0]
|
||||||
id = logs[1]
|
id = logs[1]
|
||||||
@ -728,6 +781,7 @@ class Bantracker(callbacks.Plugin):
|
|||||||
for ban in remove_bans:
|
for ban in remove_bans:
|
||||||
self.log.info("Removing ban %s from %s" % (ban.replace('%', '%%'), chan))
|
self.log.info("Removing ban %s from %s" % (ban.replace('%', '%%'), chan))
|
||||||
self.doUnban(irc, channel, "Automated-Removal", ban)
|
self.doUnban(irc, channel, "Automated-Removal", ban)
|
||||||
|
|
||||||
return len(remove_bans)
|
return len(remove_bans)
|
||||||
|
|
||||||
def addBans(chan):
|
def addBans(chan):
|
||||||
@ -750,27 +804,28 @@ class Bantracker(callbacks.Plugin):
|
|||||||
if not self.check_auth(irc, msg, args, 'owner'):
|
if not self.check_auth(irc, msg, args, 'owner'):
|
||||||
return
|
return
|
||||||
|
|
||||||
a_res = 0
|
add_res = 0
|
||||||
r_res = 0
|
rem_res = 0
|
||||||
|
|
||||||
if len(queue.msgcache) > 0:
|
if len(queue.msgcache) > 0:
|
||||||
irc.reply("Error: still syncing (%i)" % len(queue.msgcache))
|
irc.reply("Error: still syncing (%i)" % len(queue.msgcache))
|
||||||
return
|
return
|
||||||
|
|
||||||
if channel:
|
try:
|
||||||
r_res += remBans(channel)
|
if channel:
|
||||||
a_res += addBans(channel)
|
rem_res += remBans(channel)
|
||||||
else:
|
add_res += addBans(channel)
|
||||||
for channel in irc.state.channels.keys():
|
else:
|
||||||
if channel not in self.bans:
|
for channel in irc.state.channels.keys():
|
||||||
self.bans[channel] = []
|
if channel not in self.bans:
|
||||||
r_res += remBans(channel)
|
self.bans[channel] = []
|
||||||
a_res += addBans(channel)
|
rem_res += remBans(channel)
|
||||||
|
add_res += addBans(channel)
|
||||||
|
except KeyError, e:
|
||||||
|
irc.error("%s, Please wait longer" % e)
|
||||||
|
return
|
||||||
|
|
||||||
irc.reply("Cleared %i obsolete bans" % r_res)
|
irc.reply("Cleared %i obsolete bans, Added %i new bans" % (rem_res, add_res))
|
||||||
irc.reply("Added %i new bans" % a_res)
|
|
||||||
delta = r_res + a_res
|
|
||||||
irc.reply("Delta: %s%i" % (str(delta and ' +')[-1], delta))
|
|
||||||
|
|
||||||
updatebt = wrap(updatebt, [optional('anything', default=None)])
|
updatebt = wrap(updatebt, [optional('anything', default=None)])
|
||||||
|
|
||||||
@ -802,8 +857,8 @@ class Bantracker(callbacks.Plugin):
|
|||||||
def banlink(self, irc, msg, args, id, highlight):
|
def banlink(self, irc, msg, args, id, highlight):
|
||||||
"""<id> [<highlight>]
|
"""<id> [<highlight>]
|
||||||
|
|
||||||
Returns a direct link to the log of kick/ban <id>
|
Returns a link to the log of the ban/kick with id <id>.
|
||||||
if <highlight> is given, lines containing that will be highlighted
|
If <highlight> is given, lines containing that term will be highlighted
|
||||||
"""
|
"""
|
||||||
if not self.check_auth(irc, msg, args):
|
if not self.check_auth(irc, msg, args):
|
||||||
return
|
return
|
||||||
|
@ -22,7 +22,7 @@ import supybot.world as world
|
|||||||
__version__ = "2.5.1"
|
__version__ = "2.5.1"
|
||||||
__author__ = supybot.Author("Dennis Kaarsemaker","Seveas","dennis@kaarsemaker.net")
|
__author__ = supybot.Author("Dennis Kaarsemaker","Seveas","dennis@kaarsemaker.net")
|
||||||
__contributors__ = {}
|
__contributors__ = {}
|
||||||
__url__ = 'http://ubotu.ubuntu-nl.org'
|
__url__ = 'http://ubottu.com/'
|
||||||
|
|
||||||
import config
|
import config
|
||||||
reload(config)
|
reload(config)
|
||||||
|
@ -49,7 +49,7 @@ conf.registerChannelValue(conf.supybot.plugins.Bugtracker, 'showassignee',
|
|||||||
registry.Boolean(False, """Whether to show the assignee in bug reports"""))
|
registry.Boolean(False, """Whether to show the assignee in bug reports"""))
|
||||||
|
|
||||||
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'reportercache',
|
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'reportercache',
|
||||||
registry.String('', """Name of the basedir for the bugreporter cache""",private=True))
|
registry.String('', """Name of the basedir for the bugreporter cache""", private=True))
|
||||||
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_server',
|
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_server',
|
||||||
registry.String('', """IMAP server for bugmail account""",private=True))
|
registry.String('', """IMAP server for bugmail account""",private=True))
|
||||||
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_user',
|
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_user',
|
||||||
@ -57,5 +57,5 @@ conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_user',
|
|||||||
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_password',
|
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_password',
|
||||||
registry.String('', """IMAP password for bugmail account""", private=True))
|
registry.String('', """IMAP password for bugmail account""", private=True))
|
||||||
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_ssl',
|
conf.registerGlobalValue(conf.supybot.plugins.Bugtracker, 'imap_ssl',
|
||||||
registry.Boolean(False, """Use SSL for imap connections""",private=True))
|
registry.Boolean(False, """Use SSL for imap connections""", private=True))
|
||||||
|
|
||||||
|
@ -27,6 +27,15 @@ from htmlentitydefs import entitydefs as entities
|
|||||||
import email.FeedParser
|
import email.FeedParser
|
||||||
import SOAPpy
|
import SOAPpy
|
||||||
|
|
||||||
|
bad_words = ["fuck","fuk","fucking","fuking","fukin","fuckin","fucked","fuked","fucker","shit","cunt","bastard","nazi","nigger","nigga","cock","bitches","bitch"]
|
||||||
|
|
||||||
|
def makeClean(s):
|
||||||
|
words = s.split()
|
||||||
|
for word in words:
|
||||||
|
if word.lower() in bad_words:
|
||||||
|
words[words.index(word)] = "<censored>"
|
||||||
|
return " ".join(words)
|
||||||
|
|
||||||
def registerBugtracker(name, url='', description='', trackertype=''):
|
def registerBugtracker(name, url='', description='', trackertype=''):
|
||||||
conf.supybot.plugins.Bugtracker.bugtrackers().add(name)
|
conf.supybot.plugins.Bugtracker.bugtrackers().add(name)
|
||||||
group = conf.registerGroup(conf.supybot.plugins.Bugtracker.bugtrackers, name)
|
group = conf.registerGroup(conf.supybot.plugins.Bugtracker.bugtrackers, name)
|
||||||
@ -83,7 +92,7 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
def __init__(self, irc):
|
def __init__(self, irc):
|
||||||
callbacks.PluginRegexp.__init__(self, irc)
|
callbacks.PluginRegexp.__init__(self, irc)
|
||||||
self.db = ircutils.IrcDict()
|
self.db = ircutils.IrcDict()
|
||||||
self.events = []
|
events = []
|
||||||
for name in self.registryValue('bugtrackers'):
|
for name in self.registryValue('bugtrackers'):
|
||||||
registerBugtracker(name)
|
registerBugtracker(name)
|
||||||
group = self.registryValue('bugtrackers.%s' % name.replace('.','\\.'), value=False)
|
group = self.registryValue('bugtrackers.%s' % name.replace('.','\\.'), value=False)
|
||||||
@ -94,7 +103,7 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
self.shorthand = utils.abbrev(self.db.keys())
|
self.shorthand = utils.abbrev(self.db.keys())
|
||||||
|
|
||||||
# Schedule bug reporting
|
# Schedule bug reporting
|
||||||
self.shown = {}
|
self.shown = {}
|
||||||
if self.registryValue('imap_server') and self.registryValue('reportercache'):
|
if self.registryValue('imap_server') and self.registryValue('reportercache'):
|
||||||
try:
|
try:
|
||||||
schedule.removeEvent(self.name() + '.bugreporter')
|
schedule.removeEvent(self.name() + '.bugreporter')
|
||||||
@ -106,7 +115,7 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
|
|
||||||
def die(self):
|
def die(self):
|
||||||
try:
|
try:
|
||||||
for event in self.events:
|
for event in self.events:
|
||||||
self.log.info('Removing scheduled event "%s"' % event)
|
self.log.info('Removing scheduled event "%s"' % event)
|
||||||
schedule.removeEvent(event)
|
schedule.removeEvent(event)
|
||||||
schedule.removeEvent(self.name())
|
schedule.removeEvent(self.name())
|
||||||
@ -154,9 +163,10 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
sc.store(m, '+FLAGS', "(\Deleted)") # Mark message deleted so we don't have to process it again
|
sc.store(m, '+FLAGS', "(\Deleted)") # Mark message deleted so we don't have to process it again
|
||||||
fp.feed(msg)
|
fp.feed(msg)
|
||||||
bug = fp.close()
|
bug = fp.close()
|
||||||
|
tag = None
|
||||||
|
|
||||||
if 'X-Launchpad-Bug' not in bug.keys():
|
if 'X-Launchpad-Bug' not in bug.keys():
|
||||||
self.log.info('Ignoring e-mail with no detectable bug (Not from Launchpad)')
|
self.log.info('Ignoring e-mail with no detectable bug (Not from Launchpad)')
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
tag = bug['X-Launchpad-Bug']
|
tag = bug['X-Launchpad-Bug']
|
||||||
@ -168,7 +178,8 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
|
|
||||||
if not tag:
|
if not tag:
|
||||||
self.log.info('Ignoring e-mail with no detectible bug (bad tag)')
|
self.log.info('Ignoring e-mail with no detectible bug (bad tag)')
|
||||||
|
|
||||||
|
tag = tag[tag.find('+')+1:tag.find('@')]
|
||||||
if tag not in bugs:
|
if tag not in bugs:
|
||||||
bugs[tag] = {}
|
bugs[tag] = {}
|
||||||
|
|
||||||
@ -200,6 +211,7 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
self.log.info('Ignoring e-mail with no detectable bug')
|
self.log.info('Ignoring e-mail with no detectable bug')
|
||||||
|
|
||||||
reported_bugs = 0
|
reported_bugs = 0
|
||||||
|
|
||||||
for c in irc.state.channels:
|
for c in irc.state.channels:
|
||||||
tags = self.registryValue('bugReporter', channel=c)
|
tags = self.registryValue('bugReporter', channel=c)
|
||||||
if not tags:
|
if not tags:
|
||||||
@ -210,7 +222,6 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
for b in sorted(bugs[tag].keys()):
|
for b in sorted(bugs[tag].keys()):
|
||||||
irc.queueMsg(ircmsgs.privmsg(c,'New bug: #%s' % bugs[tag][b][bugs[tag][b].find('bug ')+4:]))
|
irc.queueMsg(ircmsgs.privmsg(c,'New bug: #%s' % bugs[tag][b][bugs[tag][b].find('bug ')+4:]))
|
||||||
reported_bugs = reported_bugs+1
|
reported_bugs = reported_bugs+1
|
||||||
self.log.info("Reported %d/%d bugs" % (reported_bugs, len(new_mail)))
|
|
||||||
|
|
||||||
def add(self, irc, msg, args, name, trackertype, url, description):
|
def add(self, irc, msg, args, name, trackertype, url, description):
|
||||||
"""<name> <type> <url> [<description>]
|
"""<name> <type> <url> [<description>]
|
||||||
@ -345,7 +356,7 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
if not name:
|
if not name:
|
||||||
snarfTarget = self.registryValue('snarfTarget', msg.args[0]).lower()
|
snarfTarget = self.registryValue('snarfTarget', msg.args[0]).lower()
|
||||||
if not snarfTarget:
|
if not snarfTarget:
|
||||||
self.log.info("no snarfTarget")
|
self.log.warning("no snarfTarget for Bugtracker")
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
name = self.shorthand[snarfTarget.lower()]
|
name = self.shorthand[snarfTarget.lower()]
|
||||||
@ -360,6 +371,9 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
else:
|
else:
|
||||||
for bugid in bugids:
|
for bugid in bugids:
|
||||||
bugid = int(bugid)
|
bugid = int(bugid)
|
||||||
|
if bugid == 1 and tracker == self.db["lp"]:
|
||||||
|
irc.reply("https://bugs.launchpad.net/ubuntu/+bug/1 (Not reporting large bug)")
|
||||||
|
continue
|
||||||
try:
|
try:
|
||||||
report = self.get_bug(msg.args[0],tracker,bugid,self.registryValue('showassignee', msg.args[0]))
|
report = self.get_bug(msg.args[0],tracker,bugid,self.registryValue('showassignee', msg.args[0]))
|
||||||
except BugNotFoundError:
|
except BugNotFoundError:
|
||||||
@ -374,7 +388,7 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
irc.error(str(e))
|
irc.error(str(e))
|
||||||
else:
|
else:
|
||||||
for r in report:
|
for r in report:
|
||||||
irc.reply(r, prefixNick=False)
|
irc.reply(makeClean(r), prefixNick=False)
|
||||||
|
|
||||||
def turlSnarfer(self, irc, msg, match):
|
def turlSnarfer(self, irc, msg, match):
|
||||||
r"(?P<tracker>https?://\S*?)/(Bugs/0*|str.php\?L|show_bug.cgi\?id=|bugreport.cgi\?bug=|(bugs|\+bug)/|ticket/|tracker/|\S*aid=)(?P<bug>\d+)(?P<sfurl>&group_id=\d+&at_id=\d+)?"
|
r"(?P<tracker>https?://\S*?)/(Bugs/0*|str.php\?L|show_bug.cgi\?id=|bugreport.cgi\?bug=|(bugs|\+bug)/|ticket/|tracker/|\S*aid=)(?P<bug>\d+)(?P<sfurl>&group_id=\d+&at_id=\d+)?"
|
||||||
@ -392,18 +406,23 @@ class Bugtracker(callbacks.PluginRegexp):
|
|||||||
report = self.get_bug(msg.args[0],tracker,int(match.group('bug')),self.registryValue('showassignee', msg.args[0]), do_url = False)
|
report = self.get_bug(msg.args[0],tracker,int(match.group('bug')),self.registryValue('showassignee', msg.args[0]), do_url = False)
|
||||||
except BugtrackerError, e:
|
except BugtrackerError, e:
|
||||||
irc.error(str(e))
|
irc.error(str(e))
|
||||||
|
except BugNotFoundError, e:
|
||||||
|
irc.error("%s bug %s not found" % (tracker, match.group('bug')))
|
||||||
else:
|
else:
|
||||||
for r in report:
|
for r in report:
|
||||||
irc.reply(r, prefixNick=False)
|
irc.reply(makeClean(r), prefixNick=False)
|
||||||
turlSnarfer = urlSnarfer(turlSnarfer)
|
turlSnarfer = urlSnarfer(turlSnarfer)
|
||||||
|
|
||||||
# Only useful for launchpad developers
|
# Only useful for launchpad developers
|
||||||
def oopsSnarfer(self, irc, msg, match):
|
def oopsSnarfer(self, irc, msg, match):
|
||||||
r"OOPS-(?P<oopsid>\d*[A-Z]\d+)"
|
r"OOPS-(?P<oopsid>\d*[\dA-Z]+)"
|
||||||
if msg.args[0][0] == '#' and not self.registryValue('bugSnarfer', msg.args[0]):
|
if msg.args[0][0] == '#' and not self.registryValue('bugSnarfer', msg.args[0]):
|
||||||
return
|
return
|
||||||
oopsid = match.group(1)
|
oopsid = match.group(1)
|
||||||
irc.reply("https://devpad.canonical.com/~jamesh/oops.cgi/%s" % oopsid, prefixNick=False)
|
if oopsid.lower() == "tools":
|
||||||
|
return
|
||||||
|
irc.reply("https://lp-oops.canonical.com/oops.py/?oopsid=%s" % oopsid, prefixNick=False)
|
||||||
|
#irc.reply("https://devpad.canonical.com/~jamesh/oops.cgi/%s" % oopsid, prefixNick=False)
|
||||||
|
|
||||||
def cveSnarfer(self, irc, msg, match):
|
def cveSnarfer(self, irc, msg, match):
|
||||||
r"(cve[- ]\d{4}[- ]\d{4})"
|
r"(cve[- ]\d{4}[- ]\d{4})"
|
||||||
@ -503,7 +522,7 @@ class Bugzilla(IBugtracker):
|
|||||||
bugxml = utils.web.getUrl(url)
|
bugxml = utils.web.getUrl(url)
|
||||||
zilladom = minidom.parseString(bugxml)
|
zilladom = minidom.parseString(bugxml)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse XML returned by %s: %s' % (self.description, e)
|
s = 'Could not parse XML returned by %s: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
bug_n = zilladom.getElementsByTagName('bug')[0]
|
bug_n = zilladom.getElementsByTagName('bug')[0]
|
||||||
if bug_n.hasAttribute('error'):
|
if bug_n.hasAttribute('error'):
|
||||||
@ -527,7 +546,7 @@ class Bugzilla(IBugtracker):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse XML returned by %s bugzilla: %s' % (self.description, e)
|
s = 'Could not parse XML returned by %s bugzilla: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
return [(id, component, title, severity, status, assignee, "%s/show_bug.cgi?id=%d" % (self.url, id))]
|
return [(id, component, title, severity, status, assignee, "%s/show_bug.cgi?id=%d" % (self.url, id))]
|
||||||
|
|
||||||
@ -538,7 +557,7 @@ class Issuezilla(IBugtracker):
|
|||||||
bugxml = utils.web.getUrl(url)
|
bugxml = utils.web.getUrl(url)
|
||||||
zilladom = minidom.parseString(bugxml)
|
zilladom = minidom.parseString(bugxml)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse XML returned by %s: %s' % (self.description, e)
|
s = 'Could not parse XML returned by %s: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
bug_n = zilladom.getElementsByTagName('issue')[0]
|
bug_n = zilladom.getElementsByTagName('issue')[0]
|
||||||
if not (bug_n.getAttribute('status_code') == '200'):
|
if not (bug_n.getAttribute('status_code') == '200'):
|
||||||
@ -557,7 +576,7 @@ class Issuezilla(IBugtracker):
|
|||||||
severity = _getnodetxt(bug_n.getElementsByTagName('issue_type')[0])
|
severity = _getnodetxt(bug_n.getElementsByTagName('issue_type')[0])
|
||||||
assignee = _getnodetxt(bug_n.getElementsByTagName('assigned_to')[0])
|
assignee = _getnodetxt(bug_n.getElementsByTagName('assigned_to')[0])
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse XML returned by %s bugzilla: %s' % (self.description, e)
|
s = 'Could not parse XML returned by %s bugzilla: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
return [(id, component, title, severity, status, assignee, "%s/show_bug.cgi?id=%d" % (self.url, id))]
|
return [(id, component, title, severity, status, assignee, "%s/show_bug.cgi?id=%d" % (self.url, id))]
|
||||||
|
|
||||||
@ -587,13 +606,13 @@ class Launchpad(IBugtracker):
|
|||||||
return 0
|
return 0
|
||||||
return 0
|
return 0
|
||||||
def get_bug(self, id):
|
def get_bug(self, id):
|
||||||
bug_url = '%s/bugs/%d' % (self.url,id)
|
|
||||||
try:
|
try:
|
||||||
bugdata = utils.web.getUrl("%s/+text" % bug_url)
|
# print("%s/bugs/%d/+text" % (self.url,id))
|
||||||
|
bugdata = utils.web.getUrl("%s/bugs/%d/+text" % (self.url,id))
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
if '404' in str(e):
|
if '404' in str(e):
|
||||||
raise BugNotFoundError
|
raise BugNotFoundError
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s/bugs/%d)' % (self.description, e, self.url, id)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
summary = {}
|
summary = {}
|
||||||
# Trap private bugs
|
# Trap private bugs
|
||||||
@ -612,7 +631,7 @@ class Launchpad(IBugtracker):
|
|||||||
taskdata = taskdata[-1]
|
taskdata = taskdata[-1]
|
||||||
|
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s/bugs/%d)' % (self.description, e, self.url, id)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
# Try and find duplicates
|
# Try and find duplicates
|
||||||
t = taskdata['task']
|
t = taskdata['task']
|
||||||
@ -621,9 +640,9 @@ class Launchpad(IBugtracker):
|
|||||||
if bugdata['duplicate-of']:
|
if bugdata['duplicate-of']:
|
||||||
dupbug = self.get_bug(int(bugdata['duplicate-of']))
|
dupbug = self.get_bug(int(bugdata['duplicate-of']))
|
||||||
return [(id, t, bugdata['title'] + (' (dup-of: %d)' % dupbug[0][0]), taskdata['importance'],
|
return [(id, t, bugdata['title'] + (' (dup-of: %d)' % dupbug[0][0]), taskdata['importance'],
|
||||||
taskdata['status'], taskdata['assignee'], bug_url)] + dupbug
|
taskdata['status'], taskdata['assignee'], "%s/bugs/%s" % (self.url, id))] + dupbug
|
||||||
return [(id, t, bugdata['title'], taskdata['importance'],
|
return [(id, t, bugdata['title'], taskdata['importance'],
|
||||||
taskdata['status'], taskdata['assignee'], bug_url)]
|
taskdata['status'], taskdata['assignee'], "%s/bugs/%s" % (self.url, id))]
|
||||||
|
|
||||||
# <rant>
|
# <rant>
|
||||||
# Debbugs sucks donkeyballs
|
# Debbugs sucks donkeyballs
|
||||||
@ -640,8 +659,9 @@ class Debbugs(IBugtracker):
|
|||||||
IBugtracker.__init__(self, *args, **kwargs)
|
IBugtracker.__init__(self, *args, **kwargs)
|
||||||
self.soap_proxy = SOAPpy.SOAPProxy("bugs.debian.org/cgi-bin/soap.cgi", "Debbugs/SOAP/Status")
|
self.soap_proxy = SOAPpy.SOAPProxy("bugs.debian.org/cgi-bin/soap.cgi", "Debbugs/SOAP/Status")
|
||||||
self.soap_proxy.soapaction = "Debbugs/SOAP/Status#get_status"
|
self.soap_proxy.soapaction = "Debbugs/SOAP/Status#get_status"
|
||||||
|
|
||||||
def get_bug(self, id):
|
def get_bug(self, id):
|
||||||
|
bug_url = "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%d" % id
|
||||||
try:
|
try:
|
||||||
raw = self.soap_proxy.get_status(id)
|
raw = self.soap_proxy.get_status(id)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
@ -657,7 +677,7 @@ class Debbugs(IBugtracker):
|
|||||||
status = 'Open'
|
status = 'Open'
|
||||||
return [(id, raw['package'], raw['subject'], raw['severity'], status, '', "%s/%s" % (self.url, id))]
|
return [(id, raw['package'], raw['subject'], raw['severity'], status, '', "%s/%s" % (self.url, id))]
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse data returned by %s bugtracker: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s bugtracker: %s (%s)' % (self.description, e, bug_url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
|
|
||||||
class Mantis(IBugtracker):
|
class Mantis(IBugtracker):
|
||||||
@ -671,14 +691,14 @@ class Mantis(IBugtracker):
|
|||||||
try:
|
try:
|
||||||
raw = self.soap_proxy.mc_issue_get('', "", id)
|
raw = self.soap_proxy.mc_issue_get('', "", id)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
if not raw:
|
if not raw:
|
||||||
raise BugNotFoundError
|
raise BugNotFoundError
|
||||||
try:
|
try:
|
||||||
return [(id, raw['project']['name'], raw['summary'], raw['priority']['name'], raw['resolution']['name'], '', url)]
|
return [(id, raw['project']['name'], raw['summary'], raw['priority']['name'], raw['resolution']['name'], '', url)]
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse data returned by %s bugtracker: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s bugtracker: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
|
|
||||||
# For trac based trackers we get the tab-separated-values format.
|
# For trac based trackers we get the tab-separated-values format.
|
||||||
@ -693,7 +713,7 @@ class Trac(IBugtracker):
|
|||||||
except Exception, e:
|
except Exception, e:
|
||||||
if 'HTTP Error 500' in str(e):
|
if 'HTTP Error 500' in str(e):
|
||||||
raise BugNotFoundError
|
raise BugNotFoundError
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s)' % (self.description, e, bug_url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
raw = raw.replace("\r\n", '\n')
|
raw = raw.replace("\r\n", '\n')
|
||||||
(headers, rest) = raw.split('\n', 1)
|
(headers, rest) = raw.split('\n', 1)
|
||||||
@ -728,7 +748,7 @@ class WikiForms(IBugtracker):
|
|||||||
except Exception, e:
|
except Exception, e:
|
||||||
if 'HTTP Error 404' in str(e):
|
if 'HTTP Error 404' in str(e):
|
||||||
raise BugNotFoundError
|
raise BugNotFoundError
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
for l in bugdata.split("\n"):
|
for l in bugdata.split("\n"):
|
||||||
l2 = l.lower()
|
l2 = l.lower()
|
||||||
@ -752,7 +772,7 @@ class Str(IBugtracker):
|
|||||||
try:
|
try:
|
||||||
bugdata = utils.web.getUrl(url)
|
bugdata = utils.web.getUrl(url)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
for l in bugdata.split("\n"):
|
for l in bugdata.split("\n"):
|
||||||
l2 = l.lower()
|
l2 = l.lower()
|
||||||
@ -791,7 +811,7 @@ class Sourceforge(IBugtracker):
|
|||||||
try:
|
try:
|
||||||
bugdata = utils.web.getUrl(url)
|
bugdata = utils.web.getUrl(url)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
s = 'Could not parse data returned by %s: %s' % (self.description, e)
|
s = 'Could not parse data returned by %s: %s (%s)' % (self.description, e, url)
|
||||||
raise BugtrackerError, s
|
raise BugtrackerError, s
|
||||||
try:
|
try:
|
||||||
reo = sfre.search(bugdata)
|
reo = sfre.search(bugdata)
|
||||||
@ -801,7 +821,6 @@ class Sourceforge(IBugtracker):
|
|||||||
status += ' ' + resolution
|
status += ' ' + resolution
|
||||||
return [(id, None, reo.group('title'), "Pri: %s" % reo.group('priority'), status, reo.group('assignee'),self._sf_url % id)]
|
return [(id, None, reo.group('title'), "Pri: %s" % reo.group('priority'), status, reo.group('assignee'),self._sf_url % id)]
|
||||||
except:
|
except:
|
||||||
raise
|
|
||||||
raise BugNotFoundError
|
raise BugNotFoundError
|
||||||
|
|
||||||
# Introspection is quite cool
|
# Introspection is quite cool
|
||||||
@ -812,6 +831,7 @@ for k in v.keys():
|
|||||||
defined_bugtrackers[k.lower()] = v[k]
|
defined_bugtrackers[k.lower()] = v[k]
|
||||||
|
|
||||||
registerBugtracker('mozilla', 'http://bugzilla.mozilla.org', 'Mozilla', 'bugzilla')
|
registerBugtracker('mozilla', 'http://bugzilla.mozilla.org', 'Mozilla', 'bugzilla')
|
||||||
|
#registerBugtracker('ubuntu', 'http://bugzilla.ubuntu.com', 'Ubuntu', 'bugzilla')
|
||||||
registerBugtracker('ubuntu', 'https://launchpad.net', 'Ubuntu', 'launchpad')
|
registerBugtracker('ubuntu', 'https://launchpad.net', 'Ubuntu', 'launchpad')
|
||||||
registerBugtracker('gnome', 'http://bugzilla.gnome.org', 'Gnome', 'bugzilla')
|
registerBugtracker('gnome', 'http://bugzilla.gnome.org', 'Gnome', 'bugzilla')
|
||||||
registerBugtracker('gnome2', 'http://bugs.gnome.org', 'Gnome', 'bugzilla')
|
registerBugtracker('gnome2', 'http://bugs.gnome.org', 'Gnome', 'bugzilla')
|
||||||
|
@ -9,7 +9,8 @@ but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
GNU General Public License for more details.
|
GNU General Public License for more details.
|
||||||
|
|
||||||
This plugin is a factoid plugin. Here's how to set it up:
|
This plugin used to have package lookup; this was moved to the PackageInfo
|
||||||
|
plugin.
|
||||||
|
|
||||||
Pick a name for your database. A lowercase-only name without spaces is probably
|
Pick a name for your database. A lowercase-only name without spaces is probably
|
||||||
best; this example will use myfactoids as the name. Then create a directory to store
|
best; this example will use myfactoids as the name. Then create a directory to store
|
||||||
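(A purely illustrative sketch, not part of this commit: the data directory and the
per-channel database can be registered through supybot's stock Config plugin, assuming
the registry names match the Encyclopedia plugin's registryValue() calls; the paths,
bot nick and channel below are placeholders:
  mkdir -p /home/bot/factoids
  /msg yourbot config supybot.plugins.Encyclopedia.datadir /home/bot/factoids
  /msg yourbot config channel #yourchannel supybot.plugins.Encyclopedia.database myfactoids
)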
@ -52,5 +53,19 @@ commoncgi.py file from the bzr tree. Make sure you set the variables datadir and
|
|||||||
database in factoids.cgi to the correct values. Also set default_db to the one
|
database in factoids.cgi to the correct values. Also set default_db to the one
|
||||||
you want to show by default.
|
you want to show by default.
|
||||||
|
|
||||||
This plugin used to have package lookup; this was moved to the PackageInfo
|
To get package lookup working, you need to set the variable
|
||||||
plugin.
|
supybot.plugins.encyclopedia.aptdir to the name of a new, empty directory. In
|
||||||
|
this directory, you create sources.list files for every distrorelease you want to
|
||||||
|
search. The name of the file is important, since the filename (without the .list
|
||||||
|
suffix) is the name that is used to refer to the distrorelease.
|
||||||
|
|
||||||
|
Whenever you create a new .list file, it is important to run the update_apt
|
||||||
|
and update_apt_file scripts that come with this plugin. Before you run these,
|
||||||
|
you have to edit them to point to your apt dir. It's also useful to run them
|
||||||
|
periodically from cron (say, once per week for update_apt and once per month for
|
||||||
|
update_apt_file). You also need to reload the plugin to make it pick up the new
|
||||||
|
releases.
|
||||||
|
|
||||||
|
It is very useful to set the supybot.plugins.encyclopedia.searchorder value to a
|
||||||
|
space separated list of release names. That way you can limit the (expensive)
|
||||||
|
searching for packages to a small set of releases.
|
||||||
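(For illustration only; the release names, paths and schedule below are assumptions,
not part of this commit. An aptdir set up as described above could look like:
  /home/bot/aptdir/hardy.list:
    deb http://archive.ubuntu.com/ubuntu hardy main restricted universe multiverse
    deb-src http://archive.ubuntu.com/ubuntu hardy main restricted universe multiverse
  crontab:
    0 4 * * 1  /path/to/update_apt --dir /home/bot/aptdir
    0 4 1 * *  /path/to/update_apt_file --dir /home/bot/aptdir
with supybot.plugins.encyclopedia.searchorder set to something like "hardy intrepid".)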
|
@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/python
|
#!/usr/bin/python
|
||||||
###
|
###
|
||||||
# Copyright (c) 2006,2007 Dennis Kaarsemaker
|
# Copyright (c) 2006,2007 Dennis Kaarsemaker
|
||||||
# Copyright (C) 2008 Terence Simpson <tsimpson@ubuntu.com> (stdin on irc.freenode.net)
|
# Copyright (c) 2008, 2009 Terence Simpson
|
||||||
#
|
#
|
||||||
# This program is free software; you can redistribute it and/or modify
|
# This program is free software; you can redistribute it and/or modify
|
||||||
# it under the terms of version 2 of the GNU General Public License as
|
# it under the terms of version 2 of the GNU General Public License as
|
||||||
@ -16,17 +16,17 @@
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
# This needs to be set to the location of the commoncgi.py file
|
# This needs to be set to the location of the commoncgi.py file
|
||||||
sys.path.append('/var/www/')
|
sys.path.append('/var/www/bot')
|
||||||
from commoncgi import *
|
from commoncgi import *
|
||||||
|
|
||||||
### Variables
|
### Variables
|
||||||
NUM_PER_PAGE=50.0
|
NUM_PER_PAGE=50.0
|
||||||
# Directory of the factoids database
|
# Directory containing the factoid database
|
||||||
datadir = '/home/ubotu/data'
|
datadir = '/home/bot/'
|
||||||
# Database name (without .db)
|
# Database filename (without the .db extension)
|
||||||
default_database = 'ubuntu'
|
default_database = 'ubuntu'
|
||||||
|
|
||||||
#### You shouldn't have to change anything under this line ###
|
### Nothing below this line should be edited unless you know what you're doing ###
|
||||||
|
|
||||||
databases = [x for x in os.listdir(datadir)]
|
databases = [x for x in os.listdir(datadir)]
|
||||||
|
|
||||||
|
@ -1,10 +1,12 @@
|
|||||||
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
|
||||||
|
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<title>Ubotu factoids</title>
|
<title>Ubottu factoids</title>
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||||
<link rel="stylesheet" href="bot.css" />
|
<link rel="favicon" href="favicon.ico" type="image/png" />
|
||||||
<link rel="shortcut icon" href="favicon.ico" type="image/png" />
|
<link rel="stylesheet" href="bot.css" />
|
||||||
<script type="text/javascript">
|
<script type="text/javascript">
|
||||||
var DHTML = (document.getElementById || document.all || document.layers);
|
var DHTML = (document.getElementById || document.all || document.layers);
|
||||||
|
|
||||||
function getObj(name) {
|
function getObj(name) {
|
||||||
@ -36,28 +38,31 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div class="main">
|
<div class="main">
|
||||||
<h1>Ubotu factoids</h1>
|
<h1>Ubotu factoids</h1>
|
||||||
%e
|
%e
|
||||||
<p>
|
<p>
|
||||||
More help: <a href="http://wiki.ubuntu.com/">wiki.ubuntu.com</a> ·
|
More help: <a href="http://wiki.ubuntu.com/">wiki.ubuntu.com</a> ·
|
||||||
<a href="http://help.ubuntu.com/">help.ubuntu.com</a><br />
|
<a href="http://help.ubuntu.com/">help.ubuntu.com</a><br />
|
||||||
More factoids: <a href="factoids.cgi?db=ubuntu">Ubuntu</a> ·
|
More factoids: <a href="factoids.cgi?db=ubuntu">Ubuntu</a> ·
|
||||||
<a href="factoids.cgi?db=falcon">Falcon</a> ·
|
<a href="factoids.cgi?db=buntudot">buntudot</a> ·
|
||||||
<a href="factoids.cgi?db=buntudot">buntudot</a> ·
|
<a href="factoids.cgi?db=gnewsense">GNewSense</a><br />
|
||||||
<a href="factoids.cgi?db=gnewsense">GNewSense</a><br />
|
<form action="factoids.cgi" method="GET">
|
||||||
<form action="factoids.cgi" method="GET"><input class="input" type="text" name="search" />
|
<input class="input" type="text" name="search" />
|
||||||
<input class="input" type="submit" value="Search"></form>
|
<input class="input" type="submit" value="Search">
|
||||||
<p>
|
</form>
|
||||||
%s
|
<p>
|
||||||
</p>
|
%s
|
||||||
<p>
|
</p>
|
||||||
<a href="ubuntu.db">Ubuntu factoid database file</a><br />
|
<p>
|
||||||
©2006 Dennis Kaarsemaker
|
<a href="ubuntu.db">Ubuntu factoid database file</a><br />
|
||||||
</p>
|
©2006 Dennis Kaarsemaker<br/>
|
||||||
</div>
|
Edited by Terence Simpson
|
||||||
</body>
|
</p>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<title>Bot logs</title>
|
<title>Bot logs</title>
|
||||||
<link rel="stylesheet" href="bot.css" />
|
<link rel="stylesheet" href="/bot.css" />
|
||||||
<link rel="shortcut icon" href="favicon.ico" type="image/png" />
|
<link rel="shortcut icon" href="favicon.ico" type="image/png" />
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
|
@ -23,6 +23,11 @@ import supybot.utils as utils
|
|||||||
import supybot.ircutils as ircutils
|
import supybot.ircutils as ircutils
|
||||||
import sys, os, re, md5, random, time
|
import sys, os, re, md5, random, time
|
||||||
|
|
||||||
|
if sys.version_info >= (2, 5, 0):
|
||||||
|
import re
|
||||||
|
else:
|
||||||
|
import sre as re
|
||||||
|
|
||||||
def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.channels):
|
def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.channels):
|
||||||
if ircdb.ignores.checkIgnored(hostmask):
|
if ircdb.ignores.checkIgnored(hostmask):
|
||||||
return True
|
return True
|
||||||
@ -128,6 +133,7 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
def __init__(self, irc):
|
def __init__(self, irc):
|
||||||
callbacks.Plugin.__init__(self, irc)
|
callbacks.Plugin.__init__(self, irc)
|
||||||
self.databases = {}
|
self.databases = {}
|
||||||
|
self.times = {}
|
||||||
self.edits = {}
|
self.edits = {}
|
||||||
self.alert = False
|
self.alert = False
|
||||||
|
|
||||||
@ -180,25 +186,35 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
def get_target(self, nick, text, orig_target):
|
def get_target(self, nick, text, orig_target):
|
||||||
target = orig_target
|
target = orig_target
|
||||||
retmsg = ''
|
retmsg = ''
|
||||||
|
rettext = text[:]
|
||||||
|
hasPipe = False
|
||||||
|
hasRedir = False
|
||||||
|
|
||||||
if text.startswith('tell '):
|
if text.startswith('tell '):
|
||||||
text = ' ' + text
|
text = ' ' + text
|
||||||
|
|
||||||
if '|' in text:
|
if '|' in text and not text.strip().endswith('|'):
|
||||||
if not retmsg:
|
hasPipe = True
|
||||||
retmsg = text[text.find('|')+1:].strip() + ': '
|
retmsg = text[text.find('|')+1:].strip() + ': '
|
||||||
text = text[:text.find('|')].strip()
|
rettext = text[:text.find('|')].strip()
|
||||||
|
|
||||||
if '>' in text:
|
if ' tell ' in text and ' about ' in text:
|
||||||
target = text[text.rfind('>')+1:].strip().split()[0]
|
|
||||||
text = text[:text.rfind('>')].strip()
|
|
||||||
retmsg = "%s wants you to know: " % nick
|
|
||||||
|
|
||||||
elif ' tell ' in text and ' about ' in text:
|
|
||||||
target = text[text.find(' tell ')+6:].strip().split(None,1)[0]
|
target = text[text.find(' tell ')+6:].strip().split(None,1)[0]
|
||||||
text = text[text.find(' about ')+7:].strip()
|
rettext = text[text.find(' about ')+7:].strip()
|
||||||
retmsg = "<%s> wants you to know: " % nick
|
retmsg = "<%s> wants you to know: " % nick
|
||||||
|
|
||||||
|
if '>' in text:
|
||||||
|
if hasPipe:
|
||||||
|
if text.index('|') > text.index('>'):
|
||||||
|
target = text[text.rfind('>')+1:].strip().split()[0]
|
||||||
|
rettext = text[:text.rfind('>')].strip()
|
||||||
|
retmsg = "<%s> wants you to know: " % nick
|
||||||
|
else:
|
||||||
|
target = text[text.rfind('>')+1:].strip().split()[0]
|
||||||
|
rettext = text[:text.rfind('>')].strip()
|
||||||
|
retmsg = "<%s> wants you to know: " % nick
|
||||||
|
|
||||||
|
|
||||||
if target == 'me':
|
if target == 'me':
|
||||||
target = nick
|
target = nick
|
||||||
if target.lower() != orig_target.lower() and target.startswith('#'):
|
if target.lower() != orig_target.lower() and target.startswith('#'):
|
||||||
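(Reading the rewritten get_target() above, with made-up factoid and nick names:
"somefactoid | JohnDoe" keeps the reply in the channel prefixed with "JohnDoe: ",
while "somefactoid > JohnDoe" and "tell JohnDoe about somefactoid" deliver the
factoid to JohnDoe with a "<asker> wants you to know: " prefix.)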
@ -209,7 +225,7 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
target = nick
|
target = nick
|
||||||
retmsg = '(In the future, please use a private message to investigate) '
|
retmsg = '(In the future, please use a private message to investigate) '
|
||||||
|
|
||||||
return (text, target, retmsg)
|
return (rettext, target, retmsg)
|
||||||
|
|
||||||
def get_db(self, channel):
|
def get_db(self, channel):
|
||||||
db = self.registryValue('database',channel)
|
db = self.registryValue('database',channel)
|
||||||
@ -221,7 +237,23 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
self.databases[channel] = sqlite.connect(os.path.join(self.registryValue('datadir'), '%s.db' % db))
|
self.databases[channel] = sqlite.connect(os.path.join(self.registryValue('datadir'), '%s.db' % db))
|
||||||
self.databases[channel].name = db
|
self.databases[channel].name = db
|
||||||
self.databases[channel].time = time.time()
|
self.databases[channel].time = time.time()
|
||||||
#self.log.info(os.path.join(self.registryValue('datadir'), '%s.db' % db))
|
return self.databases[channel]
|
||||||
|
|
||||||
|
def get_log_db(self, channel=None):
|
||||||
|
db = "%s-log" % self.registryValue('database',channel)
|
||||||
|
db_path = os.path.join(self.registryValue('datadir'), "%s.db" % db)
|
||||||
|
if not os.access(db_path, os.R_OK | os.W_OK):
|
||||||
|
self.log.warning("Could not access log database at '%s'" % db_path)
|
||||||
|
return None
|
||||||
|
channel = "%s-log" % channel
|
||||||
|
if channel in self.databases:
|
||||||
|
if self.databases[channel].time < time.time() - 3600 or self.databases[channel].name != db:
|
||||||
|
self.databases[channel].close()
|
||||||
|
self.databases.pop(channel)
|
||||||
|
if channel not in self.databases:
|
||||||
|
self.databases[channel] = sqlite.connect(db_path)
|
||||||
|
self.databases[channel].name = db
|
||||||
|
self.databases[channel].time = time.time()
|
||||||
return self.databases[channel]
|
return self.databases[channel]
|
||||||
|
|
||||||
def addressed(self, recipients, text, irc):
|
def addressed(self, recipients, text, irc):
|
||||||
@ -402,7 +434,7 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
elif term[0] == "seen" or term[0].startswith("seen "):
|
elif term[0] == "seen" or term[0].startswith("seen "):
|
||||||
ret = "I have no seen command"
|
ret = "I have no seen command"
|
||||||
retmsg = term[2] and "%s: " % msg.prefix.split('!', 1)[0] or ''
|
retmsg = term[2] and "%s: " % msg.prefix.split('!', 1)[0] or ''
|
||||||
elif term[0] in ("what", "whats", "what's") or term[0].startswith("what ") or term[0].startswith("what ") or term[0].startswith("whats ") or term[0].startswith("what's "): # Try and catch people saying "what is ...?"
|
elif term[0] in ("what", "whats", "what's") or term[0].startswith("what ") or term[0].startswith("what ") or term[0].startswith("whats ") or term[0].startswith("what's "): # Try and catch people saying "ubottu: what is ...?"
|
||||||
ret = "I am only a bot, please don't think I'm intelligent :)"
|
ret = "I am only a bot, please don't think I'm intelligent :)"
|
||||||
retmsg = term[2]
|
retmsg = term[2]
|
||||||
elif beginswith(lower_text, self.registryValue('ignores', channel)):
|
elif beginswith(lower_text, self.registryValue('ignores', channel)):
|
||||||
@ -418,20 +450,10 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
self.registryValue('relaychannel',channel),private=True)
|
self.registryValue('relaychannel',channel),private=True)
|
||||||
irc.queueMsg(ircmsgs.privmsg(self.registryValue('relaychannel',channel), "In %s, %s said: %s" %
|
irc.queueMsg(ircmsgs.privmsg(self.registryValue('relaychannel',channel), "In %s, %s said: %s" %
|
||||||
(msg.args[0], msg.nick, msg.args[1])))
|
(msg.args[0], msg.nick, msg.args[1])))
|
||||||
|
self.logRequest(msg.args[0], msg.nick, text)
|
||||||
return
|
return
|
||||||
ret = self.factoid_edit(text, channel, msg.prefix)
|
ret = self.factoid_edit(text, channel, msg.prefix)
|
||||||
elif ' is ' in lower_text and '|' in lower_text and lower_text.index('|') > lower_text.index(' is '):
|
elif (' is ' in lower_text and '|' in lower_text and lower_text.index('|') > lower_text.index(' is ')) or (' is ' in lower_text and '|' not in lower_text):
|
||||||
if not capab(msg.prefix, 'editfactoids'):
|
|
||||||
if len(text[:text.find('is')]) > 15:
|
|
||||||
irc.error("I am only a bot, please don't think I'm intelligent :)")
|
|
||||||
else:
|
|
||||||
irc.reply("Your edit request has been forwarded to %s. Thank you for your attention to detail" %
|
|
||||||
self.registryValue('relaychannel',channel),private=True)
|
|
||||||
irc.queueMsg(ircmsgs.privmsg(self.registryValue('relaychannel',channel), "In %s, %s said: %s" %
|
|
||||||
(msg.args[0], msg.nick, msg.args[1])))
|
|
||||||
return
|
|
||||||
ret = self.factoid_add(text, channel, msg.prefix)
|
|
||||||
elif ' is ' in lower_text:
|
|
||||||
if not capab(msg.prefix, 'editfactoids'):
|
if not capab(msg.prefix, 'editfactoids'):
|
||||||
if len(text[:text.find('is')]) > 15:
|
if len(text[:text.find('is')]) > 15:
|
||||||
irc.error("I am only a bot, please don't think I'm intelligent :)")
|
irc.error("I am only a bot, please don't think I'm intelligent :)")
|
||||||
@ -440,6 +462,7 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
self.registryValue('relaychannel',channel),private=True)
|
self.registryValue('relaychannel',channel),private=True)
|
||||||
irc.queueMsg(ircmsgs.privmsg(self.registryValue('relaychannel',channel), "In %s, %s said: %s" %
|
irc.queueMsg(ircmsgs.privmsg(self.registryValue('relaychannel',channel), "In %s, %s said: %s" %
|
||||||
(msg.args[0], msg.nick, msg.args[1])))
|
(msg.args[0], msg.nick, msg.args[1])))
|
||||||
|
self.logRequest(msg.args[0], msg.nick, text)
|
||||||
return
|
return
|
||||||
ret = self.factoid_add(text, channel, msg.prefix)
|
ret = self.factoid_add(text, channel, msg.prefix)
|
||||||
else:
|
else:
|
||||||
@ -456,6 +479,11 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
ret = self.registryValue('notfoundmsg')
|
ret = self.registryValue('notfoundmsg')
|
||||||
if ret.count('%') == ret.count('%s') == 1:
|
if ret.count('%') == ret.count('%s') == 1:
|
||||||
ret = ret % repr(text)
|
ret = ret % repr(text)
|
||||||
|
if channel.lower() == irc.nick.lower():
|
||||||
|
queue(irc, msg.nick, ret)
|
||||||
|
return
|
||||||
|
queue(irc, channel, ret)
|
||||||
|
return
|
||||||
if doChanMsg and channel.lower() != irc.nick.lower() and target[0] != '#': # not /msg
|
if doChanMsg and channel.lower() != irc.nick.lower() and target[0] != '#': # not /msg
|
||||||
if target in irc.state.channels[channel].users:
|
if target in irc.state.channels[channel].users:
|
||||||
queue(irc, channel, "%s, please see my private message" % target)
|
queue(irc, channel, "%s, please see my private message" % target)
|
||||||
@ -608,6 +636,51 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
break
|
break
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
def sanatizeRequest(self, channel, msg):
|
||||||
|
def normalize(s):
|
||||||
|
while s.count("  "):
|
||||||
|
s = s.replace("  ", " ")
|
||||||
|
return s.strip()
|
||||||
|
|
||||||
|
msg = normalize(msg)
|
||||||
|
if msg[0] == self.registryValue('prefixchar', channel):
|
||||||
|
msg = msg[1:]
|
||||||
|
if msg.startswith("no "):
|
||||||
|
msg = msg[3:]
|
||||||
|
if " is " in msg:
|
||||||
|
msg = msg.replace(" is ", " ", 1)
|
||||||
|
(name, msg) = msg.split(None, 1)
|
||||||
|
factoid = self.get_single_factoid(channel, name)
|
||||||
|
oldval = ''
|
||||||
|
if factoid:
|
||||||
|
oldval = factoid.value
|
||||||
|
return (name, msg, oldval)
|
||||||
|
|
||||||
|
def logRequest(self, channel, nick, msg):
|
||||||
|
(name, msg, oldval) = self.sanatizeRequest(channel, msg)
|
||||||
|
if msg.strip() == oldval.strip():
|
||||||
|
return
|
||||||
|
if oldval:
|
||||||
|
self.doLogRequest(0, channel, nick, name, msg, oldval)
|
||||||
|
else:
|
||||||
|
self.doLogRequest(1, channel, nick, name, msg)
|
||||||
|
|
||||||
|
def doLogRequest(self, tp, channel, nick, name, msg, oldval = ''):
|
||||||
|
db = self.get_log_db(channel)
|
||||||
|
if not db:
|
||||||
|
return
|
||||||
|
cur = db.cursor()
|
||||||
|
now = str(datetime.datetime.now(pytz.timezone("UTC")))
|
||||||
|
cur.execute("SELECT value FROM requests WHERE name = %s", name)
|
||||||
|
items = cur.fetchall()
|
||||||
|
if len(items):
|
||||||
|
for item in items:
|
||||||
|
if item[0] == msg:
|
||||||
|
return
|
||||||
|
cur.execute("INSERT INTO requests (type, name, value, oldval, who, date, rank) VALUES (%i, %s, %s, %s, %s, %s, 0)",
|
||||||
|
(int(bool(tp)), name, msg, oldval, nick, now))
|
||||||
|
db.commit()
|
||||||
|
|
||||||
def search_factoid(self, factoid, channel):
|
def search_factoid(self, factoid, channel):
|
||||||
keys = factoid.split()[:5]
|
keys = factoid.split()[:5]
|
||||||
db = self.get_db(channel)
|
db = self.get_db(channel)
|
||||||
@ -667,6 +740,9 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
os.rename(tmp_db, dpath)
|
os.rename(tmp_db, dpath)
|
||||||
try:
|
try:
|
||||||
self.databases[channel].close()
|
self.databases[channel].close()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
self.databases.pop(channel)
|
self.databases.pop(channel)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
@ -711,4 +787,179 @@ class Encyclopedia(callbacks.Plugin):
|
|||||||
|
|
||||||
sync = wrap(sync, [optional("somethingWithoutSpaces")])
|
sync = wrap(sync, [optional("somethingWithoutSpaces")])
|
||||||
|
|
||||||
|
def lookup(self, irc, msg, args, author):
|
||||||
|
"""--Future Command-- [<author>]
|
||||||
|
|
||||||
|
Looks up factoids created or edited by <author>,
|
||||||
|
<author> defaults to you.
|
||||||
|
"""
|
||||||
|
if not capab(msg.prefix, "editfactoids"):
|
||||||
|
irc.error("Sorry, you can't do that")
|
||||||
|
return
|
||||||
|
channel = self.registryValue('database')
|
||||||
|
if not channel:
|
||||||
|
irc.reply("Umm, I don't know")
|
||||||
|
return
|
||||||
|
if not author:
|
||||||
|
author = msg.prefix
|
||||||
|
def isLastEdit(name, id):
|
||||||
|
cur.execute("SELECT MAX(id) FROM log WHERE name=%s", (name,))
|
||||||
|
return int(cur.fetchall()[0][0]) == id
|
||||||
|
author = author.split('!', 1)[0]
|
||||||
|
db = self.get_db(channel)
|
||||||
|
cur = db.cursor()
|
||||||
|
ret = {}
|
||||||
|
log_ret = {}
|
||||||
|
cur.execute("SELECT name,value FROM facts WHERE author LIKE '%s%%'" % (author,))
|
||||||
|
res = cur.fetchall()
|
||||||
|
cur.execute("SELECT id, name, oldvalue FROM log WHERE author LIKE '%s%%'" % (author,))
|
||||||
|
log_res = cur.fetchall()
|
||||||
|
for r in res:
|
||||||
|
val = r[1]
|
||||||
|
d = r[1].startswith('<deleted>')
|
||||||
|
a = r[1].startswith('<alias>')
|
||||||
|
r = r[0]
|
||||||
|
if d:
|
||||||
|
r += '*'
|
||||||
|
if a:
|
||||||
|
r += '@' + val[7:].strip()
|
||||||
|
try:
|
||||||
|
ret[r] += 1
|
||||||
|
except:
|
||||||
|
ret[r] = 1
|
||||||
|
|
||||||
|
for r in log_res:
|
||||||
|
if isLastEdit(r[1], r[0]):
|
||||||
|
val = r[2]
|
||||||
|
d = r[2].startswith('<deleted>')
|
||||||
|
a = r[2].startswith('<alias>')
|
||||||
|
r = r[1]
|
||||||
|
if d:
|
||||||
|
r += '*'
|
||||||
|
if a:
|
||||||
|
r += '@' + val[7:].strip()
|
||||||
|
try:
|
||||||
|
log_ret[r] += 1
|
||||||
|
except:
|
||||||
|
log_ret[r] = 1
|
||||||
|
|
||||||
|
if not ret:
|
||||||
|
rmsg = "Authored: None found"
|
||||||
|
else:
|
||||||
|
rmsg = 'Authored Found: %s' % ', '.join(sorted(ret.keys(), lambda x, y: cmp(ret[x], ret[y]))[:10])
|
||||||
|
if not log_ret:
|
||||||
|
log_rmsg = "Edited: None found"
|
||||||
|
else:
|
||||||
|
log_rmsg = 'Edited Found: %s' % ', '.join(sorted(log_ret.keys(), lambda x, y: cmp(log_ret[x], log_ret[y]))[:10])
|
||||||
|
irc.reply(rmsg)
|
||||||
|
irc.reply(log_rmsg)
|
||||||
|
lookup = wrap(lookup, [optional('otherUser')])
|
||||||
|
|
||||||
|
def ftlogin(self, irc, msg, args):
|
||||||
|
"""--Future Command-- Takes no arguments
|
||||||
|
|
||||||
|
Login to the Factoid Edit System
|
||||||
|
"""
|
||||||
|
user = None
|
||||||
|
if not msg.tagged('identified'):
|
||||||
|
irc.error("Not identified")
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
user = ircdb.users.getUser(msg.prefix)
|
||||||
|
except:
|
||||||
|
irc.error(conf.supybot.replies.incorrectAuthentication())
|
||||||
|
return
|
||||||
|
|
||||||
|
if not capab(msg.prefix, "editfactoids"):
|
||||||
|
irc.error(conf.supybot.replies.noCapability() % "editfactoids")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not user:
|
||||||
|
return
|
||||||
|
|
||||||
|
db = self.get_log_db()
|
||||||
|
if not db:
|
||||||
|
irc.error("Could not open database, contact stdin")
|
||||||
|
return
|
||||||
|
cur = db.cursor()
|
||||||
|
|
||||||
|
sessid = md5.new('%s%s%d' % (msg.prefix, time.time(), random.randint(1,100000))).hexdigest()
|
||||||
|
cur.execute("INSERT INTO sessions (session_id, user, time) VALUES (%s, %s, %d)",
|
||||||
|
(sessid, msg.nick, int(time.mktime(time.gmtime())) ))
|
||||||
|
db.commit()
|
||||||
|
irc.reply("Login at http://jussi01.com/stdin/test/facts.cgi?sessid=%s" % sessid, private=True)
|
||||||
|
|
||||||
|
ftlogin = wrap(ftlogin)
|
||||||
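(Neither the requests table written by doLogRequest() nor the sessions table written
by ftlogin() is created anywhere in this commit. As an assumption-laden sketch, a
matching schema for the *-log database could be:
  CREATE TABLE requests (type INTEGER, name TEXT, value TEXT, oldval TEXT, who TEXT, date TEXT, rank INTEGER);
  CREATE TABLE sessions (session_id TEXT, user TEXT, time INTEGER);
Column names follow the INSERT statements above; the types are guesses.)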
|
|
||||||
|
def ignore(self, irc, msg, args, banmask, expires, channel):
|
||||||
|
"""<hostmask|nick> [<expires>] [<channel>]
|
||||||
|
|
||||||
|
Ignores commands/requests from <hostmask> or <nick>. If <expires> is
|
||||||
|
given, the ignore will expire after that number of seconds. If
|
||||||
|
<channel> is given, the ignore will only apply in that channel.
|
||||||
|
"""
|
||||||
|
if not capab(msg.prefix, "editfactoids"):
|
||||||
|
irc.errorNoCapability("editfactoids")
|
||||||
|
return
|
||||||
|
if channel:
|
||||||
|
c = ircdb.channels.getChannel(channel)
|
||||||
|
c.addIgnore(banmask, expires)
|
||||||
|
ircdb.channels.setChannel(channel, c)
|
||||||
|
irc.replySuccess()
|
||||||
|
else:
|
||||||
|
ircdb.ignores.add(banmask, expires)
|
||||||
|
irc.replySuccess()
|
||||||
|
|
||||||
|
ignore = wrap(ignore, ['hostmask', optional("expiry", 0), optional("channel", None)])
|
||||||
|
|
||||||
|
def unignore(self, irc, msg, args, banmask, channel):
|
||||||
|
"""<hostmask|nick> [<channel>]
|
||||||
|
|
||||||
|
Remove an ignore previously set by @ignore. If <channel> was given
|
||||||
|
in the original @ignore command, it must be given here.
|
||||||
|
"""
|
||||||
|
if not capab(msg.prefix, "editfactoids"):
|
||||||
|
irc.errorNoCapability("editfactoids")
|
||||||
|
return
|
||||||
|
if channel:
|
||||||
|
c = ircdb.channels.getChannel(channel)
|
||||||
|
try:
|
||||||
|
c.removeIgnore(banmask)
|
||||||
|
ircdb.channels.setChannel(channel, c)
|
||||||
|
irc.replySuccess()
|
||||||
|
except KeyError:
|
||||||
|
irc.error('There are no ignores for that hostmask in %s.' % channel)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
ircdb.ignores.remove(banmask)
|
||||||
|
irc.replySuccess()
|
||||||
|
except KeyError:
|
||||||
|
irc.error("%s wasn't in the ignores database." % banmask)
|
||||||
|
|
||||||
|
unignore = wrap(unignore, ['hostmask', optional("channel", None)])
|
||||||
|
|
||||||
|
def ignorelist(self, irc, msg, args, channel):
|
||||||
|
"""<hostmask|nick> [<channel>]
|
||||||
|
|
||||||
|
Lists all ignores set by @ignore. If <channel> is given this will
|
||||||
|
only list ignores set in that channel.
|
||||||
|
"""
|
||||||
|
if not capab(msg.prefix, "editfactoids"):
|
||||||
|
irc.errorNoCapability("editfactoids")
|
||||||
|
return
|
||||||
|
if channel:
|
||||||
|
c = ircdb.channels.getChannel(channel)
|
||||||
|
if len(c.ignores) == 0:
|
||||||
|
irc.reply("I'm not currently ignoring any hostmasks in '%s'" % channel)
|
||||||
|
else:
|
||||||
|
L = sorted(c.ignores)
|
||||||
|
irc.reply(utils.str.commaAndify(map(repr, L)))
|
||||||
|
else:
|
||||||
|
if ircdb.ignores.hostmasks:
|
||||||
|
irc.reply(format('%L', (map(repr,ircdb.ignores.hostmasks))))
|
||||||
|
else:
|
||||||
|
irc.reply("I'm not currently globally ignoring anyone.")
|
||||||
|
|
||||||
|
ignorelist = wrap(ignorelist, [optional("channel", None)])
|
||||||
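(Example usage of the three commands above, with a placeholder hostmask and assuming
'@' is the bot's prefix character; all three require the editfactoids capability:
"@ignore troll!*@example.com 3600 #ubuntu" ignores that mask in #ubuntu for an hour,
"@ignore troll!*@example.com" with the default expiry of 0 adds a global ignore that
does not expire, "@ignorelist #ubuntu" lists the channel's ignores, and
"@unignore troll!*@example.com #ubuntu" removes the channel ignore again.)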
|
|
||||||
Class = Encyclopedia
|
Class = Encyclopedia
|
||||||
|
@ -76,7 +76,7 @@ If errors is True print any errors to stderr, defaults to True
|
|||||||
if e <= s:
|
if e <= s:
|
||||||
break
|
break
|
||||||
# Limit to freenode nicks
|
# Limit to freenode nicks
|
||||||
if 'on network <code style="font-size: 120%">irc.freenode.net</code>' in data[s:e]:
|
if 'freenode' in data[s:e] or 'ubuntu' in data[s:e]:
|
||||||
ircnames.append(data[s:e])
|
ircnames.append(data[s:e])
|
||||||
del data[s:e]
|
del data[s:e]
|
||||||
except:
|
except:
|
||||||
|
@ -165,8 +165,14 @@ launchpad"""
|
|||||||
if not user:
|
if not user:
|
||||||
irc.error(conf.supybot.replies.incorrectAuthentication())
|
irc.error(conf.supybot.replies.incorrectAuthentication())
|
||||||
return
|
return
|
||||||
|
try:
|
||||||
user.addAuth(msg.prefix)
|
user.addAuth(msg.prefix)
|
||||||
ircdb.users.setUser(user, flush=False)
|
except:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
ircdb.users.setUser(user, flush=False)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
irc.replySuccess()
|
irc.replySuccess()
|
||||||
login = wrap(login)
|
login = wrap(login)
|
||||||
|
|
||||||
|
58
Lart/Lart.flat.db
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
000057
|
||||||
|
000001:1150873085.902302,1,'strangles $who with a doohicky mouse cord'
|
||||||
|
000002:1150873161.7318439,1,"""pours hot grits down the front of $who's pants"""
|
||||||
|
000003:1150873175.1154349,1,'beats $who senseless with a 50lb Unix manual'
|
||||||
|
000004:1150873544.0124331,1,'whacks $who with the cluebat'
|
||||||
|
000005:1150873549.5681801,1,"""resizes $who's terminal to 40x24"""
|
||||||
|
000006:1150873567.3416319,1,'drops a truckload of VAXen on $who'
|
||||||
|
000007:1150873584.5915289,1,"""pulls out his louisville slugger and uses $who's head to break the homerun record"""
|
||||||
|
000008:1150873593.953506,1,'stabs $who'
|
||||||
|
000009:1150873602.0038359,1,"""steals $who's mojo"""
|
||||||
|
000010:1150873625.408884,1,'holds $who to the floor and spanks him with a cat-o-nine-tails'
|
||||||
|
000011:1150873635.824439,1,"""installs WindowsME on $who's computer"""
|
||||||
|
000012:1150873676.669996,1,'makes Jack Bauer chase $who'
|
||||||
|
000013:1150873684.938952,1,'pushes the wall down onto $who whilst whistling innocently'
|
||||||
|
000014:1150873708.7875321,1,'--purges $who'
|
||||||
|
000015:1150873726.932162,1,'decapitates $who conan the destroyer style'
|
||||||
|
000016:1150873745.169296,1,"""cats /dev/urandom into $who's ear"""
|
||||||
|
000017:1150873762.182699,1,"""does a little 'renice 20 -u $who'"""
|
||||||
|
000018:1150880977.1693139,1,"""tackles $who, sits on $who and starts scratching at $who's chest"""
|
||||||
|
000019:1151419669.2793391,1,'slaps $who with a soggy sock'
|
||||||
|
000020:1151962663.4319079,1,'drops $who from a helicopter 5 miles in the sky. Without parachute'
|
||||||
|
000021:1152091319.803565,1,'throws $who into /dev/null'
|
||||||
|
000022:1152805300.7266941,1,'chases $who with a big pointy stick'
|
||||||
|
000023:1152920154.2286861,1,'smacks $who with a big clue-by-four'
|
||||||
|
000024:1153087484.0331881,1,'bites $who'
|
||||||
|
000025:1153135286.679605,1,'sends FesterAnvil hurtling through the sky to land on $who'
|
||||||
|
000026:1153600051.46187,1,'shoots $who in the face with a rocket'
|
||||||
|
000027:1153769010.2483661,1,'@$chan:~$ deluser $who'
|
||||||
|
000028:1154013487.0735919,1,'thwacks $who with a BIG POINTY HOBBSЕЕ OF DOOM'
|
||||||
|
000029:1154449886.891526,1,"""tickles $who's feet with a feather"""
|
||||||
|
000030:1154456213.370611,1,'splats $who with a large hammer'
|
||||||
|
000031:1155921496.616538,1,'divides $who by zero'
|
||||||
|
000032:1166737658.117898,1,"""sets $who's keyboard layout to gaelic"""
|
||||||
|
000033:1169714572.779995,7,"""breaks $who's machine by running automatix on it. Twice."""
|
||||||
|
000034:1169826693.3914869,7,'gets the neuraliser out and points it at $who'
|
||||||
|
000035:1170410240.286854,1,'smacks $who with a vista DVD. COOTIES!'
|
||||||
|
000036:1170410622.2356141,1,'spanks $who with a pink tutu'
|
||||||
|
000037:1170410687.610502,1,'shows $who a photo of mneptok: http://tinyurl.com/yv5q8h'
|
||||||
|
000037:1173736572.6347821,1,'forces $who to use perl for 3 weeks'
|
||||||
|
000038:1173736775.8736949,1,'forces $who to use perl for 3 weeks'
|
||||||
|
000039:1173736803.2703841,1,'pokes $who in the eye'
|
||||||
|
000040:1173736823.520009,1,'signs $who up for AOL'
|
||||||
|
000041:1173736843.5446689,1,'enrolls $who in Visual Basic 101'
|
||||||
|
000042:1173736857.85535,1,'judo chops $who'
|
||||||
|
000043:1173736906.7716081,1,'sells $who on E-Bay'
|
||||||
|
000044:1173736913.094003,1,'forces $who to use emacs for 3 weeks'
|
||||||
|
000045:1173736933.924052,1,"""puts alias vim=emacs in $who's /etc/profile"""
|
||||||
|
000046:1173736963.118736,1,'reads $who some vogon poetry'
|
||||||
|
000047:1173736973.826055,1,'puts $who in the Total Perspective Vortex'
|
||||||
|
000048:1173736987.6467011,1,'uses $who as a biological warfare study'
|
||||||
|
000049:1173737029.025836,1,"""pierces $who's nose with a rusty paper hole puncher"""
|
||||||
|
000050:1173737055.204941,1,'pokes $who with a rusty nail'
|
||||||
|
000051:1173889239.086617,1,'files $who under the L for lame'
|
||||||
|
000052:1176764275.6553509,1,'forces $who to talk in reverse polish notation for the rest of the year'
|
||||||
|
000053:1181421701.938509,1,"""slurps up all of $who's memory by installing vista"""
|
||||||
|
000054:1185565575.1513309,1,"""replaces Ubuntu with Windows Vista on $who's PC"""
|
||||||
|
000055:1186343260.1562171,1,"""annihilates $who's hearing by screaming louder than an arnieboy who got pwned by mjg59"""
|
||||||
|
000056:1200150976.9736941,1,'forces $who to write an operating system in Logo'
|
@ -42,13 +42,9 @@ def configure(advanced):
|
|||||||
|
|
||||||
Lart = conf.registerPlugin('Lart')
|
Lart = conf.registerPlugin('Lart')
|
||||||
# This is where your configuration variables (if any) should go. For example:
|
# This is where your configuration variables (if any) should go. For example:
|
||||||
# conf.registerGlobalValue(Lart, 'someConfigVariableName',
|
|
||||||
# registry.Boolean(False, """Help for someConfigVariableName."""))
|
|
||||||
conf.registerChannelValue(Lart, 'showIds',
|
|
||||||
registry.Boolean(False, """Determines whether the bot will show the ids of
|
|
||||||
a lart when the lart is given."""))
|
|
||||||
conf.registerChannelValue(Lart, 'enabled',
|
conf.registerChannelValue(Lart, 'enabled',
|
||||||
registry.Boolean(False, """Mesa want lart!"""))
|
registry.Boolean(False, "Whether or not to enable the LART for the channel"))
|
||||||
|
conf.registerChannelValue(Lart, 'showIds',
|
||||||
|
registry.Boolean(False, "Determines whether the bot will show the ids of a lart when the lart is given."))
|
||||||
|
|
||||||
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
||||||
|
@ -35,10 +35,42 @@ import supybot.plugins as plugins
|
|||||||
import supybot.ircutils as ircutils
|
import supybot.ircutils as ircutils
|
||||||
import supybot.callbacks as callbacks
|
import supybot.callbacks as callbacks
|
||||||
import supybot.ircdb as ircdb
|
import supybot.ircdb as ircdb
|
||||||
import supybot.plugin as plugin
|
|
||||||
import supybot.conf as conf
|
import supybot.conf as conf
|
||||||
import random
|
import random
|
||||||
|
|
||||||
|
def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.channels):
|
||||||
|
if ircdb.ignores.checkIgnored(hostmask):
|
||||||
|
return True
|
||||||
|
try:
|
||||||
|
id = ircdb.users.getUserId(hostmask)
|
||||||
|
user = users.getUser(id)
|
||||||
|
except KeyError:
|
||||||
|
# If there's no user...
|
||||||
|
if ircutils.isChannel(recipient):
|
||||||
|
channel = channels.getChannel(recipient)
|
||||||
|
if channel.checkIgnored(hostmask):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
if user._checkCapability('owner'):
|
||||||
|
# Owners shouldn't ever be ignored.
|
||||||
|
return False
|
||||||
|
elif user.ignore:
|
||||||
|
return True
|
||||||
|
elif recipient:
|
||||||
|
if ircutils.isChannel(recipient):
|
||||||
|
channel = ircdb.channels.getChannel(recipient)
|
||||||
|
if channel.checkIgnored(hostmask):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
class Lart(plugins.ChannelIdDatabasePlugin):
|
class Lart(plugins.ChannelIdDatabasePlugin):
|
||||||
_meRe = re.compile(r'\bme\b', re.I)
|
_meRe = re.compile(r'\bme\b', re.I)
|
||||||
_myRe = re.compile(r'\bmy\b', re.I)
|
_myRe = re.compile(r'\bmy\b', re.I)
|
||||||
@ -64,7 +96,7 @@ class Lart(plugins.ChannelIdDatabasePlugin):
|
|||||||
(target, reason) = map(str.strip, text.split(' for ', 1))
|
(target, reason) = map(str.strip, text.split(' for ', 1))
|
||||||
else:
|
else:
|
||||||
(target, reason) = (text, '')
|
(target, reason) = (text, '')
|
||||||
print target
|
|
||||||
if id is not None:
|
if id is not None:
|
||||||
try:
|
try:
|
||||||
lart = self.db.get(channel, id)
|
lart = self.db.get(channel, id)
|
||||||
@ -78,9 +110,11 @@ class Lart(plugins.ChannelIdDatabasePlugin):
|
|||||||
'for %s.', channel))
|
'for %s.', channel))
|
||||||
return
|
return
|
||||||
text = self._replaceFirstPerson(lart.text, msg.nick)
|
text = self._replaceFirstPerson(lart.text, msg.nick)
|
||||||
if ircutils.strEqual(target, irc.nick) or \
|
formatText = ircutils.stripFormatting(target).lower()
|
||||||
irc.nick.lower() in ircutils.stripFormatting(target).lower() or \
|
if (ircutils.strEqual(target, irc.nick) or 'Evilrockbot' in formatText) and random.uniform(0,100) < 25:
|
||||||
random.uniform(0,100) < 25:
|
target = msg.nick
|
||||||
|
reason = ''
|
||||||
|
elif 'stdin' in formatText or 'tsimpson' in formatText:
|
||||||
target = msg.nick
|
target = msg.nick
|
||||||
reason = ''
|
reason = ''
|
||||||
else:
|
else:
|
||||||
@ -97,7 +131,6 @@ class Lart(plugins.ChannelIdDatabasePlugin):
|
|||||||
irc.reply(text, action=True)
|
irc.reply(text, action=True)
|
||||||
lart = wrap(lart, ['channeldb', optional('id'), 'text'])
|
lart = wrap(lart, ['channeldb', optional('id'), 'text'])
|
||||||
pity = lart
|
pity = lart
|
||||||
slander = lart
|
|
||||||
|
|
||||||
def callPrecedence(self, irc):
|
def callPrecedence(self, irc):
|
||||||
before = []
|
before = []
|
||||||
@ -129,7 +162,6 @@ class Lart(plugins.ChannelIdDatabasePlugin):
|
|||||||
tokens = callbacks.tokenize(s, channel=msg.args[0])
|
tokens = callbacks.tokenize(s, channel=msg.args[0])
|
||||||
self.Proxy(irc, msg, tokens)
|
self.Proxy(irc, msg, tokens)
|
||||||
return msg
|
return msg
|
||||||
# self._callCommand([cmd], irc, msg, [])
|
|
||||||
|
|
||||||
Class = Lart
|
Class = Lart
|
||||||
|
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
import exceptions
|
import exceptions
|
||||||
import warnings
|
import warnings
|
||||||
warnings.filterwarnings("ignore", "apt API not stable yet", exceptions.FutureWarning)
|
warnings.filterwarnings("ignore", "apt API not stable yet", exceptions.FutureWarning)
|
||||||
import commands, os, apt
|
import commands, os, apt, urllib
|
||||||
from email import FeedParser
|
from email import FeedParser
|
||||||
|
|
||||||
def component(arg):
|
def component(arg):
|
||||||
@ -23,11 +23,11 @@ def component(arg):
|
|||||||
|
|
||||||
class Apt:
|
class Apt:
|
||||||
def __init__(self, plugin):
|
def __init__(self, plugin):
|
||||||
os.environ["LANG"] = "C" # Workaround issues with localized package descriptions
|
|
||||||
self.aptdir = plugin.registryValue('aptdir')
|
self.aptdir = plugin.registryValue('aptdir')
|
||||||
self.distros = []
|
self.distros = []
|
||||||
self.plugin = plugin
|
self.plugin = plugin
|
||||||
self.log = plugin.log
|
self.log = plugin.log
|
||||||
|
os.environ["LANG"] = "C"
|
||||||
if self.aptdir:
|
if self.aptdir:
|
||||||
self.distros = [x[:-5] for x in os.listdir(self.aptdir) if x.endswith('.list')]
|
self.distros = [x[:-5] for x in os.listdir(self.aptdir) if x.endswith('.list')]
|
||||||
self.distros.sort()
|
self.distros.sort()
|
||||||
@ -38,21 +38,23 @@ class Apt:
|
|||||||
-o"Dir::Cache=%s/cache"\\
|
-o"Dir::Cache=%s/cache"\\
|
||||||
-o"APT::Architecture=i386"\\
|
-o"APT::Architecture=i386"\\
|
||||||
%%s %%s""" % tuple([self.aptdir]*4)
|
%%s %%s""" % tuple([self.aptdir]*4)
|
||||||
self.aptfilecommand = """apt-file -s %s/%%s.list -c %s/apt-file/%%s -l search %%s""" % tuple([self.aptdir]*2)
|
self.aptfilecommand = """apt-file -s %s/%%s.list -c %s/apt-file/%%s -l search %%s""" % (self.aptdir, self.aptdir)
|
||||||
|
|
||||||
def find(self, pkg, checkdists, filelookup=True):
|
def find(self, pkg, checkdists, filelookup=True):
|
||||||
_pkg = ''.join([x for x in pkg.strip().split(None,1)[0] if x.isalnum() or x in '.-_+'])
|
_pkg = ''.join([x for x in pkg.strip().split(None,1)[0] if x.isalnum() or x in '.-_+'])
|
||||||
distro = checkdists
|
distro = checkdists
|
||||||
if len(pkg.strip().split()) > 1:
|
if len(pkg.strip().split()) > 1:
|
||||||
distro = ''.join([x for x in pkg.strip().split(None,2)[1] if x.isalnum or x in '.-_+'])
|
distro = ''.join([x for x in pkg.strip().split(None,2)[1] if x.isalnum() or x in '.-_+'])
|
||||||
if distro not in self.distros:
|
if distro not in self.distros:
|
||||||
return "%s is not a valid distribution %s" % (distro, self.distros)
|
return "%s is not a valid distribution: %s" % (distro, ", ".join(self.distros))
|
||||||
pkg = _pkg
|
pkg = _pkg
|
||||||
|
|
||||||
data = commands.getoutput(self.aptcommand % (distro, distro, distro, 'search -n', pkg))
|
data = commands.getoutput(self.aptcommand % (distro, distro, distro, 'search -n', pkg))
|
||||||
|
#self.log.info("command output: %r" % data)
|
||||||
if not data:
|
if not data:
|
||||||
if filelookup:
|
if filelookup:
|
||||||
data = commands.getoutput(self.aptfilecommand % (distro, distro, pkg)).split()
|
data = commands.getoutput(self.aptfilecommand % (distro, distro, pkg)).split()
|
||||||
|
#self.log.info("command output: %r" % ' '.join(data))
|
||||||
if data:
|
if data:
|
||||||
if data[0] == 'sh:': # apt-file isn't installed
|
if data[0] == 'sh:': # apt-file isn't installed
|
||||||
self.log.error("apt-file is not installed")
|
self.log.error("apt-file is not installed")
|
||||||
@ -61,7 +63,7 @@ class Apt:
|
|||||||
self.log.error("Please run the 'update_apt_file' script")
|
self.log.error("Please run the 'update_apt_file' script")
|
||||||
return "Cache out of date, please contact the administrator"
|
return "Cache out of date, please contact the administrator"
|
||||||
if data[0] == "Use" and data[1] == "of":
|
if data[0] == "Use" and data[1] == "of":
|
||||||
url = "http://packages.ubuntu.com/search?searchon=contents&keywords=%s&mode=&suite=%s&arch=any" % (urllib.quote(pkg),distro)
|
url = "http://packages.ubuntu.com/search?searchon=contents&keywords=%s&mode=&suite=%s&arch=any" % (urllib.quote(pkg), distro)
|
||||||
return url
|
return url
|
||||||
if len(data) > 5:
|
if len(data) > 5:
|
||||||
return "File %s found in %s (and %d others)" % (pkg, ', '.join(data[:5]), len(data)-5)
|
return "File %s found in %s (and %d others)" % (pkg, ', '.join(data[:5]), len(data)-5)
|
||||||
@ -75,6 +77,8 @@ class Apt:
|
|||||||
return "Found: %s" % ', '.join(pkgs[:5])
|
return "Found: %s" % ', '.join(pkgs[:5])
|
||||||
|
|
||||||
def info(self, pkg, checkdists):
|
def info(self, pkg, checkdists):
|
||||||
|
if not pkg.strip():
|
||||||
|
return ''
|
||||||
_pkg = ''.join([x for x in pkg.strip().split(None,1)[0] if x.isalnum() or x in '.-_+'])
|
_pkg = ''.join([x for x in pkg.strip().split(None,1)[0] if x.isalnum() or x in '.-_+'])
|
||||||
distro = checkdists
|
distro = checkdists
|
||||||
if len(pkg.strip().split()) > 1:
|
if len(pkg.strip().split()) > 1:
|
||||||
@ -82,7 +86,7 @@ class Apt:
|
|||||||
if not distro:
|
if not distro:
|
||||||
distro = checkdists
|
distro = checkdists
|
||||||
if distro not in self.distros:
|
if distro not in self.distros:
|
||||||
return "%s is not a valid distribution %s" % (distro, self.distros)
|
return "%r is not a valid distribution: %s" % (distro, ", ".join(self.distros))
|
||||||
|
|
||||||
checkdists = distro
|
checkdists = distro
|
||||||
|
|
||||||
@ -104,6 +108,8 @@ class Apt:
|
|||||||
if type(p) == type(""):
|
if type(p) == type(""):
|
||||||
self.log.error("apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
|
self.log.error("apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
|
||||||
return "Package lookup failed"
|
return "Package lookup failed"
|
||||||
|
if not p.get("Version", None):
|
||||||
|
continue
|
||||||
if apt.VersionCompare(maxp['Version'], p['Version']) < 0:
|
if apt.VersionCompare(maxp['Version'], p['Version']) < 0:
|
||||||
maxp = p
|
maxp = p
|
||||||
del parser
|
del parser
|
||||||
@ -118,6 +124,8 @@ class Apt:
|
|||||||
if type(p) == type(""):
|
if type(p) == type(""):
|
||||||
self.log.error("apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
|
self.log.error("apt returned an error, do you have the deb-src URLs in %s.list?" % distro)
|
||||||
return "Package lookup failed"
|
return "Package lookup failed"
|
||||||
|
if not p['Version']:
|
||||||
|
continue
|
||||||
if apt.VersionCompare(maxp2['Version'], p['Version']) < 0:
|
if apt.VersionCompare(maxp2['Version'], p['Version']) < 0:
|
||||||
maxp2 = p
|
maxp2 = p
|
||||||
del parser
|
del parser
|
||||||
@ -130,3 +138,40 @@ class Apt:
|
|||||||
maxp['Priority'], maxp['Version'], distro, int(maxp['Size'])/1024, maxp['Installed-Size'], archs))
|
maxp['Priority'], maxp['Version'], distro, int(maxp['Size'])/1024, maxp['Installed-Size'], archs))
|
||||||
return 'Package %s does not exist in %s' % (pkg, checkdists)
|
return 'Package %s does not exist in %s' % (pkg, checkdists)
|
||||||
|
|
||||||
|
|
||||||
|
# Simple test
|
||||||
|
if __name__ == "__main__":
|
||||||
|
import sys
|
||||||
|
argv = sys.argv
|
||||||
|
argc = len(argv)
|
||||||
|
if argc == 1:
|
||||||
|
print "Need at least one arg"
|
||||||
|
sys.exit(1)
|
||||||
|
if argc > 3:
|
||||||
|
print "Only takes 2 args"
|
||||||
|
sys.exit(1)
|
||||||
|
class FakePlugin:
|
||||||
|
class FakeLog:
|
||||||
|
def error(*args, **kwargs):
|
||||||
|
pass
|
||||||
|
def __init__(self):
|
||||||
|
self.log = self.FakeLog()
|
||||||
|
def registryValue(self, *args, **kwargs):
|
||||||
|
return "/home/jussi/bot/aptdir"
|
||||||
|
|
||||||
|
command = argv[1].split(None, 1)[0]
|
||||||
|
try:
|
||||||
|
lookup = argv[1].split(None, 1)[1]
|
||||||
|
except:
|
||||||
|
print "Need something to lookup"
|
||||||
|
sys.exit(1)
|
||||||
|
dists = "hardy"
|
||||||
|
if argc == 3:
|
||||||
|
dists = argv[2]
|
||||||
|
plugin = FakePlugin()
|
||||||
|
aptlookup = Apt(plugin)
|
||||||
|
if command == "find":
|
||||||
|
print aptlookup.find(lookup, dists)
|
||||||
|
else:
|
||||||
|
print aptlookup.info(lookup, dists)
|
||||||
|
|
||||||
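(The __main__ block above can be exercised from a shell, assuming the file is
PackageInfo/packages.py and the aptdir hard-coded in FakePlugin.registryValue() exists:
  python packages.py "find gimp" hardy
  python packages.py "info gimp" intrepid
The command and the package name go in one quoted argument, since argv[1] is split on
whitespace inside the script.)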
|
@ -32,10 +32,11 @@ import supybot.utils as utils
|
|||||||
from supybot.commands import *
|
from supybot.commands import *
|
||||||
import supybot.plugins as plugins
|
import supybot.plugins as plugins
|
||||||
import supybot.ircutils as ircutils
|
import supybot.ircutils as ircutils
|
||||||
|
import supybot.ircmsgs as ircmsgs
|
||||||
import supybot.callbacks as callbacks
|
import supybot.callbacks as callbacks
|
||||||
import supybot.ircutils as ircutils
|
import supybot.ircutils as ircutils
|
||||||
import supybot.conf as conf
|
|
||||||
import supybot.ircdb as ircdb
|
import supybot.ircdb as ircdb
|
||||||
|
import supybot.conf as conf
|
||||||
import os
|
import os
|
||||||
import packages
|
import packages
|
||||||
reload(packages)
|
reload(packages)
|
||||||
@ -47,7 +48,6 @@ def get_user(msg):
|
|||||||
return False
|
return False
|
||||||
return user
|
return user
|
||||||
|
|
||||||
|
|
||||||
class PackageInfo(callbacks.Plugin):
|
class PackageInfo(callbacks.Plugin):
|
||||||
"""Lookup package information via apt-cache/apt-file"""
|
"""Lookup package information via apt-cache/apt-file"""
|
||||||
threaded = True
|
threaded = True
|
||||||
@ -65,87 +65,146 @@ class PackageInfo(callbacks.Plugin):
|
|||||||
return (before, [])
|
return (before, [])
|
||||||
|
|
||||||
def __getRelease(self, irc, release, channel, doError=True):
|
def __getRelease(self, irc, release, channel, doError=True):
|
||||||
if release:
|
defaultRelease = self.registryValue("defaultRelease", channel)
|
||||||
return release
|
if not defaultRelease:
|
||||||
release = self.registryValue("defaultRelease", channel)
|
|
||||||
if not release:
|
|
||||||
if doError:
|
if doError:
|
||||||
irc.error("'supybot.plugins.PackageInfo.defaultRelease' is not set")
|
irc.error("'supybot.plugins.PackageInfo.defaultRelease' is not set")
|
||||||
return None
|
return (None, None)
|
||||||
return release
|
if not release:
|
||||||
|
return (defaultRelease, None)
|
||||||
|
(release, rest) = (release.split(' ', 1) + [None])[:2]
|
||||||
|
if release[0] in ('|', '>'):
|
||||||
|
return (defaultRelease, "%s %s" % (release, rest))
|
||||||
|
return (release, rest)
|
||||||
|
|
||||||
def __getChannel(self, channel):
|
def __getChannel(self, channel):
|
||||||
return ircutils.isChannel(channel) and channel or None
|
return ircutils.isChannel(channel) and channel or None
|
||||||
|
|
||||||
def info(self, irc, msg, args, package, release):
|
def real_info(self, irc, msg, args, package, release):
|
||||||
"""<package> [<release>]
|
"""<package> [<release>]
|
||||||
|
|
||||||
Lookup information for <package>, optionally in <release>
|
Lookup information for <package>, optionally in <release>
|
||||||
"""
|
"""
|
||||||
channel = self.__getChannel(msg.args[0])
|
channel = self.__getChannel(msg.args[0])
|
||||||
release = self.__getRelease(irc, release, channel)
|
(release, rest) = self.__getRelease(irc, release, channel)
|
||||||
if not release:
|
if not release:
|
||||||
return
|
return
|
||||||
irc.reply(self.Apt.info(package, release))
|
reply = self.Apt.info(package, release)
|
||||||
|
if rest:
|
||||||
|
if rest[0] == '|':
|
||||||
|
try:
|
||||||
|
target = rest.split()[1]
|
||||||
|
if target.lower() == "me":
|
||||||
|
target = msg.nick
|
||||||
|
irc.reply("%s: %s" % (target, reply))
|
||||||
|
return
|
||||||
|
except Exception, e:
|
||||||
|
self.log.info("Info: Exception in pipe: %r" % e)
|
||||||
|
pass
|
||||||
|
elif rest[0] == '>':
|
||||||
|
try:
|
||||||
|
target = rest.split()[1]
|
||||||
|
if target.lower() == "me":
|
||||||
|
target = msg.nick
|
||||||
|
irc.queueMsg(ircmsgs.privmsg(target, "<%s> wants you to know: %s" % (msg.nick, reply)))
|
||||||
|
return
|
||||||
|
except Exception, e:
|
||||||
|
self.log.info("Info: Exception in redirect: %r" % e)
|
||||||
|
pass
|
||||||
|
|
||||||
info = wrap(info, ['text', optional('text')])
|
irc.reply(reply)
|
||||||
|
|
||||||
def find(self, irc, msg, args, package, release):
|
info = wrap(real_info, ['anything', optional('text')])
|
||||||
|
|
||||||
|
def real_find(self, irc, msg, args, package, release):
|
||||||
"""<package/filename> [<release>]
|
"""<package/filename> [<release>]
|
||||||
|
|
||||||
Search for <package> or, if that fails, find <filename>'s package(s).
|
Search for <package> or, if that fails, find <filename>'s package(s).
|
||||||
Optionally in <release>
|
Optionally in <release>
|
||||||
"""
|
"""
|
||||||
channel = self.__getChannel(msg.args[0])
|
channel = self.__getChannel(msg.args[0])
|
||||||
release = self.__getRelease(irc, release, channel)
|
(release, rest) = self.__getRelease(irc, release, channel)
|
||||||
if not release:
|
if not release:
|
||||||
return
|
return
|
||||||
irc.reply(self.Apt.find(package, release))
|
reply = self.Apt.find(package, release)
|
||||||
|
if rest:
|
||||||
|
if rest[0] == '|':
|
||||||
|
try:
|
||||||
|
target = rest.split()[1]
|
||||||
|
if target.lower() == "me":
|
||||||
|
target = msg.nick
|
||||||
|
irc.reply("%s: %s" % (target, reply))
|
||||||
|
return
|
||||||
|
except Exception, e:
|
||||||
|
self.log.info("Find: Exception in pipe: %r" % e)
|
||||||
|
pass
|
||||||
|
elif rest[0] == '>':
|
||||||
|
try:
|
||||||
|
target = rest.split()[1]
|
||||||
|
if target.lower() == "me":
|
||||||
|
target = msg.nick
|
||||||
|
irc.queueMsg(ircmsgs.privmsg(target, "<%s> wants you to know: %s" % (msg.nick, reply)))
|
||||||
|
return
|
||||||
|
except Exception, e:
|
||||||
|
self.log.info("Find: Exception in redirect: %r" % e)
|
||||||
|
pass
|
||||||
|
|
||||||
find = wrap(find, ['text', optional('text')])
|
irc.reply(reply)
|
||||||
|
|
||||||
|
find = wrap(real_find, ['anything', optional('text')])
|
||||||
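(Illustrative only, with made-up package and nick names and assuming '!' is the
configured prefixchar: "!info gimp hardy" looks the package up in the hardy list,
"!find libgl1-mesa-glx | SomeNick" answers in the channel addressed to SomeNick,
"!info gimp > SomeNick" sends the result to SomeNick in a private message, and
"me" can be used in place of a nick to target yourself.)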
|
|
||||||
def privmsg(self, irc, msg, user):
|
def privmsg(self, irc, msg, user):
|
||||||
text = msg.args[1]
|
channel = self.__getChannel(msg.args[0])
|
||||||
release = self.__getRelease(irc, None, channel, False)
|
text = msg.args[1].strip()
|
||||||
if text[0] == self.registryValue("prefixchar"):
|
if text[0] == self.registryValue("prefixchar"):
|
||||||
text = text[1:]
|
text = text[1:]
|
||||||
if user and text[0] in str(conf.supybot.reply.whenAddressedBy.get('chars')):
|
if user and text[0] in str(conf.supybot.reply.whenAddressedBy.get('chars')):
|
||||||
return
|
return
|
||||||
if text[:4] == "find":
|
(cmd, rest) = (text.split(' ', 1) + [None])[:2]
|
||||||
irc.reply(self.Apt.find(text[4:].strip(), release))
|
if cmd not in ("find", "info"):
|
||||||
|
return
|
||||||
|
if not rest:
|
||||||
|
return
|
||||||
|
(term, rest) = (rest.split(' ', 1) + [None])[:2]
|
||||||
|
if cmd == "find":
|
||||||
|
self.real_find(irc, msg, [], term, rest)
|
||||||
else:
|
else:
|
||||||
irc.reply(self.Apt.info(text[4:].strip(), release))
|
self.real_info(irc, msg, [], term, rest)
|
||||||
|
|
||||||
def chanmsg(self, irc, msg, user):
|
def chanmsg(self, irc, msg, user):
|
||||||
channel = self.__getChannel(msg.args[0])
|
channel = self.__getChannel(msg.args[0])
|
||||||
text = msg.args[1]
|
text = msg.args[1].strip()
|
||||||
release = self.__getRelease(irc, None, channel, False)
|
|
||||||
if text[0] != self.registryValue("prefixchar", channel):
|
if text[0] != self.registryValue("prefixchar", channel):
|
||||||
return
|
return
|
||||||
text = text[1:]
|
text = text[1:]
|
||||||
if not text[:4] in ("find", "info"):
|
(cmd, rest) = (text.split(' ', 1) + [None])[:2]
|
||||||
|
if cmd not in ("find", "info"):
|
||||||
return
|
return
|
||||||
if text[:4] == "find":
|
if not rest:
|
||||||
irc.reply(self.Apt.find(text[4:].strip(), release))
|
return
|
||||||
|
(term, rest) = (rest.split(' ', 1) + [None])[:2]
|
||||||
|
if cmd == "find":
|
||||||
|
self.real_find(irc, msg, [], term, rest)
|
||||||
else:
|
else:
|
||||||
irc.reply(self.Apt.info(text[4:].strip(), release))
|
self.real_info(irc, msg, [], term, rest)
|
||||||
|
|
||||||
def doPrivmsg(self, irc, msg):
|
def doPrivmsg(self, irc, msg):
|
||||||
if chr(1) in msg.args[1]: # CTCP
|
if chr(1) in msg.args[1]: # CTCP
|
||||||
return
|
return
|
||||||
|
if not msg.args[1]:
|
||||||
|
return
|
||||||
channel = self.__getChannel(msg.args[0])
|
channel = self.__getChannel(msg.args[0])
|
||||||
if not self.registryValue("enabled", channel):
|
if not self.registryValue("enabled", channel):
|
||||||
return
|
return
|
||||||
user = get_user(msg)
|
user = get_user(msg)
|
||||||
if channel:
|
if channel:
|
||||||
self.chanmsg(irc, msg, user)
|
self.chanmsg(irc, msg, user)
|
||||||
elif user:
|
else:
|
||||||
return
|
if user:
|
||||||
|
return
|
||||||
self.privmsg(irc, msg, user)
|
self.privmsg(irc, msg, user)
|
||||||
|
|
||||||
def inFilter(self, irc, msg):
|
def inFilter(self, irc, msg):
|
||||||
if not conf.supybot.get("defaultIgnore"):
|
|
||||||
return msg
|
|
||||||
if msg.command != "PRIVMSG":
|
if msg.command != "PRIVMSG":
|
||||||
return msg
|
return msg
|
||||||
text = msg.args[1]
|
text = msg.args[1]
|
||||||
@ -154,7 +213,7 @@ class PackageInfo(callbacks.Plugin):
|
|||||||
return msg
|
return msg
|
||||||
channel = self.__getChannel(msg.args[0])
|
channel = self.__getChannel(msg.args[0])
|
||||||
if channel:
|
if channel:
|
||||||
if text[:5] not in ("!info", "!find", "@info", "@find"):
|
if not text[:5] in ("!info", "!find", "@info", "@find"):
|
||||||
return msg
|
return msg
|
||||||
else:
|
else:
|
||||||
if text[:5] in ("info ", "find ", "!info", "!find", "@info", "@find"):
|
if text[:5] in ("info ", "find ", "!info", "!find", "@info", "@find"):
|
||||||
|
31
PackageInfo/update_apt
Executable file → Normal file
@ -1,6 +1,33 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
DIR=/home/bot/data/apt
|
DIR=/home/bot/aptdir
|
||||||
|
|
||||||
|
DEFAULT_OPTS="-qq"
|
||||||
|
|
||||||
|
while [ "x$1" != "x" ]; do
|
||||||
|
case "$1" in
|
||||||
|
-v|--verbose)
|
||||||
|
DEFAULT_OPTS=""
|
||||||
|
;;
|
||||||
|
-d|--dir)
|
||||||
|
if [ "x$2" == "x" ]; then
|
||||||
|
echo "\"-d|--dir\" requires an argument" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
DIR="$1"
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
echo "Unknown option \"$1\"" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "This script takes no non-option arguments" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
|
||||||
for DIST in "$DIR"/*.list; do
|
for DIST in "$DIR"/*.list; do
|
||||||
test -h $DIST && continue
|
test -h $DIST && continue
|
||||||
@@ -8,7 +35,7 @@ for DIST in "$DIR"/*.list; do
DIST=${DIST/.list}
|
DIST=${DIST/.list}
|
||||||
touch "$DIR/$DIST.status"
|
touch "$DIR/$DIST.status"
|
||||||
mkdir -p "$DIR/$DIST/partial"
|
mkdir -p "$DIR/$DIST/partial"
|
||||||
apt-get -qq -o "Dir::State::Lists=$DIR/$DIST" \
|
apt-get ${DEFAULT_OPTS} -o "Dir::State::Lists=$DIR/$DIST" \
|
||||||
-o "Dir::etc::sourcelist=$DIR/$DIST.list" \
|
-o "Dir::etc::sourcelist=$DIR/$DIST.list" \
|
||||||
-o "Dir::State::status=$DIR/$DIST.status" \
|
-o "Dir::State::status=$DIR/$DIST.status" \
|
||||||
-o "Dir::Cache=$DIR/cache" \
|
-o "Dir::Cache=$DIR/cache" \
|
||||||
|
35
PackageInfo/update_apt_file
Executable file → Normal file
@@ -5,14 +5,45 @@ if [ -x "$(which apt-file)" ]; then
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
DIR=/home/bot/data/apt
|
DIR=/home/bot/aptdir
|
||||||
|
VERBOSE="no"
|
||||||
|
|
||||||
|
while [ "x$1" != "x" ]; do
|
||||||
|
case "$1" in
|
||||||
|
-v|--verbose)
|
||||||
|
VERBOSE="yes"
|
||||||
|
;;
|
||||||
|
-d|--dir)
|
||||||
|
if [ "x$2" == "x" ]; then
|
||||||
|
echo "\"-d|--dir\" requires an argument" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
DIR="$1"
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
echo "Unknown option \"$1\"" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "This script takes no arguments" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
|
||||||
for DIST in "$DIR"/*.list; do
|
for DIST in "$DIR"/*.list; do
|
||||||
test -h $DIST && continue
|
test -h $DIST && continue
|
||||||
DIST=${DIST:${#DIR}}
|
DIST=${DIST:${#DIR}}
|
||||||
DIST=${DIST/.list}
|
DIST=${DIST/.list}
|
||||||
mkdir -p "$DIR/apt-file/$DIST"
|
mkdir -p "$DIR/apt-file/$DIST"
|
||||||
apt-file -l -c "$DIR/apt-file/$DIST" -s "$DIR/$DIST.list" update >/dev/null 2>&1
|
if [ "${VERBOSE}" != "no" ]; then
|
||||||
|
echo "Processing $DIST"
|
||||||
|
apt-file -l -c "$DIR/apt-file/$DIST" -s "$DIR/$DIST.list" update
|
||||||
|
else
|
||||||
|
apt-file -l -c "$DIR/apt-file/$DIST" -s "$DIR/$DIST.list" update >/dev/null 2>&1
|
||||||
|
fi
|
||||||
RET=$?
|
RET=$?
|
||||||
if [ $RET -ne 0 ]; then
|
if [ $RET -ne 0 ]; then
|
||||||
echo "apt-file failed for $DIST!"
|
echo "apt-file failed for $DIST!"
|
||||||
|
182
Webcal/cal.ical
Normal file
@@ -0,0 +1,182 @@
|
BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
METHOD:PUBLISH
|
||||||
|
X-WR-CALNAME:The Fridge | October 17\, 2008 - December 16\, 2008
|
||||||
|
PRODID:-//strange bird labs//Drupal iCal API//EN
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081017T190000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081017T210000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1656
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1656
|
||||||
|
SUMMARY:Tunisian LoCo Team IRC Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-tn<br />
|
||||||
|
Agenda\: Team participation to SFD Tunisia 2008.</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081018T130000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081018T150000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1571
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1571
|
||||||
|
SUMMARY:Xubuntu Community Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting<br />
|
||||||
|
Agenda\: https\://wiki.ubuntu.com/Xubuntu/Meetings</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081021T110000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081021T130000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1558
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1558
|
||||||
|
SUMMARY:Community Council Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting<br />
|
||||||
|
Agenda\: <a href=\\"https\://wiki.ubuntu.com/CommunityCouncilAgenda\\">https\://wiki.ubuntu.com/CommunityCouncilAgenda</a></p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081021T110000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081021T120000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1678
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1678
|
||||||
|
SUMMARY:Asia Oceania Membership Board Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081021T140000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081021T160000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1662
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1662
|
||||||
|
SUMMARY:Technical Board Meeting
|
||||||
|
DESCRIPTION:<ul>
|
||||||
|
<li><strong>Agenda\:</strong> <a href=\\"https\://wiki.ubuntu.com/TechnicalBoardAgenda\\">https\://wiki.ubuntu.com/TechnicalBoardAgenda</a></li>
|
||||||
|
<li><strong>Location\:</strong> #ubuntu-meeting</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081021T150000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081021T160000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1681
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1681
|
||||||
|
SUMMARY:Server Team Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting on IRC<br />
|
||||||
|
Agenda\: https\://wiki.ubuntu.com/ServerTeam/Meeting</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081021T170000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081021T180000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1683
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1683
|
||||||
|
SUMMARY:Kernel Team Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting in IRC<br />
|
||||||
|
Agenda\: Not listed as of publication</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081022T230000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081023T000000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1667
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1667
|
||||||
|
SUMMARY:Forum Council Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081028T160000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081028T170000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1682
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1682
|
||||||
|
SUMMARY:Server Team Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting on IRC<br />
|
||||||
|
Agenda\: https\://wiki.ubuntu.com/ServerTeam/Meeting</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081028T170000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081028T180000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1684
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1684
|
||||||
|
SUMMARY:Kernel Team Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting in IRC<br />
|
||||||
|
Agenda\: Not listed as of publication</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081104T140000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081104T140000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1663
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1663
|
||||||
|
SUMMARY:Technical Board Meeting
|
||||||
|
DESCRIPTION:<ul>
|
||||||
|
<li><strong>Agenda\:</strong> <a href=\\"https\://wiki.ubuntu.com/TechnicalBoardAgenda\\">https\://wiki.ubuntu.com/TechnicalBoardAgenda</a></li>
|
||||||
|
<li><strong>Location\:</strong> #ubuntu-meeting</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081104T210000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081104T230000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1553
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1553
|
||||||
|
SUMMARY:Community Council Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting<br />
|
||||||
|
Agenda\: <a href=\\"https\://wiki.ubuntu.com/CommunityCouncilAgenda\\">https\://wiki.ubuntu.com/CommunityCouncilAgenda</a></p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081106T000000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081106T010000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1547
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1547
|
||||||
|
SUMMARY:Maryland LoCo IRC Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-us-md</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081118T110000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081118T130000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1559
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1559
|
||||||
|
SUMMARY:Community Council Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting<br />
|
||||||
|
Agenda\: <a href=\\"https\://wiki.ubuntu.com/CommunityCouncilAgenda\\">https\://wiki.ubuntu.com/CommunityCouncilAgenda</a></p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081202T210000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081202T230000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1554
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1554
|
||||||
|
SUMMARY:Community Council Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-meeting<br />
|
||||||
|
Agenda\: <a href=\\"https\://wiki.ubuntu.com/CommunityCouncilAgenda\\">https\://wiki.ubuntu.com/CommunityCouncilAgenda</a></p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTAMP;VALUE=DATE:20081017T202549Z
|
||||||
|
DTSTART;VALUE=DATE-TIME:20081204T000000Z
|
||||||
|
DTEND;VALUE=DATE-TIME:20081204T010000Z
|
||||||
|
UID:http://fridge.ubuntu.com/node/1548
|
||||||
|
URL;VALUE=URI:http://fridge.ubuntu.com/node/1548
|
||||||
|
SUMMARY:Maryland LoCo IRC Meeting
|
||||||
|
DESCRIPTION:<p>Location\: #ubuntu-us-md</p>
|
||||||
|
|
||||||
|
END:VEVENT
|
||||||
|
END:VCALENDAR
|
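Webcal/ical.py below reads feeds in exactly this format through the bundled icalendar package; a minimal sketch of that round trip (assumes cal.ical is readable from the working directory):

from icalendar import Calendar

data = open('cal.ical').read()
cal = Calendar.from_string(data)        # same entry point ICalReader.readEvents uses
for event in cal.walk('vevent'):
    print event['summary'], event['dtstart'].dt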
299
Webcal/ical.py
@@ -1,88 +1,101 @@
#!/usr/bin/python
|
#!/usr/bin/python
|
||||||
# Slightly modified version of the iCal module found at
|
import sys, os
|
||||||
# http://www.random-ideas.net/Software/iCal_Module
|
sys.path.append(os.path.dirname(__file__))
|
||||||
# Original file doesn't come with a license but is public domain according to
|
from icalendar import Calendar, cal, prop
|
||||||
# above website
|
from dateutil import tz as tzmod
|
||||||
|
from cStringIO import StringIO
|
||||||
import os
|
import pytz
|
||||||
import os.path
|
import urllib2
|
||||||
import re
|
|
||||||
import datetime
|
import datetime
|
||||||
import time
|
import rruler
|
||||||
import pytz # pytz can be found on http://pytz.sourceforge.net
|
|
||||||
|
DEB_OBJ = None
|
||||||
|
|
||||||
SECONDS_PER_DAY=24*60*60
|
SECONDS_PER_DAY=24*60*60
|
||||||
def seconds(timediff):
|
def seconds(timediff):
|
||||||
return SECONDS_PER_DAY * timediff.days + timediff.seconds
|
return SECONDS_PER_DAY * timediff.days + timediff.seconds
|
||||||
|
|
||||||
class ICalReader:
|
def toTz(date, tz):
|
||||||
|
assert isinstance(tz, datetime.tzinfo), "tz must be a tzinfo type"
|
||||||
|
if isinstance(date, datetime.datetime):
|
||||||
|
try:
|
||||||
|
return date.astimezone(tz)
|
||||||
|
except:
|
||||||
|
return datetime.datetime.combine(date.date(), datetime.time(date.time().hour, date.time().minute, date.time().second, tzinfo=tz))
|
||||||
|
elif isinstance(date, datetime.date):
|
||||||
|
return datetime.datetime.combine(date, datetime.time(0, 0, 0, tzinfo=tz))
|
||||||
|
|
||||||
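A standalone illustration of the new toTz helper (assumes toTz as defined above is in scope; the values are hypothetical):

import datetime, pytz

tz = pytz.timezone('Europe/Amsterdam')
aware = datetime.datetime(2008, 10, 21, 11, 0, tzinfo=pytz.UTC)
print toTz(aware, tz)                          # datetime input: shifted into the target zone
print toTz(datetime.date(2008, 10, 21), tz)    # plain date input: promoted to midnight with tzinfo=tz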
|
class ICalReader:
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.events = []
|
self.events = []
|
||||||
|
self.timezones = {}
|
||||||
self.raw_data = data
|
self.raw_data = data
|
||||||
self.readEvents()
|
self.readEvents()
|
||||||
|
|
||||||
def readEvents(self):
|
def readEvents(self):
|
||||||
self.events = []
|
self.events = []
|
||||||
lines = self.raw_data.split('\n')
|
self.timezones = {}
|
||||||
inEvent = False
|
parser = Calendar.from_string(self.raw_data)
|
||||||
eventLines = []
|
tzs = parser.walk("vtimezone")
|
||||||
stRegex = re.compile("^BEGIN:VEVENT")
|
self.parseTzs(tzs)
|
||||||
enRegex = re.compile("^END:VEVENT")
|
events = parser.walk("vevent")
|
||||||
for line in lines:
|
for event in events:
|
||||||
if stRegex.match(line):
|
res = self.parseEvent(event)
|
||||||
inEvent = True
|
if res:
|
||||||
eventLines = []
|
self.events.append(res)
|
||||||
if inEvent:
|
|
||||||
eventLines.append(line)
|
|
||||||
if enRegex.match(line):
|
|
||||||
self.events.append(self.parseEvent(eventLines))
|
|
||||||
|
|
||||||
return self.events
|
def parseTzs(self, tzs):
|
||||||
|
if not tzs:
|
||||||
|
return
|
||||||
|
for tz in tzs:
|
||||||
|
if 'X-LIC-LOCATION' in tz:
|
||||||
|
del tz['X-LIC-LOCATION']
|
||||||
|
data = ''.join([str(i) for i in tzs])
|
||||||
|
data = '\r\n'.join([i for i in data.splitlines() if i.strip()])
|
||||||
|
fd = StringIO(data)
|
||||||
|
times = tzmod.tzical(fd)
|
||||||
|
for tz in times.keys():
|
||||||
|
self.timezones[tz] = times.get(tz)
|
||||||
|
|
||||||
|
def parseEvent(self, e):
|
||||||
|
for k in ["dtstart", "dtend", "summary"]:
|
||||||
|
if not k in e:
|
||||||
|
return
|
||||||
|
if not isinstance(e['dtstart'].dt, datetime.datetime):
|
||||||
|
return
|
||||||
|
return ICalEvent.from_event(e, self)
|
||||||
|
startDate = endDate = rule = summary = None
|
||||||
|
startDate = self.parseDate(e.get("dtstart"))
|
||||||
|
endDate = self.parseDate(e.get("dtend"))
|
||||||
|
rule = e.get("RRULE")
|
||||||
|
summary = e.get("summary")
|
||||||
|
if e.get("exdate"):
|
||||||
|
event.addExceptionDate(e['EXDATE'].ical()[7:])
|
||||||
|
if not startDate or not endDate or not summary: # Bad event
|
||||||
|
return
|
||||||
|
|
||||||
def parseEvent(self, lines):
|
|
||||||
event = ICalEvent()
|
event = ICalEvent()
|
||||||
event.raw_data = "\n".join(lines)
|
event.raw_data = str(e)
|
||||||
startDate = None
|
event.summary = summary
|
||||||
rule = None
|
|
||||||
endDate = None
|
|
||||||
|
|
||||||
for line in lines:
|
|
||||||
if re.compile("^SUMMARY:(.*)").match(line):
|
|
||||||
event.summary = re.compile("^SUMMARY:(.*)").match(line).group(1)
|
|
||||||
elif re.compile("^DTSTART;.*:(.*).*").match(line):
|
|
||||||
startDate = self.parseDate(re.compile("^DTSTART;.*:(.*).*").match(line).group(1))
|
|
||||||
elif re.compile("^DTEND;.*:(.*).*").match(line):
|
|
||||||
endDate = self.parseDate(re.compile("^DTEND;.*:(.*).*").match(line).group(1))
|
|
||||||
elif re.compile("^EXDATE.*:(.*)").match(line):
|
|
||||||
event.addExceptionDate(parseDate(re.compile("^EXDATE.*:(.*)").match(line).group(1)))
|
|
||||||
elif re.compile("^RRULE:(.*)").match(line):
|
|
||||||
rule = re.compile("^RRULE:(.*)").match(line).group(1)
|
|
||||||
|
|
||||||
event.startDate = startDate
|
event.startDate = startDate
|
||||||
event.endDate = endDate
|
event.endDate = endDate
|
||||||
if rule:
|
if rule:
|
||||||
event.addRecurrenceRule(rule)
|
event.addRecurrenceRule(rule)
|
||||||
return event
|
return event
|
||||||
|
|
||||||
def parseDate(self, dateStr):
|
def parseDate(self, date):
|
||||||
year = int(dateStr[0:4])
|
if not date:
|
||||||
if year < 1970:
|
return
|
||||||
year = 1970
|
tz = pytz.UTC
|
||||||
|
if 'tzid' in date.params:
|
||||||
month = int(dateStr[4:4+2])
|
tz = self.timezones[date.params['tzid']]
|
||||||
day = int(dateStr[6:6+2])
|
for attr in ['hour', 'minute', 'second']:
|
||||||
try:
|
if not hasattr(date.dt, attr):
|
||||||
hour = int(dateStr[9:9+2])
|
return
|
||||||
minute = int(dateStr[11:11+2])
|
return toTz(date.dt, tz)
|
||||||
except:
|
# return datetime.datetime(date.dt.year, date.dt.month, date.dt.day, date.dt.hour, date.dt.minute, date.dt.second, tzinfo=tz)
|
||||||
hour = 0
|
|
||||||
minute = 0
|
|
||||||
|
|
||||||
return datetime.datetime(year, month, day, hour, minute, tzinfo=pytz.UTC)
|
|
||||||
|
|
||||||
def selectEvents(self, selectFunction):
|
def selectEvents(self, selectFunction):
|
||||||
note = datetime.datetime.today()
|
|
||||||
self.events.sort()
|
self.events.sort()
|
||||||
events = filter(selectFunction, self.events)
|
events = filter(selectFunction, self.events)
|
||||||
return events
|
return events
|
||||||
@@ -94,25 +107,68 @@ class ICalReader:
return event.startsTomorrow()
|
return event.startsTomorrow()
|
||||||
|
|
||||||
def eventsFor(self, date):
|
def eventsFor(self, date):
|
||||||
note = datetime.datetime.today()
|
|
||||||
self.events.sort()
|
self.events.sort()
|
||||||
ret = []
|
ret = []
|
||||||
for event in self.events:
|
for event in self.events:
|
||||||
if event.startsOn(date):
|
if event.startsOn(date):
|
||||||
ret.append(event)
|
ret.append(event)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
class ICalEvent:
|
#class ICalEvent:
|
||||||
def __init__(self):
|
# def __init__(self):
|
||||||
|
# self.exceptionDates = []
|
||||||
|
# self.dateSet = None
|
||||||
|
#
|
||||||
|
# def __str__(self):
|
||||||
|
# return "%s (%s - %s)" % (self.summary, self.startDate, self.endDate)
|
||||||
|
|
||||||
|
class ICalEvent(cal.Event):
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
self.exceptionDates = []
|
self.exceptionDates = []
|
||||||
self.dateSet = None
|
self.dateSet = None
|
||||||
|
self.__parent = super(ICalEvent, self)
|
||||||
|
self.__parent.__init__(*args, **kwargs)  # bound super proxy; do not pass self again
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_event(cls, event, parent):
|
||||||
|
global DEB_OBJ
|
||||||
|
x = cls(**dict(event))
|
||||||
|
x.__dict__ = event.__dict__
|
||||||
|
x.exceptionDates = []
|
||||||
|
x.dateSet = None
|
||||||
|
x.summary = x['summary']
|
||||||
|
x.timezone = x['dtstart'].dt.tzinfo
|
||||||
|
x.startDate = parent.parseDate(x['dtstart'])
|
||||||
|
x.endDate = parent.parseDate(x['dtend'])
|
||||||
|
if not x.timezone:
|
||||||
|
x.timezone = pytz.UTC
|
||||||
|
x.startDate = parent.parseDate(x['dtstart'])
|
||||||
|
x.endDate = parent.parseDate(x['dtend'])
|
||||||
|
x.raw_data = str(x)
|
||||||
|
if 'rrule' in event:
|
||||||
|
x.addRecurrenceRule(event['rrule'])
|
||||||
|
if x.summary == "Server Team Meeting":
|
||||||
|
DEB_OBJ = x
|
||||||
|
return x
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.summary
|
return "%s (%s - %s)" % (self.summary, self.startDate, self.endDate)
|
||||||
|
|
||||||
def __eq__(self, otherEvent):
|
def __eq__(self, otherEvent):
|
||||||
return self.startDate == otherEvent.startDate
|
return self.startTime() == otherEvent.startTime()
|
||||||
|
|
||||||
|
def __lt__(self, otherEvent):
|
||||||
|
return self.startTime() < otherEvent.startTime()
|
||||||
|
|
||||||
|
def __gt__(self, otherEvent):
|
||||||
|
return self.startTime() > otherEvent.startTime()
|
||||||
|
|
||||||
|
def __ge__(self, otherEvent):
|
||||||
|
return self.startTime() >= otherEvent.startTime()
|
||||||
|
|
||||||
|
def __le__(self, otherEvent):
|
||||||
|
return self.startTime() <= otherEvent.startTime()
|
||||||
|
|
||||||
def addExceptionDate(self, date):
|
def addExceptionDate(self, date):
|
||||||
self.exceptionDates.append(date)
|
self.exceptionDates.append(date)
|
||||||
@@ -124,7 +180,8 @@ class ICalEvent:
return self.startsOn(datetime.datetime.today())
|
return self.startsOn(datetime.datetime.today())
|
||||||
|
|
||||||
def startsTomorrow(self):
|
def startsTomorrow(self):
|
||||||
tomorrow = datetime.datetime.fromtimestamp(time.time() + SECONDS_PER_DAY)
|
tomorrow = datetime.datetime.today() + datetime.timedelta(1)
|
||||||
|
# tomorrow = datetime.datetime.fromtimestamp(time.time() + SECONDS_PER_DAY)
|
||||||
return self.startsOn(tomorrow)
|
return self.startsOn(tomorrow)
|
||||||
|
|
||||||
def startsOn(self, date):
|
def startsOn(self, date):
|
||||||
@@ -134,29 +191,47 @@ class ICalEvent:
(self.dateSet and self.dateSet.includes(date)))
|
(self.dateSet and self.dateSet.includes(date)))
|
||||||
|
|
||||||
def startTime(self):
|
def startTime(self):
|
||||||
|
now = datetime.datetime.now(pytz.UTC)
|
||||||
|
if self.dateSet and self.startDate < now:
|
||||||
|
dates = self.dateSet.getRecurrence()
|
||||||
|
for date in dates:
|
||||||
|
if date.date() >= now.date():
|
||||||
|
if date.date() > now.date() or (date.date() == now.date() and date.astimezone(pytz.UTC).time() >= now.time()):
|
||||||
|
return toTz(datetime.datetime.combine(date,self.startDate.time()), self.startDate.tzinfo)
|
||||||
return self.startDate
|
return self.startDate
|
||||||
|
|
||||||
|
def endTime(self):
|
||||||
|
now = datetime.datetime.now(pytz.UTC).date()
|
||||||
|
if self.dateSet and self.endDate.date() < now:
|
||||||
|
return toTz(datetime.datetime.combine(self.startTime().date(), self.endDate.time()), self.startDate.tzinfo)
|
||||||
|
return self.endDate
|
||||||
|
|
||||||
def schedule(self, timezone=None):
|
def schedule(self, timezone=None):
|
||||||
if not timezone:
|
if not timezone:
|
||||||
return "%s UTC: %s" % (self.startDate.strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
return "%s UTC: %s" % (self.startTime().astimezone(pytz.UTC).strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
||||||
return "%s: %s" % (self.startDate.astimezone(pytz.timezone(timezone)).strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
if isinstance(timezone, basestring):
|
||||||
|
return "%s: %s" % (self.startTime().astimezone(pytz.timezone(timezone)).strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
||||||
|
return "%s: %s" % (self.startTime().astimezone(timezone).strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
||||||
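The same formatting logic shown standalone with pytz only (the event values are hypothetical):

import datetime, pytz

start = datetime.datetime(2008, 10, 21, 14, 0, tzinfo=pytz.UTC)
summary = 'Technical Board Meeting'
# no timezone given: report in UTC and drop the word 'Meeting'
print "%s UTC: %s" % (start.astimezone(pytz.UTC).strftime("%d %b %H:%M"),
                      summary.replace('Meeting', '').strip())
# timezone given as a string, as the bot passes it
print "%s: %s" % (start.astimezone(pytz.timezone('US/Eastern')).strftime("%d %b %H:%M"),
                  summary.replace('Meeting', '').strip())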
|
|
||||||
def is_on(self):
|
def is_on(self):
|
||||||
return self.startDate < datetime.datetime.now(pytz.UTC) and self.endDate > datetime.datetime.now(pytz.UTC)
|
now = datetime.datetime.now(pytz.UTC)
|
||||||
|
return self.startTime() <= now and self.endTime() > now
|
||||||
|
|
||||||
def has_passed(self):
|
def has_passed(self):
|
||||||
|
if self.dateSet:
|
||||||
|
return toTz(datetime.datetime.combine(self.startTime().date(), self.endDate.time()), self.startDate.tzinfo) < datetime.datetime.now(pytz.UTC)
|
||||||
return self.endDate < datetime.datetime.now(pytz.UTC)
|
return self.endDate < datetime.datetime.now(pytz.UTC)
|
||||||
|
|
||||||
def seconds_to_go(self):
|
def seconds_to_go(self):
|
||||||
return seconds(self.startDate - datetime.datetime.now(pytz.UTC))
|
return seconds(self.startTime() - datetime.datetime.now(pytz.UTC))
|
||||||
|
|
||||||
def seconds_ago(self):
|
def seconds_ago(self):
|
||||||
return seconds(datetime.datetime.now(pytz.UTC) - self.endDate)
|
return seconds(datetime.datetime.now(pytz.UTC) - self.endTime())
|
||||||
|
|
||||||
def time_to_go(self):
|
def time_to_go(self):
|
||||||
if self.endDate < datetime.datetime.now(pytz.UTC):
|
if self.endTime() < datetime.datetime.now(pytz.UTC):
|
||||||
return False
|
return False
|
||||||
delta = self.startDate - datetime.datetime.now(pytz.UTC)
|
delta = self.startTime() - datetime.datetime.now(pytz.UTC)
|
||||||
s = ''
|
s = ''
|
||||||
if delta.days:
|
if delta.days:
|
||||||
if delta.days != 1:
|
if delta.days != 1:
|
||||||
@@ -182,69 +257,25 @@ class DateSet:
self.untilDate = None
|
self.untilDate = None
|
||||||
self.byMonth = None
|
self.byMonth = None
|
||||||
self.byDate = None
|
self.byDate = None
|
||||||
|
self.dates = None
|
||||||
self.parseRecurrenceRule(rule)
|
self.parseRecurrenceRule(rule)
|
||||||
|
|
||||||
def parseRecurrenceRule(self, rule):
|
def parseRecurrenceRule(self, rule):
|
||||||
if re.compile("FREQ=(.*?);").match(rule) :
|
freq = rruler.rrule_map[rule.pop('freq')[0]]
|
||||||
self.frequency = re.compile("FREQ=(.*?);").match(rule).group(1)
|
now = datetime.datetime.now(self.startDate.tzinfo)
|
||||||
|
rule['dtstart'] = now
|
||||||
if re.compile("COUNT=(\d*)").match(rule) :
|
rule['until'] = now + datetime.timedelta(60)
|
||||||
self.count = int(re.compile("COUNT=(\d*)").match(rule).group(1))
|
self.recurrence = rruler.rrule_wrapper(freq, **rule)
|
||||||
|
|
||||||
if re.compile("UNTIL=(.*?);").match(rule) :
|
|
||||||
self.untilDate = DateParser.parse(re.compile("UNTIL=(.*?);").match(rule).group(1))
|
|
||||||
|
|
||||||
if re.compile("INTERVAL=(\d*)").match(rule) :
|
|
||||||
self.interval = int(re.compile("INTERVAL=(\d*)").match(rule).group(1))
|
|
||||||
|
|
||||||
if re.compile("BYMONTH=(.*?);").match(rule) :
|
def getRecurrence(self):
|
||||||
self.byMonth = re.compile("BYMONTH=(.*?);").match(rule).group(1)
|
if not self.dates:
|
||||||
|
self.dates = []
|
||||||
|
for x in list(self.recurrence):
|
||||||
|
self.dates.append(toTz(x, self.startDate.tzinfo))
|
||||||
|
self.dates.append(self.startDate)
|
||||||
|
return self.dates
|
||||||
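rruler itself is not part of this diff; as an assumption, the expansion parseRecurrenceRule and getRecurrence perform would look roughly like this with dateutil.rrule used directly:

import datetime, pytz
from dateutil import rrule

now = datetime.datetime.now(pytz.UTC)
# expand a weekly recurrence over the same 60-day window parseRecurrenceRule sets up
dates = list(rrule.rrule(rrule.WEEKLY, dtstart=now, until=now + datetime.timedelta(60)))
print dates[:3]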
|
|
||||||
if re.compile("BYDAY=(.*?);").match(rule) :
|
|
||||||
self.byDay = re.compile("BYDAY=(.*?);").match(rule).group(1)
|
|
||||||
|
|
||||||
|
|
||||||
def includes(self, date):
|
def includes(self, date):
|
||||||
if date == self.startDate:
|
if isinstance(date, datetime.datetime):
|
||||||
return True
|
date = date.date()
|
||||||
|
return date in [x.date() for x in self.getRecurrence()]
|
||||||
if self.untilDate and date > self.untilDate:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if self.frequency == 'DAILY':
|
|
||||||
increment = 1
|
|
||||||
if self.interval:
|
|
||||||
increment = self.interval
|
|
||||||
d = self.startDate
|
|
||||||
counter = 0
|
|
||||||
while(d < date):
|
|
||||||
if self.count:
|
|
||||||
counter += 1
|
|
||||||
if counter >= self.count:
|
|
||||||
return False
|
|
||||||
|
|
||||||
d = d.replace(day=d.day+1)
|
|
||||||
|
|
||||||
if (d.day == date.day and
|
|
||||||
d.year == date.year and
|
|
||||||
d.month == date.month):
|
|
||||||
return True
|
|
||||||
|
|
||||||
elif self.frequency == 'WEEKLY':
|
|
||||||
if self.startDate.weekday() == date.weekday():
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
if self.endDate:
|
|
||||||
for n in range(0, self.endDate.day - self.startDate.day):
|
|
||||||
newDate = self.startDate.replace(day=self.startDate.day+n)
|
|
||||||
if newDate.weekday() == date.weekday():
|
|
||||||
return True
|
|
||||||
|
|
||||||
elif self.frequency == 'MONTHLY':
|
|
||||||
pass
|
|
||||||
|
|
||||||
elif self.frequency == 'YEARLY':
|
|
||||||
pass
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
299
Webcal/ical.py.bak
Normal file
@@ -0,0 +1,299 @@
|
#!/usr/bin/python
|
||||||
|
import sys, os
|
||||||
|
sys.path.append(os.path.dirname(__file__))
|
||||||
|
import icalendar
|
||||||
|
reload(icalendar)
|
||||||
|
from icalendar import Calendar, cal, prop
|
||||||
|
from dateutil import tz as tzmod
|
||||||
|
from cStringIO import StringIO
|
||||||
|
import pytz
|
||||||
|
import urllib2
|
||||||
|
import datetime, time
|
||||||
|
import rruler
|
||||||
|
reload(rruler)
|
||||||
|
|
||||||
|
SECONDS_PER_DAY=24*60*60
|
||||||
|
def seconds(timediff):
|
||||||
|
return SECONDS_PER_DAY * timediff.days + timediff.seconds
|
||||||
|
|
||||||
|
class ICalReader:
|
||||||
|
def __init__(self, data):
|
||||||
|
self.events = []
|
||||||
|
self.timezones = {}
|
||||||
|
self.raw_data = data
|
||||||
|
self.readEvents()
|
||||||
|
|
||||||
|
def readEvents(self):
|
||||||
|
self.events = []
|
||||||
|
self.timezones = {}
|
||||||
|
parser = Calendar.from_string(self.raw_data)
|
||||||
|
tzs = parser.walk("vtimezone")
|
||||||
|
self.parseTzs(tzs)
|
||||||
|
events = parser.walk("vevent")
|
||||||
|
for event in events:
|
||||||
|
res = self.parseEvent(event)
|
||||||
|
if res:
|
||||||
|
self.events.append(res)
|
||||||
|
|
||||||
|
def parseTzs(self, tzs):
|
||||||
|
if not tzs:
|
||||||
|
return
|
||||||
|
for tz in tzs:
|
||||||
|
if 'X-LIC-LOCATION' in tz:
|
||||||
|
del tz['X-LIC-LOCATION']
|
||||||
|
data = ''.join([str(i) for i in tzs])
|
||||||
|
data = '\r\n'.join([i for i in data.splitlines() if i.strip()])
|
||||||
|
fd = StringIO(data)
|
||||||
|
times = tzmod.tzical(fd)
|
||||||
|
for tz in times.keys():
|
||||||
|
self.timezones[tz] = times.get(tz)
|
||||||
|
|
||||||
|
def parseEvent(self, e):
|
||||||
|
for k in ["dtstart", "dtend", "summary"]:
|
||||||
|
if not k in e:
|
||||||
|
return
|
||||||
|
if not isinstance(e['dtstart'].dt, datetime.datetime):
|
||||||
|
return
|
||||||
|
return ICalEvent.from_event(e, self)
|
||||||
|
startDate = endDate = rule = summary = None
|
||||||
|
startDate = self.parseDate(e.get("dtstart"))
|
||||||
|
endDate = self.parseDate(e.get("dtend"))
|
||||||
|
rule = e.get("RRULE")
|
||||||
|
summary = e.get("summary")
|
||||||
|
if e.get("exdate"):
|
||||||
|
event.addExceptionDate(e['EXDATE'].ical()[7:])
|
||||||
|
if not startDate or not endDate or not summary: # Bad event
|
||||||
|
return
|
||||||
|
|
||||||
|
event = ICalEvent()
|
||||||
|
event.raw_data = str(e)
|
||||||
|
event.summary = summary
|
||||||
|
event.startDate = startDate
|
||||||
|
event.endDate = endDate
|
||||||
|
if rule:
|
||||||
|
event.addRecurrenceRule(rule)
|
||||||
|
return event
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def toTz(date, tz):
|
||||||
|
return datetime.datetime(date.year, date.month, date.day, date.hour, date.minute, date.second, tzinfo=tz)
|
||||||
|
|
||||||
|
def parseDate(self, date):
|
||||||
|
if not date:
|
||||||
|
return
|
||||||
|
tz = pytz.UTC
|
||||||
|
if 'tzid' in date.params:
|
||||||
|
tz = self.timezones[date.params['tzid']]
|
||||||
|
for attr in ['hour', 'minute', 'second']:
|
||||||
|
if not hasattr(date.dt, attr):
|
||||||
|
return
|
||||||
|
return self.toTz(date.dt, tz)
|
||||||
|
# return datetime.datetime(date.dt.year, date.dt.month, date.dt.day, date.dt.hour, date.dt.minute, date.dt.second, tzinfo=tz)
|
||||||
|
|
||||||
|
def selectEvents(self, selectFunction):
|
||||||
|
self.events.sort()
|
||||||
|
events = filter(selectFunction, self.events)
|
||||||
|
return events
|
||||||
|
|
||||||
|
def todaysEvents(self, event):
|
||||||
|
return event.startsToday()
|
||||||
|
|
||||||
|
def tomorrowsEvents(self, event):
|
||||||
|
return event.startsTomorrow()
|
||||||
|
|
||||||
|
def eventsFor(self, date):
|
||||||
|
self.events.sort()
|
||||||
|
ret = []
|
||||||
|
for event in self.events:
|
||||||
|
if event.startsOn(date):
|
||||||
|
ret.append(event)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
#class ICalEvent:
|
||||||
|
# def __init__(self):
|
||||||
|
# self.exceptionDates = []
|
||||||
|
# self.dateSet = None
|
||||||
|
#
|
||||||
|
# def __str__(self):
|
||||||
|
# return "%s (%s - %s)" % (self.summary, self.startDate, self.endDate)
|
||||||
|
|
||||||
|
class ICalEvent(cal.Event):
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
self.exceptionDates = []
|
||||||
|
self.dateSet = None
|
||||||
|
self.__parent = super(ICalEvent, self)
|
||||||
|
self.__parent.__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_event(cls, event, parent):
|
||||||
|
x = cls(**dict(event))
|
||||||
|
x.__dict__ = event.__dict__
|
||||||
|
x.summary = x['summary']
|
||||||
|
x.timezone = x['dtstart'].dt.tzinfo
|
||||||
|
x.startDate = parent.parseDate(x['dtstart'])
|
||||||
|
x.endDate = parent.parseDate(x['dtend'])
|
||||||
|
if not x.timezone:
|
||||||
|
x.timezone = pytz.UTC
|
||||||
|
x.startDate = parent.parseDate(x['dtstart'])
|
||||||
|
x.endDate = parent.parseDate(x['dtend'])
|
||||||
|
x.raw_data = str(x)
|
||||||
|
if 'rrule' in event:
|
||||||
|
x.addRecurrenceRule(event['rrule'])
|
||||||
|
return x
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "%s (%s - %s)" % (self.summary, self.startDate, self.endDate)
|
||||||
|
|
||||||
|
def __eq__(self, otherEvent):
|
||||||
|
return self.startDate == otherEvent.startDate
|
||||||
|
|
||||||
|
def __lt__(self, otherEvent):
|
||||||
|
return self.startDate < otherEvent.startDate
|
||||||
|
|
||||||
|
def __gt__(self, otherEvent):
|
||||||
|
return self.startDate > otherEvent.startDate
|
||||||
|
|
||||||
|
def __ge__(self, otherEvent):
|
||||||
|
return self.startDate >= otherEvent.startDate
|
||||||
|
|
||||||
|
def __le__(self, otherEvent):
|
||||||
|
return self.startDate <= otherEvent.startDate
|
||||||
|
|
||||||
|
def addExceptionDate(self, date):
|
||||||
|
self.exceptionDates.append(date)
|
||||||
|
|
||||||
|
def addRecurrenceRule(self, rule):
|
||||||
|
self.dateSet = DateSet(self.startDate, self.endDate, rule)
|
||||||
|
|
||||||
|
def startsToday(self):
|
||||||
|
return self.startsOn(datetime.datetime.today())
|
||||||
|
|
||||||
|
def startsTomorrow(self):
|
||||||
|
tomorrow = datetime.datetime.fromtimestamp(time.time() + SECONDS_PER_DAY)
|
||||||
|
return self.startsOn(tomorrow)
|
||||||
|
|
||||||
|
def startsOn(self, date):
|
||||||
|
return (self.startDate.year == date.year and
|
||||||
|
self.startDate.month == date.month and
|
||||||
|
self.startDate.day == date.day or
|
||||||
|
(self.dateSet and self.dateSet.includes(date)))
|
||||||
|
|
||||||
|
def startTime(self):
|
||||||
|
return self.startDate
|
||||||
|
|
||||||
|
def schedule(self, timezone=None):
|
||||||
|
if not timezone:
|
||||||
|
return "%s UTC: %s" % (self.startDate.strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
||||||
|
return "%s: %s" % (self.startDate.astimezone(pytz.timezone(timezone)).strftime("%d %b %H:%M %Z"), self.summary.replace('Meeting','').strip())
|
||||||
|
|
||||||
|
def is_on(self):
|
||||||
|
return self.startDate < datetime.datetime.now(pytz.UTC) and self.endDate > datetime.datetime.now(pytz.UTC)
|
||||||
|
|
||||||
|
def has_passed(self):
|
||||||
|
return self.endDate < datetime.datetime.now(pytz.UTC)
|
||||||
|
|
||||||
|
def seconds_to_go(self):
|
||||||
|
return seconds(self.startDate - datetime.datetime.now(pytz.UTC))
|
||||||
|
|
||||||
|
def seconds_ago(self):
|
||||||
|
return seconds(datetime.datetime.now(pytz.UTC) - self.endDate)
|
||||||
|
|
||||||
|
def time_to_go(self):
|
||||||
|
if self.endDate < datetime.datetime.now(pytz.UTC):
|
||||||
|
return False
|
||||||
|
delta = self.startDate - datetime.datetime.now(pytz.UTC)
|
||||||
|
s = ''
|
||||||
|
if delta.days:
|
||||||
|
if delta.days != 1:
|
||||||
|
s = 's'
|
||||||
|
return '%d day%s' % (delta.days, s)
|
||||||
|
h = ''
|
||||||
|
if delta.seconds > 7200:
|
||||||
|
s = 's'
|
||||||
|
if delta.seconds > 3600:
|
||||||
|
h = '%d hour%s ' % (int(delta.seconds/3600),s)
|
||||||
|
s = ''
|
||||||
|
minutes = (delta.seconds % 3600) / 60
|
||||||
|
if minutes != 1:
|
||||||
|
s = 's'
|
||||||
|
return '%s%d minute%s' % (h,minutes,s)
|
||||||
|
|
||||||
|
class DateSet:
|
||||||
|
def __init__(self, startDate, endDate, rule):
|
||||||
|
self.startDate = startDate
|
||||||
|
self.endDate = endDate
|
||||||
|
self.frequency = None
|
||||||
|
self.count = None
|
||||||
|
self.untilDate = None
|
||||||
|
self.byMonth = None
|
||||||
|
self.byDate = None
|
||||||
|
self.parseRecurrenceRule(rule)
|
||||||
|
|
||||||
|
def parseRecurrenceRule(self, rule):
|
||||||
|
freq = rruler.rrule_map[rule.pop('freq')[0]]
|
||||||
|
self.recurrence = rruler.rrule_wrapper(freq, **rule)
|
||||||
|
# if 'freq' in rule:
|
||||||
|
# self.frequency = rule['freq']
|
||||||
|
# if 'count' in rule:
|
||||||
|
# self.count = rule['count']
|
||||||
|
# if 'until' in rule:
|
||||||
|
## self.untilDate = rule['until'][0].strftime("%Y%m%dT%H%M%SZ")
|
||||||
|
# self.untilDate = rule['until'][0]
|
||||||
|
# if 'interval' in rule:
|
||||||
|
# self.interval = rule['interval']
|
||||||
|
# if 'bymonth' in rule:
|
||||||
|
# self.myMonth = rule['bymonth']
|
||||||
|
# if 'byday' in rule:
|
||||||
|
# self.byDay = rule['byday']
|
||||||
|
|
||||||
|
def includes(self, date):
|
||||||
|
if isinstance(date, datetime.datetime):
|
||||||
|
date = date.date()
|
||||||
|
return date in [x.date() for x in list(self.recurrence)] or date == self.startDate.date()
|
||||||
|
# if date == self.startDate:
|
||||||
|
# return True
|
||||||
|
#
|
||||||
|
# if self.untilDate and date > self.untilDate:
|
||||||
|
# return False
|
||||||
|
#
|
||||||
|
# if self.frequency == 'DAILY':
|
||||||
|
# increment = 1
|
||||||
|
# if self.interval:
|
||||||
|
# increment = self.interval
|
||||||
|
# d = self.startDate
|
||||||
|
# counter = 0
|
||||||
|
# while(d < date):
|
||||||
|
# if self.count:
|
||||||
|
# counter += 1
|
||||||
|
# if counter >= self.count:
|
||||||
|
# return False
|
||||||
|
#
|
||||||
|
# d = d.replace(day=d.day+1)
|
||||||
|
#
|
||||||
|
# if (d.day == date.day and
|
||||||
|
# d.year == date.year and
|
||||||
|
# d.month == date.month):
|
||||||
|
# return True
|
||||||
|
#
|
||||||
|
# elif self.frequency == 'WEEKLY':
|
||||||
|
# if self.startDate.weekday() == date.weekday():
|
||||||
|
# return True
|
||||||
|
# else:
|
||||||
|
# if self.endDate:
|
||||||
|
# for n in range(0, self.endDate.day - self.startDate.day):
|
||||||
|
# newDate = self.startDate.replace(day=self.startDate.day+n)
|
||||||
|
# if newDate.weekday() == date.weekday():
|
||||||
|
# return True
|
||||||
|
#
|
||||||
|
# elif self.frequency == 'MONTHLY':
|
||||||
|
# if self.startDate.month == date.month:
|
||||||
|
# if self.startDate.weekday() == date.weekday():
|
||||||
|
# return True
|
||||||
|
#
|
||||||
|
# elif self.frequency == 'YEARLY':
|
||||||
|
# if (self.startDate.month == date.month) and (self.startDate.day == date.day):
|
||||||
|
# return True
|
||||||
|
#
|
||||||
|
# return False
|
||||||
|
|
286
Webcal/ical.py.bak.bac2
Normal file
@@ -0,0 +1,286 @@
|
#!/usr/bin/python
|
||||||
|
# Slightly modified version of the iCal module found at
|
||||||
|
# http://www.devoesquared.com/Software/iCal_Module
|
||||||
|
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import time
|
||||||
|
import pytz # pytz can be found on http://pytz.sourceforge.net
|
||||||
|
|
||||||
|
parent = None
|
||||||
|
|
||||||
|
def log(x):
|
||||||
|
if not parent:
|
||||||
|
return
|
||||||
|
parent.log.info(x)
|
||||||
|
|
||||||
|
SECONDS_PER_DAY=24*60*60
|
||||||
|
def seconds(timediff):
|
||||||
|
return SECONDS_PER_DAY * timediff.days + timediff.seconds
|
||||||
|
|
||||||
|
class ICalReader:
|
||||||
|
|
||||||
|
def __init__(self, data):
|
||||||
|
self.events = []
|
||||||
|
self.raw_data = data.replace('\r','')
|
||||||
|
self.readEvents()
|
||||||
|
|
||||||
|
def readEvents(self):
|
||||||
|
self.events = []
|
||||||
|
lines = self.raw_data.split('\n')
|
||||||
|
inEvent = False
|
||||||
|
eventLines = []
|
||||||
|
stRegex = re.compile("^BEGIN:VEVENT")
|
||||||
|
enRegex = re.compile("^END:VEVENT")
|
||||||
|
for line in lines:
|
||||||
|
if stRegex.match(line):
|
||||||
|
inEvent = True
|
||||||
|
eventLines = []
|
||||||
|
if inEvent:
|
||||||
|
eventLines.append(line)
|
||||||
|
if enRegex.match(line):
|
||||||
|
inEvent = False
|
||||||
|
event = self.parseEvent(eventLines)
|
||||||
|
if event:
|
||||||
|
self.events.append(event)
|
||||||
|
|
||||||
|
self.events.sort()
|
||||||
|
return self.events
|
||||||
|
|
||||||
|
def parseEvent(self, lines):
|
||||||
|
event = ICalEvent()
|
||||||
|
event.raw_data = "\n".join(lines)
|
||||||
|
startDate = None
|
||||||
|
rule = None
|
||||||
|
endDate = None
|
||||||
|
reSummary = re.compile("^SUMMARY:(.*)")
|
||||||
|
reDstart = re.compile("^DTSTART(.*):([0-9]+T[0-9]+)")
|
||||||
|
reDend = re.compile("^DTEND(.*):([0-9]+T[0-9]+)")
|
||||||
|
reExdata = re.compile("^EXDATE:([0-9]+T[0-9]+)")
|
||||||
|
reRrule = re.compile("^RRULE:(.*)")
|
||||||
|
for line in lines:
|
||||||
|
match = False
|
||||||
|
if reSummary.match(line):
|
||||||
|
event.summary = reSummary.match(line).group(1)
|
||||||
|
elif reDstart.match(line):
|
||||||
|
startDate = self.parseDate(*reDstart.match(line).groups())
|
||||||
|
elif reDend.match(line):
|
||||||
|
endDate = self.parseDate(*reDend.match(line).groups())
|
||||||
|
elif reExdata.match(line):
|
||||||
|
event.addExceptionDate(reExdata.match(line).group(1))
|
||||||
|
elif reRrule.match(line):
|
||||||
|
rule = reRrule.match(line).group(1)
|
||||||
|
|
||||||
|
event.startDate = startDate
|
||||||
|
event.endDate = endDate
|
||||||
|
|
||||||
|
if rule:
|
||||||
|
event.addRecurrenceRule(rule)
|
||||||
|
|
||||||
|
if not startDate or not endDate:
|
||||||
|
return None
|
||||||
|
return event
|
||||||
|
|
||||||
|
def parseDate(self, tz, dateStr):
|
||||||
|
year = int(dateStr[0:4])
|
||||||
|
if year < 1970:
|
||||||
|
year = 1970
|
||||||
|
|
||||||
|
month = int(dateStr[4:4+2])
|
||||||
|
day = int(dateStr[6:6+2])
|
||||||
|
try:
|
||||||
|
hour = int(dateStr[9:9+2])
|
||||||
|
minute = int(dateStr[11:11+2])
|
||||||
|
except:
|
||||||
|
hour = 0
|
||||||
|
minute = 0
|
||||||
|
if tz:
|
||||||
|
return datetime.datetime(year, month, day, hour, minute, tzinfo=pytz.timezone(tz[6:]))
|
||||||
|
return datetime.datetime(year, month, day, hour, minute, tzinfo=pytz.UTC)
|
||||||
|
|
||||||
|
def selectEvents(self, selectFunction):
|
||||||
|
note = datetime.datetime.today()
|
||||||
|
self.events.sort()
|
||||||
|
events = filter(selectFunction, self.events)
|
||||||
|
return events
|
||||||
|
|
||||||
|
def todaysEvents(self, event):
|
||||||
|
return event.startsToday()
|
||||||
|
|
||||||
|
def tomorrowsEvents(self, event):
|
||||||
|
return event.startsTomorrow()
|
||||||
|
|
||||||
|
def eventsFor(self, date):
|
||||||
|
note = datetime.datetime.today()
|
||||||
|
self.events.sort()
|
||||||
|
ret = []
|
||||||
|
for event in self.events:
|
||||||
|
if event.startsOn(date):
|
||||||
|
ret.append(event)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
class ICalEvent:
|
||||||
|
def __init__(self):
|
||||||
|
self.exceptionDates = []
|
||||||
|
self.dateSet = None
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "%s (%s - %s)" % (self.summary, self.startDate, self.endDate)
|
||||||
|
|
||||||
|
def __eq__(self, otherEvent):
|
||||||
|
return self.startDate == otherEvent.startDate
|
||||||
|
|
||||||
|
def __lt__(self, otherEvent):
|
||||||
|
return self.startDate < otherEvent.startDate
|
||||||
|
|
||||||
|
def __gt__(self, otherEvent):
|
||||||
|
return self.startDate > otherEvent.startDate
|
||||||
|
|
||||||
|
def __ge__(self, otherEvent):
|
||||||
|
return self.startDate >= otherEvent.startDate
|
||||||
|
|
||||||
|
def __le__(self, otherEvent):
|
||||||
|
return self.startDate <= otherEvent.startDate
|
||||||
|
|
||||||
|
def addExceptionDate(self, date):
|
||||||
|
self.exceptionDates.append(date)
|
||||||
|
|
||||||
|
def addRecurrenceRule(self, rule):
|
||||||
|
self.dateSet = DateSet(self.startDate, self.endDate, rule)
|
||||||
|
|
||||||
|
def startsToday(self):
|
||||||
|
return self.startsOn(datetime.datetime.today())
|
||||||
|
|
||||||
|
def startsTomorrow(self):
|
||||||
|
tomorrow = datetime.datetime.fromtimestamp(time.time() + SECONDS_PER_DAY)
|
||||||
|
return self.startsOn(tomorrow)
|
||||||
|
|
||||||
|
def startsOn(self, date):
|
||||||
|
return (self.startDate.year == date.year and
|
||||||
|
self.startDate.month == date.month and
|
||||||
|
self.startDate.day == date.day or
|
||||||
|
(self.dateSet and self.dateSet.includes(date)))
|
||||||
|
|
||||||
|
def startTime(self):
|
||||||
|
return self.startDate
|
||||||
|
|
||||||
|
def schedule(self, timezone=None):
|
||||||
|
if not timezone:
|
||||||
|
return "%s UTC: %s" % (self.startDate.strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
||||||
|
return "%s: %s" % (self.startDate.astimezone(pytz.timezone(timezone)).strftime("%d %b %H:%M"), self.summary.replace('Meeting','').strip())
|
||||||
|
|
||||||
|
def is_on(self):
|
||||||
|
return self.startDate < datetime.datetime.now(pytz.UTC) and self.endDate > datetime.datetime.now(pytz.UTC)
|
||||||
|
|
||||||
|
def has_passed(self):
|
||||||
|
return self.endDate < datetime.datetime.now(pytz.UTC)
|
||||||
|
|
||||||
|
def seconds_to_go(self):
|
||||||
|
return seconds(self.startDate - datetime.datetime.now(pytz.UTC))
|
||||||
|
|
||||||
|
def seconds_ago(self):
|
||||||
|
return seconds(datetime.datetime.now(pytz.UTC) - self.endDate)
|
||||||
|
|
||||||
|
def time_to_go(self):
|
||||||
|
if self.endDate < datetime.datetime.now(pytz.UTC):
|
||||||
|
return False
|
||||||
|
delta = self.startDate - datetime.datetime.now(pytz.UTC)
|
||||||
|
s = ''
|
||||||
|
if delta.days:
|
||||||
|
if delta.days != 1:
|
||||||
|
s = 's'
|
||||||
|
return '%d day%s' % (delta.days, s)
|
||||||
|
h = ''
|
||||||
|
if delta.seconds > 7200:
|
||||||
|
s = 's'
|
||||||
|
if delta.seconds > 3600:
|
||||||
|
h = '%d hour%s ' % (int(delta.seconds/3600),s)
|
||||||
|
s = ''
|
||||||
|
minutes = (delta.seconds % 3600) / 60
|
||||||
|
if minutes != 1:
|
||||||
|
s = 's'
|
||||||
|
return '%s%d minute%s' % (h,minutes,s)
|
||||||
|
|
||||||
|
|
||||||
|
class DateSet:
|
||||||
|
def __init__(self, startDate, endDate, rule):
|
||||||
|
self.startDate = startDate
|
||||||
|
self.endDate = endDate
|
||||||
|
self.frequency = None
|
||||||
|
self.count = None
|
||||||
|
self.untilDate = None
|
||||||
|
self.byMonth = None
|
||||||
|
self.byDate = None
|
||||||
|
self.parseRecurrenceRule(rule)
|
||||||
|
|
||||||
|
def parseRecurrenceRule(self, rule):
|
||||||
|
if re.compile("FREQ=(.*?);").match(rule) :
|
||||||
|
self.frequency = re.compile("FREQ=(.*?);").match(rule).group(1)
|
||||||
|
|
||||||
|
if re.compile("COUNT=(\d*)").match(rule) :
|
||||||
|
self.count = int(re.compile("COUNT=(\d*)").match(rule).group(1))
|
||||||
|
|
||||||
|
if re.compile("UNTIL=(.*?);").match(rule) :
|
||||||
|
# self.untilDate = DateParser.parse(re.compile("UNTIL=(.*?);").match(rule).group(1))
|
||||||
|
self.untilDate = re.compile("UNTIL=(.*?);").match(rule).group(1)
|
||||||
|
|
||||||
|
if re.compile("INTERVAL=(\d*)").match(rule) :
|
||||||
|
self.interval = int(re.compile("INTERVAL=(\d*)").match(rule).group(1))
|
||||||
|
|
||||||
|
if re.compile("BYMONTH=(.*?);").match(rule) :
|
||||||
|
self.byMonth = re.compile("BYMONTH=(.*?);").match(rule).group(1)
|
||||||
|
|
||||||
|
if re.compile("BYDAY=(.*?);").match(rule) :
|
||||||
|
self.byDay = re.compile("BYDAY=(.*?);").match(rule).group(1)
|
||||||
|
|
||||||
|
|
||||||
|
def includes(self, date):
|
||||||
|
if date == self.startDate:
|
||||||
|
return True
|
||||||
|
|
||||||
|
if self.untilDate and date > self.untilDate:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if self.frequency == 'DAILY':
|
||||||
|
increment = 1
|
||||||
|
if self.interval:
|
||||||
|
increment = self.interval
|
||||||
|
d = self.startDate
|
||||||
|
counter = 0
|
||||||
|
while(d < date):
|
||||||
|
if self.count:
|
||||||
|
counter += 1
|
||||||
|
if counter >= self.count:
|
||||||
|
return False
|
||||||
|
|
||||||
|
d = d.replace(day=d.day+1)
|
||||||
|
|
||||||
|
if (d.day == date.day and
|
||||||
|
d.year == date.year and
|
||||||
|
d.month == date.month):
|
||||||
|
return True
|
||||||
|
|
||||||
|
elif self.frequency == 'WEEKLY':
|
||||||
|
if self.startDate.weekday() == date.weekday():
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
if self.endDate:
|
||||||
|
for n in range(0, self.endDate.day - self.startDate.day):
|
||||||
|
newDate = self.startDate.replace(day=self.startDate.day+n)
|
||||||
|
if newDate.weekday() == date.weekday():
|
||||||
|
return True
|
||||||
|
|
||||||
|
elif self.frequency == 'MONTHLY':
|
||||||
|
if self.startDate.month == date.month:
|
||||||
|
if self.startDate.weekday() == date.weekday():
|
||||||
|
return True
|
||||||
|
|
||||||
|
elif self.frequency == 'YEARLY':
|
||||||
|
if (self.startDate.month == date.month) and (self.startDate.day == date.day):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
16
Webcal/icalendar/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
# Components
|
||||||
|
from icalendar.cal import Calendar, Event, Todo, Journal
|
||||||
|
from icalendar.cal import FreeBusy, Timezone, Alarm, ComponentFactory
|
||||||
|
|
||||||
|
# Property Data Value Types
|
||||||
|
from icalendar.prop import vBinary, vBoolean, vCalAddress, vDatetime, vDate, \
|
||||||
|
vDDDTypes, vDuration, vFloat, vInt, vPeriod, \
|
||||||
|
vWeekday, vFrequency, vRecur, vText, vTime, vUri, \
|
||||||
|
vGeo, vUTCOffset, TypesFactory
|
||||||
|
|
||||||
|
# useful tzinfo subclasses
|
||||||
|
from icalendar.prop import FixedOffset, UTC, LocalTimezone
|
||||||
|
|
||||||
|
# Parameters and helper methods for splitting and joining string with escaped
|
||||||
|
# chars.
|
||||||
|
from icalendar.parser import Parameters, q_split, q_join
|
534
Webcal/icalendar/cal.py
Normal file
@@ -0,0 +1,534 @@
|
# -*- coding: latin-1 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
Calendar is a dictionary like Python object that can render itself as VCAL
|
||||||
|
files according to rfc2445.
|
||||||
|
|
||||||
|
These are the defined components.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# from python
|
||||||
|
from types import ListType, TupleType
|
||||||
|
SequenceTypes = (ListType, TupleType)
|
||||||
|
import re
|
||||||
|
|
||||||
|
# from this package
|
||||||
|
from icalendar.caselessdict import CaselessDict
|
||||||
|
from icalendar.parser import Contentlines, Contentline, Parameters
|
||||||
|
from icalendar.parser import q_split, q_join
|
||||||
|
from icalendar.prop import TypesFactory, vText
|
||||||
|
|
||||||
|
|
||||||
|
######################################
|
||||||
|
# The component factory
|
||||||
|
|
||||||
|
class ComponentFactory(CaselessDict):
|
||||||
|
"""
|
||||||
|
All components defined in rfc 2445 are registered in this factory class. To
|
||||||
|
get a component you can use it like this.
|
||||||
|
|
||||||
|
>>> factory = ComponentFactory()
|
||||||
|
>>> component = factory['VEVENT']
|
||||||
|
>>> event = component(dtstart='19700101')
|
||||||
|
>>> event.as_string()
|
||||||
|
'BEGIN:VEVENT\\r\\nDTSTART:19700101\\r\\nEND:VEVENT\\r\\n'
|
||||||
|
|
||||||
|
>>> factory.get('VCALENDAR', Component)
|
||||||
|
<class 'icalendar.cal.Calendar'>
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
"Set keys to upper for initial dict"
|
||||||
|
CaselessDict.__init__(self, *args, **kwargs)
|
||||||
|
self['VEVENT'] = Event
|
||||||
|
self['VTODO'] = Todo
|
||||||
|
self['VJOURNAL'] = Journal
|
||||||
|
self['VFREEBUSY'] = FreeBusy
|
||||||
|
self['VTIMEZONE'] = Timezone
|
||||||
|
self['VALARM'] = Alarm
|
||||||
|
self['VCALENDAR'] = Calendar
|
||||||
|
|
||||||
|
|
||||||
|
# These Properties have multiple property values inlined in one propertyline
|
||||||
|
# separated by comma. Use CaselessDict as a simple caseless set.
|
||||||
|
INLINE = CaselessDict(
|
||||||
|
[(cat, 1) for cat in ('CATEGORIES', 'RESOURCES', 'FREEBUSY')]
|
||||||
|
)
|
||||||
|
|
||||||
|
_marker = []
|
||||||
|
|
||||||
|
class Component(CaselessDict):
|
||||||
|
"""
|
||||||
|
Component is the base object for calendar, Event and the other components
|
||||||
|
defined in RFC 2445. Normally you will not use this class directly, but
|
||||||
|
rather one of the subclasses.
|
||||||
|
|
||||||
|
A component is like a dictionary with extra methods and attributes.
|
||||||
|
>>> c = Component()
|
||||||
|
>>> c.name = 'VCALENDAR'
|
||||||
|
|
||||||
|
Every key defines a property. A property can consist of either a single
|
||||||
|
item. This can be set with a single value
|
||||||
|
>>> c['prodid'] = '-//max m//icalendar.mxm.dk/'
|
||||||
|
>>> c
|
||||||
|
VCALENDAR({'PRODID': '-//max m//icalendar.mxm.dk/'})
|
||||||
|
|
||||||
|
or with a list
|
||||||
|
>>> c['ATTENDEE'] = ['Max M', 'Rasmussen']
|
||||||
|
|
||||||
|
if you use the add method you don't have to consider whether a value is a list
|
||||||
|
or not.
|
||||||
|
>>> c = Component()
|
||||||
|
>>> c.name = 'VEVENT'
|
||||||
|
>>> c.add('attendee', 'maxm@mxm.dk')
|
||||||
|
>>> c.add('attendee', 'test@example.dk')
|
||||||
|
>>> c
|
||||||
|
VEVENT({'ATTENDEE': [vCalAddress('maxm@mxm.dk'), vCalAddress('test@example.dk')]})
|
||||||
|
|
||||||
|
You can get the values back directly
|
||||||
|
>>> c.add('prodid', '-//my product//')
|
||||||
|
>>> c['prodid']
|
||||||
|
vText(u'-//my product//')
|
||||||
|
|
||||||
|
or decoded to a python type
|
||||||
|
>>> c.decoded('prodid')
|
||||||
|
u'-//my product//'
|
||||||
|
|
||||||
|
With default values for non existing properties
|
||||||
|
>>> c.decoded('version', 'No Version')
|
||||||
|
'No Version'
|
||||||
|
|
||||||
|
The component can render itself in the RFC 2445 format.
|
||||||
|
>>> c = Component()
|
||||||
|
>>> c.name = 'VCALENDAR'
|
||||||
|
>>> c.add('attendee', 'Max M')
|
||||||
|
>>> c.as_string()
|
||||||
|
'BEGIN:VCALENDAR\\r\\nATTENDEE:Max M\\r\\nEND:VCALENDAR\\r\\n'
|
||||||
|
|
||||||
|
>>> from icalendar.prop import vDatetime
|
||||||
|
|
||||||
|
Components can be nested, so you can add a subcomponent. E.g. a calendar holds events.
|
||||||
|
>>> e = Component(summary='A brief history of time')
|
||||||
|
>>> e.name = 'VEVENT'
|
||||||
|
>>> e.add('dtend', '20000102T000000', encode=0)
|
||||||
|
>>> e.add('dtstart', '20000101T000000', encode=0)
|
||||||
|
>>> e.as_string()
|
||||||
|
'BEGIN:VEVENT\\r\\nDTEND:20000102T000000\\r\\nDTSTART:20000101T000000\\r\\nSUMMARY:A brief history of time\\r\\nEND:VEVENT\\r\\n'
|
||||||
|
|
||||||
|
>>> c.add_component(e)
|
||||||
|
>>> c.subcomponents
|
||||||
|
[VEVENT({'DTEND': '20000102T000000', 'DTSTART': '20000101T000000', 'SUMMARY': 'A brief history of time'})]
|
||||||
|
|
||||||
|
We can walk over nested components with the walk method.
|
||||||
|
>>> [i.name for i in c.walk()]
|
||||||
|
['VCALENDAR', 'VEVENT']
|
||||||
|
|
||||||
|
We can also just walk over specific component types, by filtering them on
|
||||||
|
their name.
|
||||||
|
>>> [i.name for i in c.walk('VEVENT')]
|
||||||
|
['VEVENT']
|
||||||
|
|
||||||
|
>>> [i['dtstart'] for i in c.walk('VEVENT')]
|
||||||
|
['20000101T000000']
|
||||||
|
|
||||||
|
INLINE properties have their values on one property line. Note the double
|
||||||
|
quoting of the value with a colon in it.
|
||||||
|
>>> c = Calendar()
|
||||||
|
>>> c['resources'] = 'Chair, Table, "Room: 42"'
|
||||||
|
>>> c
|
||||||
|
VCALENDAR({'RESOURCES': 'Chair, Table, "Room: 42"'})
|
||||||
|
|
||||||
|
>>> c.as_string()
|
||||||
|
'BEGIN:VCALENDAR\\r\\nRESOURCES:Chair, Table, "Room: 42"\\r\\nEND:VCALENDAR\\r\\n'
|
||||||
|
|
||||||
|
The inline values must be handled by the get_inline() and set_inline()
|
||||||
|
methods.
|
||||||
|
|
||||||
|
>>> c.get_inline('resources', decode=0)
|
||||||
|
['Chair', 'Table', 'Room: 42']
|
||||||
|
|
||||||
|
These can also be decoded
|
||||||
|
>>> c.get_inline('resources', decode=1)
|
||||||
|
[u'Chair', u'Table', u'Room: 42']
|
||||||
|
|
||||||
|
You can set them directly
|
||||||
|
>>> c.set_inline('resources', ['A', 'List', 'of', 'some, recources'], encode=1)
|
||||||
|
>>> c['resources']
|
||||||
|
'A,List,of,"some, recources"'
|
||||||
|
|
||||||
|
and back again
|
||||||
|
>>> c.get_inline('resources', decode=0)
|
||||||
|
['A', 'List', 'of', 'some, recources']
|
||||||
|
|
||||||
|
>>> c['freebusy'] = '19970308T160000Z/PT3H,19970308T200000Z/PT1H,19970308T230000Z/19970309T000000Z'
|
||||||
|
>>> c.get_inline('freebusy', decode=0)
|
||||||
|
['19970308T160000Z/PT3H', '19970308T200000Z/PT1H', '19970308T230000Z/19970309T000000Z']
|
||||||
|
|
||||||
|
>>> freebusy = c.get_inline('freebusy', decode=1)
|
||||||
|
>>> type(freebusy[0][0]), type(freebusy[0][1])
|
||||||
|
(<type 'datetime.datetime'>, <type 'datetime.timedelta'>)
|
||||||
|
"""
|
||||||
|
|
||||||
|
name = '' # must be defined in each component
|
||||||
|
required = () # These properties are required
|
||||||
|
singletons = () # These properties must only appear once
|
||||||
|
multiple = () # may occur more than once
|
||||||
|
exclusive = () # These properties are mutually exclusive
|
||||||
|
inclusive = () # if any occurs the other(s) MUST occur ('duration', 'repeat')
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
"Set keys to upper for initial dict"
|
||||||
|
CaselessDict.__init__(self, *args, **kwargs)
|
||||||
|
# set parameters here for properties that use non-default values
|
||||||
|
self.subcomponents = [] # Components can be nested.
|
||||||
|
|
||||||
|
|
||||||
|
# def non_complience(self, warnings=0):
|
||||||
|
# """
|
||||||
|
# not implemented yet!
|
||||||
|
# Returns a dict describing non compliant properties, if any.
|
||||||
|
# If warnings is true it also returns warnings.
|
||||||
|
#
|
||||||
|
# If the parser is too strict it might prevent parsing erroneous but
|
||||||
|
# otherwise compliant properties. So the parser is pretty lax, but it is
|
||||||
|
#     possible to test for non-compliance by calling this method.
|
||||||
|
# """
|
||||||
|
# nc = {}
|
||||||
|
# if not getattr(self, 'name', ''):
|
||||||
|
# nc['name'] = {'type':'ERROR', 'description':'Name is not defined'}
|
||||||
|
# return nc
|
||||||
|
|
||||||
|
|
||||||
|
#############################
|
||||||
|
# handling of property values
|
||||||
|
|
||||||
|
def _encode(self, name, value, cond=1):
|
||||||
|
# internal, for conditional conversion of values.
|
||||||
|
if cond:
|
||||||
|
klass = types_factory.for_property(name)
|
||||||
|
return klass(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def set(self, name, value, encode=1):
|
||||||
|
if type(value) == ListType:
|
||||||
|
self[name] = [self._encode(name, v, encode) for v in value]
|
||||||
|
else:
|
||||||
|
self[name] = self._encode(name, value, encode)
|
||||||
|
|
||||||
|
|
||||||
|
def add(self, name, value, encode=1):
|
||||||
|
"If property exists append, else create and set it"
|
||||||
|
if name in self:
|
||||||
|
oldval = self[name]
|
||||||
|
value = self._encode(name, value, encode)
|
||||||
|
if type(oldval) == ListType:
|
||||||
|
oldval.append(value)
|
||||||
|
else:
|
||||||
|
self.set(name, [oldval, value], encode=0)
|
||||||
|
else:
|
||||||
|
self.set(name, value, encode)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode(self, name, value):
|
||||||
|
# internal for decoding property values
|
||||||
|
decoded = types_factory.from_ical(name, value)
|
||||||
|
return decoded
|
||||||
|
|
||||||
|
|
||||||
|
def decoded(self, name, default=_marker):
|
||||||
|
"Returns decoded value of property"
|
||||||
|
if name in self:
|
||||||
|
value = self[name]
|
||||||
|
if type(value) == ListType:
|
||||||
|
return [self._decode(name, v) for v in value]
|
||||||
|
return self._decode(name, value)
|
||||||
|
else:
|
||||||
|
if default is _marker:
|
||||||
|
raise KeyError, name
|
||||||
|
else:
|
||||||
|
return default
|
||||||
|
|
||||||
|
|
||||||
|
########################################################################
|
||||||
|
# Inline values. A few properties have multiple values inlined in one
|
||||||
|
# property line. These methods are used for splitting and joining these.
|
||||||
|
|
||||||
|
def get_inline(self, name, decode=1):
|
||||||
|
"""
|
||||||
|
Returns a list of values (split on comma).
|
||||||
|
"""
|
||||||
|
vals = [v.strip('" ').encode(vText.encoding)
|
||||||
|
for v in q_split(self[name])]
|
||||||
|
if decode:
|
||||||
|
return [self._decode(name, val) for val in vals]
|
||||||
|
return vals
|
||||||
|
|
||||||
|
|
||||||
|
def set_inline(self, name, values, encode=1):
|
||||||
|
"""
|
||||||
|
Converts a list of values into a comma-separated string and sets the value to
|
||||||
|
that.
|
||||||
|
"""
|
||||||
|
if encode:
|
||||||
|
values = [self._encode(name, value, 1) for value in values]
|
||||||
|
joined = q_join(values).encode(vText.encoding)
|
||||||
|
self[name] = types_factory['inline'](joined)
|
||||||
|
|
||||||
|
|
||||||
|
#########################
|
||||||
|
# Handling of components
|
||||||
|
|
||||||
|
def add_component(self, component):
|
||||||
|
"add a subcomponent to this component"
|
||||||
|
self.subcomponents.append(component)
|
||||||
|
|
||||||
|
|
||||||
|
def _walk(self, name):
|
||||||
|
# private!
|
||||||
|
result = []
|
||||||
|
if name is None or self.name == name:
|
||||||
|
result.append(self)
|
||||||
|
for subcomponent in self.subcomponents:
|
||||||
|
result += subcomponent._walk(name)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def walk(self, name=None):
|
||||||
|
"""
|
||||||
|
Recursively traverses component and subcomponents. Returns sequence of
|
||||||
|
same. If name is passed, only components with name will be returned.
|
||||||
|
"""
|
||||||
|
if not name is None:
|
||||||
|
name = name.upper()
|
||||||
|
return self._walk(name)
|
||||||
|
|
||||||
|
#####################
|
||||||
|
# Generation
|
||||||
|
|
||||||
|
def property_items(self):
|
||||||
|
"""
|
||||||
|
Returns properties in this component and subcomponents as:
|
||||||
|
[(name, value), ...]
|
||||||
|
"""
|
||||||
|
vText = types_factory['text']
|
||||||
|
properties = [('BEGIN', vText(self.name).ical())]
|
||||||
|
property_names = self.keys()
|
||||||
|
property_names.sort()
|
||||||
|
for name in property_names:
|
||||||
|
values = self[name]
|
||||||
|
if type(values) == ListType:
|
||||||
|
# normally one property is one line
|
||||||
|
for value in values:
|
||||||
|
properties.append((name, value))
|
||||||
|
else:
|
||||||
|
properties.append((name, values))
|
||||||
|
# recursion is fun!
|
||||||
|
for subcomponent in self.subcomponents:
|
||||||
|
properties += subcomponent.property_items()
|
||||||
|
properties.append(('END', vText(self.name).ical()))
|
||||||
|
return properties
|
||||||
|
|
||||||
|
|
||||||
|
def from_string(st, multiple=False):
|
||||||
|
"""
|
||||||
|
Populates the component recursively from a string
|
||||||
|
"""
|
||||||
|
stack = [] # a stack of components
|
||||||
|
comps = []
|
||||||
|
for line in Contentlines.from_string(st): # raw parsing
|
||||||
|
if not line:
|
||||||
|
continue
|
||||||
|
name, params, vals = line.parts()
|
||||||
|
uname = name.upper()
|
||||||
|
# check for start of component
|
||||||
|
if uname == 'BEGIN':
|
||||||
|
# try and create one of the components defined in the spec,
|
||||||
|
# otherwise fall back to a generic Component for robustness.
|
||||||
|
component_name = vals.upper()
|
||||||
|
component_class = component_factory.get(component_name, Component)
|
||||||
|
component = component_class()
|
||||||
|
if not getattr(component, 'name', ''): # for undefined components
|
||||||
|
component.name = component_name
|
||||||
|
stack.append(component)
|
||||||
|
# check for end of event
|
||||||
|
elif uname == 'END':
|
||||||
|
# we are done adding properties to this component
|
||||||
|
# so pop it from the stack and add it to the new top.
|
||||||
|
component = stack.pop()
|
||||||
|
if not stack: # we are at the end
|
||||||
|
comps.append(component)
|
||||||
|
else:
|
||||||
|
stack[-1].add_component(component)
|
||||||
|
# we are adding properties to the current top of the stack
|
||||||
|
else:
|
||||||
|
factory = types_factory.for_property(name)
|
||||||
|
vals = factory(factory.from_ical(vals))
|
||||||
|
vals.params = params
|
||||||
|
stack[-1].add(name, vals, encode=0)
|
||||||
|
if multiple:
|
||||||
|
return comps
|
||||||
|
if not len(comps) == 1:
|
||||||
|
raise ValueError('Found multiple components where '
|
||||||
|
'only one is allowed')
|
||||||
|
return comps[0]
|
||||||
|
from_string = staticmethod(from_string)
|
||||||
|
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '%s(' % self.name + dict.__repr__(self) + ')'
|
||||||
|
|
||||||
|
# def content_line(self, name):
|
||||||
|
# "Returns property as content line"
|
||||||
|
# value = self[name]
|
||||||
|
# params = getattr(value, 'params', Parameters())
|
||||||
|
# return Contentline.from_parts((name, params, value))
|
||||||
|
|
||||||
|
def content_lines(self):
|
||||||
|
"Converts the Component and subcomponents into content lines"
|
||||||
|
contentlines = Contentlines()
|
||||||
|
for name, values in self.property_items():
|
||||||
|
params = getattr(values, 'params', Parameters())
|
||||||
|
contentlines.append(Contentline.from_parts((name, params, values)))
|
||||||
|
contentlines.append('') # remember the empty string in the end
|
||||||
|
return contentlines
|
||||||
|
|
||||||
|
|
||||||
|
def as_string(self):
|
||||||
|
return str(self.content_lines())
|
||||||
|
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
"Returns rendered iCalendar"
|
||||||
|
return self.as_string()
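# A short usage sketch of the property handling above (add/decoded); it assumes
# the package imports as icalendar.cal, as the tests further down do, and the
# property values are purely illustrative.
from icalendar.cal import Component

c = Component()
c.name = 'VEVENT'
c.add('dtstart', '20050101T120000', encode=0)   # store the raw iCalendar string
print c.as_string() == 'BEGIN:VEVENT\r\nDTSTART:20050101T120000\r\nEND:VEVENT\r\n'  # True
print c.decoded('dtstart')                      # 2005-01-01 12:00:00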
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#######################################
|
||||||
|
# components defined in RFC 2445
|
||||||
|
|
||||||
|
|
||||||
|
class Event(Component):
|
||||||
|
|
||||||
|
name = 'VEVENT'
|
||||||
|
|
||||||
|
required = ('UID',)
|
||||||
|
singletons = (
|
||||||
|
'CLASS', 'CREATED', 'DESCRIPTION', 'DTSTART', 'GEO',
|
||||||
|
'LAST-MOD', 'LOCATION', 'ORGANIZER', 'PRIORITY', 'DTSTAMP', 'SEQUENCE',
|
||||||
|
'STATUS', 'SUMMARY', 'TRANSP', 'URL', 'RECURID', 'DTEND', 'DURATION',
|
||||||
|
'DTSTART',
|
||||||
|
)
|
||||||
|
exclusive = ('DTEND', 'DURATION', )
|
||||||
|
multiple = (
|
||||||
|
'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT','CONTACT', 'EXDATE',
|
||||||
|
'EXRULE', 'RSTATUS', 'RELATED', 'RESOURCES', 'RDATE', 'RRULE'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Todo(Component):
|
||||||
|
|
||||||
|
name = 'VTODO'
|
||||||
|
|
||||||
|
required = ('UID',)
|
||||||
|
singletons = (
|
||||||
|
'CLASS', 'COMPLETED', 'CREATED', 'DESCRIPTION', 'DTSTAMP', 'DTSTART',
|
||||||
|
'GEO', 'LAST-MOD', 'LOCATION', 'ORGANIZER', 'PERCENT', 'PRIORITY',
|
||||||
|
'RECURID', 'SEQUENCE', 'STATUS', 'SUMMARY', 'UID', 'URL', 'DUE', 'DURATION',
|
||||||
|
)
|
||||||
|
exclusive = ('DUE', 'DURATION',)
|
||||||
|
multiple = (
|
||||||
|
'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE',
|
||||||
|
'EXRULE', 'RSTATUS', 'RELATED', 'RESOURCES', 'RDATE', 'RRULE'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Journal(Component):
|
||||||
|
|
||||||
|
name = 'VJOURNAL'
|
||||||
|
|
||||||
|
required = ('UID',)
|
||||||
|
singletons = (
|
||||||
|
'CLASS', 'CREATED', 'DESCRIPTION', 'DTSTART', 'DTSTAMP', 'LAST-MOD',
|
||||||
|
'ORGANIZER', 'RECURID', 'SEQUENCE', 'STATUS', 'SUMMARY', 'UID', 'URL',
|
||||||
|
)
|
||||||
|
multiple = (
|
||||||
|
'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE',
|
||||||
|
'EXRULE', 'RELATED', 'RDATE', 'RRULE', 'RSTATUS',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class FreeBusy(Component):
|
||||||
|
|
||||||
|
name = 'VFREEBUSY'
|
||||||
|
|
||||||
|
required = ('UID',)
|
||||||
|
singletons = (
|
||||||
|
'CONTACT', 'DTSTART', 'DTEND', 'DURATION', 'DTSTAMP', 'ORGANIZER',
|
||||||
|
'UID', 'URL',
|
||||||
|
)
|
||||||
|
multiple = ('ATTENDEE', 'COMMENT', 'FREEBUSY', 'RSTATUS',)
|
||||||
|
|
||||||
|
|
||||||
|
class Timezone(Component):
|
||||||
|
|
||||||
|
name = 'VTIMEZONE'
|
||||||
|
|
||||||
|
required = (
|
||||||
|
'TZID', 'STANDARDC', 'DAYLIGHTC', 'DTSTART', 'TZOFFSETTO',
|
||||||
|
'TZOFFSETFROM'
|
||||||
|
)
|
||||||
|
singletons = ('LAST-MOD', 'TZURL', 'TZID',)
|
||||||
|
multiple = ('COMMENT', 'RDATE', 'RRULE', 'TZNAME',)
|
||||||
|
|
||||||
|
|
||||||
|
class Alarm(Component):
|
||||||
|
|
||||||
|
name = 'VALARM'
|
||||||
|
# not quite sure about these ...
|
||||||
|
required = ('ACTION', 'TRIGGER',)
|
||||||
|
singletons = ('ATTACH', 'ACTION', 'TRIGGER', 'DURATION', 'REPEAT',)
|
||||||
|
inclusive = (('DURATION', 'REPEAT',),)
|
||||||
|
multiple = ('STANDARDC', 'DAYLIGHTC')
|
||||||
|
|
||||||
|
|
||||||
|
class Calendar(Component):
|
||||||
|
"""
|
||||||
|
This is the base object for an iCalendar file.
|
||||||
|
|
||||||
|
Setting up a minimal calendar component looks like this
|
||||||
|
>>> cal = Calendar()
|
||||||
|
|
||||||
|
Some properties are required to be compliant
|
||||||
|
>>> cal['prodid'] = '-//My calendar product//mxm.dk//'
|
||||||
|
>>> cal['version'] = '2.0'
|
||||||
|
|
||||||
|
We also need at least one subcomponent for a calendar to be compliant
|
||||||
|
>>> from datetime import datetime
|
||||||
|
>>> event = Event()
|
||||||
|
>>> event['summary'] = 'Python meeting about calendaring'
|
||||||
|
>>> event['uid'] = '42'
|
||||||
|
>>> event.set('dtstart', datetime(2005,4,4,8,0,0))
|
||||||
|
>>> cal.add_component(event)
|
||||||
|
>>> cal.subcomponents[0].as_string()
|
||||||
|
'BEGIN:VEVENT\\r\\nDTSTART:20050404T080000\\r\\nSUMMARY:Python meeting about calendaring\\r\\nUID:42\\r\\nEND:VEVENT\\r\\n'
|
||||||
|
|
||||||
|
Write to disk
|
||||||
|
>>> import tempfile, os
|
||||||
|
>>> directory = tempfile.mkdtemp()
|
||||||
|
>>> open(os.path.join(directory, 'test.ics'), 'wb').write(cal.as_string())
|
||||||
|
"""
|
||||||
|
|
||||||
|
name = 'VCALENDAR'
|
||||||
|
required = ('prodid', 'version', )
|
||||||
|
singletons = ('prodid', 'version', )
|
||||||
|
multiple = ('calscale', 'method', )
|
||||||
|
|
||||||
|
|
||||||
|
# These are read only singleton, so one instance is enough for the module
|
||||||
|
types_factory = TypesFactory()
|
||||||
|
component_factory = ComponentFactory()
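# A minimal parse/render round-trip sketch using the factories above; it assumes
# the package imports as icalendar.cal, and the input text is illustrative only.
from icalendar.cal import Calendar

ics = ('BEGIN:VCALENDAR\r\n'
       'BEGIN:VEVENT\r\n'
       'SUMMARY:Staff meeting\r\n'
       'END:VEVENT\r\n'
       'END:VCALENDAR\r\n')
cal = Calendar.from_string(ics)         # build nested components from text
print [c.name for c in cal.walk()]      # ['VCALENDAR', 'VEVENT']
print cal.as_string() == ics            # True for this simple input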
|
Webcal/icalendar/caselessdict.py (new file, 93 lines)
@ -0,0 +1,93 @@
|
|||||||
|
# -*- coding: latin-1 -*-
|
||||||
|
|
||||||
|
class CaselessDict(dict):
|
||||||
|
"""
|
||||||
|
A dictionary that isn't case sensitive, and only uses strings as keys.
|
||||||
|
|
||||||
|
>>> ncd = CaselessDict(key1='val1', key2='val2')
|
||||||
|
>>> ncd
|
||||||
|
CaselessDict({'KEY2': 'val2', 'KEY1': 'val1'})
|
||||||
|
>>> ncd['key1']
|
||||||
|
'val1'
|
||||||
|
>>> ncd['KEY1']
|
||||||
|
'val1'
|
||||||
|
>>> ncd['KEY3'] = 'val3'
|
||||||
|
>>> ncd['key3']
|
||||||
|
'val3'
|
||||||
|
>>> ncd.setdefault('key3', 'FOUND')
|
||||||
|
'val3'
|
||||||
|
>>> ncd.setdefault('key4', 'NOT FOUND')
|
||||||
|
'NOT FOUND'
|
||||||
|
>>> ncd['key4']
|
||||||
|
'NOT FOUND'
|
||||||
|
>>> ncd.get('key1')
|
||||||
|
'val1'
|
||||||
|
>>> ncd.get('key3', 'NOT FOUND')
|
||||||
|
'val3'
|
||||||
|
>>> ncd.get('key4', 'NOT FOUND')
|
||||||
|
'NOT FOUND'
|
||||||
|
>>> 'key4' in ncd
|
||||||
|
True
|
||||||
|
>>> del ncd['key4']
|
||||||
|
>>> ncd.has_key('key4')
|
||||||
|
False
|
||||||
|
>>> ncd.update({'key5':'val5', 'KEY6':'val6', 'KEY5':'val7'})
|
||||||
|
>>> ncd['key6']
|
||||||
|
'val6'
|
||||||
|
>>> keys = ncd.keys()
|
||||||
|
>>> keys.sort()
|
||||||
|
>>> keys
|
||||||
|
['KEY1', 'KEY2', 'KEY3', 'KEY5', 'KEY6']
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
"Set keys to upper for initial dict"
|
||||||
|
dict.__init__(self, *args, **kwargs)
|
||||||
|
for k,v in self.items():
|
||||||
|
k_upper = k.upper()
|
||||||
|
if k != k_upper:
|
||||||
|
dict.__delitem__(self, k)
|
||||||
|
self[k_upper] = v
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
return dict.__getitem__(self, key.upper())
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
dict.__setitem__(self, key.upper(), value)
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
dict.__delitem__(self, key.upper())
|
||||||
|
|
||||||
|
def __contains__(self, item):
|
||||||
|
return dict.__contains__(self, item.upper())
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return dict.get(self, key.upper(), default)
|
||||||
|
|
||||||
|
def setdefault(self, key, value=None):
|
||||||
|
return dict.setdefault(self, key.upper(), value)
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
return dict.pop(self, key.upper(), default)
|
||||||
|
|
||||||
|
def popitem(self):
|
||||||
|
return dict.popitem(self)
|
||||||
|
|
||||||
|
def has_key(self, key):
|
||||||
|
return dict.has_key(self, key.upper())
|
||||||
|
|
||||||
|
def update(self, indict):
|
||||||
|
"""
|
||||||
|
Multiple keys where key1.upper() == key2.upper() will be lost.
|
||||||
|
"""
|
||||||
|
for entry in indict:
|
||||||
|
self[entry] = indict[entry]
|
||||||
|
|
||||||
|
def copy(self):
|
||||||
|
return CaselessDict(dict.copy(self))
|
||||||
|
|
||||||
|
def clear(self):
|
||||||
|
dict.clear(self)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return 'CaselessDict(' + dict.__repr__(self) + ')'
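# A small sketch of the case-folding behaviour documented above; it assumes the
# module imports as icalendar.caselessdict.
from icalendar.caselessdict import CaselessDict

d = CaselessDict(dtstart='20050404T080000')
print d.keys()                         # ['DTSTART'] - keys are stored upper-cased
print d['dtstart'] == d['DTSTART']     # True - lookups are case insensitive
d.update({'DTSTART': 'x', 'dtstart': 'y'})
print len(d)                           # 1 - colliding keys fold into one entry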
|
Webcal/icalendar/interfaces.py (new file, 262 lines)
@ -0,0 +1,262 @@
|
|||||||
|
try:
|
||||||
|
from zope.interface import Interface, Attribute
|
||||||
|
except ImportError:
|
||||||
|
class Interface:
|
||||||
|
"""A dummy interface base class"""
|
||||||
|
|
||||||
|
class Attribute:
|
||||||
|
"""A dummy attribute implementation"""
|
||||||
|
def __init__(self, doc):
|
||||||
|
self.doc = doc
|
||||||
|
|
||||||
|
_marker = object()
|
||||||
|
|
||||||
|
class IComponent(Interface):
|
||||||
|
"""
|
||||||
|
Component is the base object for calendar, Event and the other
|
||||||
|
components defined in RFC 2445.
|
||||||
|
|
||||||
|
A component is like a dictionary with extra methods and attributes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# MANIPULATORS
|
||||||
|
|
||||||
|
def __setitem__(name, value):
|
||||||
|
"""Set a property.
|
||||||
|
|
||||||
|
name - case insensitive name
|
||||||
|
value - value of the property to set. This can be either a single
|
||||||
|
item or a list.
|
||||||
|
|
||||||
|
Some iCalendar properties are set INLINE; these properties
|
||||||
|
have multiple values on one property line in the iCalendar
|
||||||
|
representation. The list can be supplied as a comma separated
|
||||||
|
string to __setitem__. If special iCalendar characters exist in
|
||||||
|
an entry, such as a colon (:) or comma (,), that comma-separated
|
||||||
|
entry needs to be quoted with double quotes. For example:
|
||||||
|
|
||||||
|
'foo, bar, "baz:hoi"'
|
||||||
|
|
||||||
|
See also set_inline() for an easier way to deal with this case.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def set_inline(name, values, encode=1):
|
||||||
|
"""Set list of INLINE values for property.
|
||||||
|
|
||||||
|
Converts a list of values into a valid iCalendar comma-separated
|
||||||
|
string and sets value to that.
|
||||||
|
|
||||||
|
name - case insensitive name of property
|
||||||
|
values - list of values to set
|
||||||
|
encode - if True, encode Python values as iCalendar types first.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def add(name, value):
|
||||||
|
"""Add a property. Can be called multiple times to set a list.
|
||||||
|
|
||||||
|
name - case insensitive name
|
||||||
|
value - value of property to set or add to list for this property.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def add_component(component):
|
||||||
|
"""Add a nested subcomponent to this component.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# static method, can be called on class directly
|
||||||
|
def from_string(st, multiple=False):
|
||||||
|
"""Populates the component recursively from a iCalendar string.
|
||||||
|
|
||||||
|
Reads the iCalendar string and constructs components and
|
||||||
|
subcomponents out of it.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# ACCESSORS
|
||||||
|
def __getitem__(name):
|
||||||
|
"""Get a property
|
||||||
|
|
||||||
|
name - case insensitive name
|
||||||
|
|
||||||
|
Returns an iCalendar property object such as vText.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def decoded(name, default=_marker):
|
||||||
|
"""Get a property as a python object.
|
||||||
|
|
||||||
|
name - case insensitive name
|
||||||
|
default - optional argument. If supplied, will use this if
|
||||||
|
name cannot be found. If not supplied, decoded will raise a
|
||||||
|
KeyError if name cannot be found.
|
||||||
|
|
||||||
|
Returns python object (such as unicode string, datetime, etc).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_inline(name, decode=1):
|
||||||
|
"""Get list of INLINE values from property.
|
||||||
|
|
||||||
|
name - case insensitive name
|
||||||
|
decode - decode to Python objects.
|
||||||
|
|
||||||
|
Returns list of python objects.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def as_string():
|
||||||
|
"""Render the component in the RFC 2445 (iCalendar) format.
|
||||||
|
|
||||||
|
Returns a string in RFC 2445 format.
|
||||||
|
"""
|
||||||
|
|
||||||
|
subcomponents = Attribute("""
|
||||||
|
A list of all subcomponents of this component,
|
||||||
|
added using add_component()""")
|
||||||
|
|
||||||
|
name = Attribute("""
|
||||||
|
Name of this component (VEVENT, etc)
|
||||||
|
""")
|
||||||
|
|
||||||
|
def walk(name=None):
|
||||||
|
"""Recursively traverses component and subcomponents.
|
||||||
|
|
||||||
|
name - optional, if given, only return components with that name
|
||||||
|
|
||||||
|
Returns sequence of components.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def property_items():
|
||||||
|
"""Return properties as (name, value) tuples.
|
||||||
|
|
||||||
|
Returns all properties in this component and subcomponents as
|
||||||
|
name, value tuples.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IEvent(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VEVENT.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class ITodo(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VTODO.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IJournal(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VJOURNAL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IFreeBusy(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VFREEBUSY.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class ITimezone(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VTIMEZONE.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IAlarm(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VALARM.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class ICalendar(IComponent):
|
||||||
|
"""A component which conforms to an iCalendar VCALENDAR.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IPropertyValue(Interface):
|
||||||
|
"""An iCalendar property value.
|
||||||
|
iCalendar properties have strongly typed values.
|
||||||
|
|
||||||
|
This invariance should always be true:
|
||||||
|
|
||||||
|
assert x == vDataType.from_ical(vDataType(x).ical())
|
||||||
|
"""
|
||||||
|
|
||||||
|
def ical():
|
||||||
|
"""Render property as string, as defined in iCalendar RFC 2445.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# this is a static method
|
||||||
|
def from_ical(ical):
|
||||||
|
"""Parse property from iCalendar RFC 2445 text.
|
||||||
|
|
||||||
|
Inverse of ical().
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IBinary(IPropertyValue):
|
||||||
|
"""Binary property values are base 64 encoded
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IBoolean(IPropertyValue):
|
||||||
|
"""Boolean property.
|
||||||
|
|
||||||
|
Also behaves like a python int.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class ICalAddress(IPropertyValue):
|
||||||
|
"""Email address.
|
||||||
|
|
||||||
|
Also behaves like a python str.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IDateTime(IPropertyValue):
|
||||||
|
"""Render and generates iCalendar datetime format.
|
||||||
|
|
||||||
|
Important: if tzinfo is defined, it renders itself as 'date with UTC time',
|
||||||
|
meaning that it has a 'Z' appended, and is in absolute time.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IDate(IPropertyValue):
|
||||||
|
"""Render and generates iCalendar date format.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IDuration(IPropertyValue):
|
||||||
|
"""Render and generates timedelta in iCalendar DURATION format.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IFloat(IPropertyValue):
|
||||||
|
"""Render and generate floats in iCalendar format.
|
||||||
|
|
||||||
|
Also behaves like a python float.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IInt(IPropertyValue):
|
||||||
|
"""Render and generate ints in iCalendar format.
|
||||||
|
|
||||||
|
Also behaves like a python int.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IPeriod(IPropertyValue):
|
||||||
|
"""A precise period of time (datetime, datetime).
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IWeekDay(IPropertyValue):
|
||||||
|
"""Render and generate weekday abbreviation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IFrequency(IPropertyValue):
|
||||||
|
"""Frequency.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IRecur(IPropertyValue):
|
||||||
|
"""Render and generate data based on recurrent event representation.
|
||||||
|
|
||||||
|
This acts like a caseless dictionary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IText(IPropertyValue):
|
||||||
|
"""Unicode text.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class ITime(IPropertyValue):
|
||||||
|
"""Time.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IUri(IPropertyValue):
|
||||||
|
"""URI
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IGeo(IPropertyValue):
|
||||||
|
"""Geographical location.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IUTCOffset(IPropertyValue):
|
||||||
|
"""Offset from UTC.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class IInline(IPropertyValue):
|
||||||
|
"""Inline list.
|
||||||
|
"""
|
Webcal/icalendar/parser.py (new file, 522 lines)
@ -0,0 +1,522 @@
|
|||||||
|
# -*- coding: latin-1 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
This module parses and generates contentlines as defined in RFC 2445
|
||||||
|
(iCalendar), but will probably work for other MIME types with similar syntax.
|
||||||
|
E.g. RFC 2426 (vCard).
|
||||||
|
|
||||||
|
It is stupid in the sense that it treats the content purely as strings. No type
|
||||||
|
conversion is attempted.
|
||||||
|
|
||||||
|
Copyright, 2005: Max M <maxm@mxm.dk>
|
||||||
|
License: GPL (Just contact me if and why you would like it changed)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# from python
|
||||||
|
from types import TupleType, ListType
|
||||||
|
SequenceTypes = [TupleType, ListType]
|
||||||
|
import re
|
||||||
|
# from this package
|
||||||
|
from icalendar.caselessdict import CaselessDict
|
||||||
|
|
||||||
|
|
||||||
|
#################################################################
|
||||||
|
# Property parameter stuff
|
||||||
|
|
||||||
|
def paramVal(val):
|
||||||
|
"Returns a parameter value"
|
||||||
|
if type(val) in SequenceTypes:
|
||||||
|
return q_join(val)
|
||||||
|
return dQuote(val)
|
||||||
|
|
||||||
|
# Could be improved
|
||||||
|
NAME = re.compile('[\w-]+')
|
||||||
|
UNSAFE_CHAR = re.compile('[\x00-\x08\x0a-\x1f\x7F",:;]')
|
||||||
|
QUNSAFE_CHAR = re.compile('[\x00-\x08\x0a-\x1f\x7F"]')
|
||||||
|
FOLD = re.compile('([\r]?\n)+[ \t]{1}')
|
||||||
|
|
||||||
|
def validate_token(name):
|
||||||
|
match = NAME.findall(name)
|
||||||
|
if len(match) == 1 and name == match[0]:
|
||||||
|
return
|
||||||
|
raise ValueError, name
|
||||||
|
|
||||||
|
def validate_param_value(value, quoted=True):
|
||||||
|
validator = UNSAFE_CHAR
|
||||||
|
if quoted:
|
||||||
|
validator = QUNSAFE_CHAR
|
||||||
|
if validator.findall(value):
|
||||||
|
raise ValueError, value
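# A brief sketch of the two validators above: token names may only contain word
# characters and '-', and unquoted parameter values reject control characters
# and '",:;'. It assumes the module imports as icalendar.parser.
from icalendar.parser import validate_token, validate_param_value

validate_token('X-MY-PROP')                            # passes silently
validate_param_value('Rasmussen, Max', quoted=True)    # commas are fine inside quotes
try:
    validate_param_value('Rasmussen, Max', quoted=False)
except ValueError:
    print 'unquoted commas are rejected'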
|
||||||
|
|
||||||
|
QUOTABLE = re.compile('[,;:].')
|
||||||
|
def dQuote(val):
|
||||||
|
"""
|
||||||
|
Parameter values containing [,;:] must be double quoted
|
||||||
|
>>> dQuote('Max')
|
||||||
|
'Max'
|
||||||
|
>>> dQuote('Rasmussen, Max')
|
||||||
|
'"Rasmussen, Max"'
|
||||||
|
>>> dQuote('name:value')
|
||||||
|
'"name:value"'
|
||||||
|
"""
|
||||||
|
if QUOTABLE.search(val):
|
||||||
|
return '"%s"' % val
|
||||||
|
return val
|
||||||
|
|
||||||
|
# parsing helper
|
||||||
|
def q_split(st, sep=','):
|
||||||
|
"""
|
||||||
|
Splits a string on sep, taking double (q)uotes into consideration
|
||||||
|
>>> q_split('Max,Moller,"Rasmussen, Max"')
|
||||||
|
['Max', 'Moller', '"Rasmussen, Max"']
|
||||||
|
"""
|
||||||
|
result = []
|
||||||
|
cursor = 0
|
||||||
|
length = len(st)
|
||||||
|
inquote = 0
|
||||||
|
for i in range(length):
|
||||||
|
ch = st[i]
|
||||||
|
if ch == '"':
|
||||||
|
inquote = not inquote
|
||||||
|
if not inquote and ch == sep:
|
||||||
|
result.append(st[cursor:i])
|
||||||
|
cursor = i + 1
|
||||||
|
if i + 1 == length:
|
||||||
|
result.append(st[cursor:])
|
||||||
|
return result
|
||||||
|
|
||||||
|
def q_join(lst, sep=','):
|
||||||
|
"""
|
||||||
|
Joins a list on sep, quoting strings with QUOTABLE chars
|
||||||
|
>>> s = ['Max', 'Moller', 'Rasmussen, Max']
|
||||||
|
>>> q_join(s)
|
||||||
|
'Max,Moller,"Rasmussen, Max"'
|
||||||
|
"""
|
||||||
|
return sep.join([dQuote(itm) for itm in lst])
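# A tiny round-trip sketch for the quote-aware helpers above, mirroring their
# doctests; it assumes the package imports as icalendar.parser.
from icalendar.parser import q_split, q_join

vals = q_split('Max,Moller,"Rasmussen, Max"')
print vals                                         # ['Max', 'Moller', '"Rasmussen, Max"']
print q_join(['Max', 'Moller', 'Rasmussen, Max'])  # Max,Moller,"Rasmussen, Max"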
|
||||||
|
|
||||||
|
class Parameters(CaselessDict):
|
||||||
|
"""
|
||||||
|
Parser and generator of Property parameter strings. It knows nothing of
|
||||||
|
datatypes. Its main concern is textual structure.
|
||||||
|
|
||||||
|
|
||||||
|
Simple parameter:value pair
|
||||||
|
>>> p = Parameters(parameter1='Value1')
|
||||||
|
>>> str(p)
|
||||||
|
'PARAMETER1=Value1'
|
||||||
|
|
||||||
|
|
||||||
|
keys are converted to upper
|
||||||
|
>>> p.keys()
|
||||||
|
['PARAMETER1']
|
||||||
|
|
||||||
|
|
||||||
|
Parameters are case insensitive
|
||||||
|
>>> p['parameter1']
|
||||||
|
'Value1'
|
||||||
|
>>> p['PARAMETER1']
|
||||||
|
'Value1'
|
||||||
|
|
||||||
|
|
||||||
|
A parameter with a list of values must be separated by commas
|
||||||
|
>>> p = Parameters({'parameter1':['Value1', 'Value2']})
|
||||||
|
>>> str(p)
|
||||||
|
'PARAMETER1=Value1,Value2'
|
||||||
|
|
||||||
|
|
||||||
|
Multiple parameters must be separated by a semicolon
|
||||||
|
>>> p = Parameters({'RSVP':'TRUE', 'ROLE':'REQ-PARTICIPANT'})
|
||||||
|
>>> str(p)
|
||||||
|
'ROLE=REQ-PARTICIPANT;RSVP=TRUE'
|
||||||
|
|
||||||
|
|
||||||
|
Parameter values containing ',;:' must be double quoted
|
||||||
|
>>> p = Parameters({'ALTREP':'http://www.wiz.org'})
|
||||||
|
>>> str(p)
|
||||||
|
'ALTREP="http://www.wiz.org"'
|
||||||
|
|
||||||
|
|
||||||
|
List items must be quoted separately
|
||||||
|
>>> p = Parameters({'MEMBER':['MAILTO:projectA@host.com', 'MAILTO:projectB@host.com', ]})
|
||||||
|
>>> str(p)
|
||||||
|
'MEMBER="MAILTO:projectA@host.com","MAILTO:projectB@host.com"'
|
||||||
|
|
||||||
|
Now the whole shebang
|
||||||
|
>>> p = Parameters({'parameter1':'Value1', 'parameter2':['Value2', 'Value3'],\
|
||||||
|
'ALTREP':['http://www.wiz.org', 'value4']})
|
||||||
|
>>> str(p)
|
||||||
|
'ALTREP="http://www.wiz.org",value4;PARAMETER1=Value1;PARAMETER2=Value2,Value3'
|
||||||
|
|
||||||
|
We can also parse parameter strings
|
||||||
|
>>> Parameters.from_string('PARAMETER1=Value 1;param2=Value 2')
|
||||||
|
Parameters({'PARAMETER1': 'Value 1', 'PARAM2': 'Value 2'})
|
||||||
|
|
||||||
|
Including empty strings
|
||||||
|
>>> Parameters.from_string('param=')
|
||||||
|
Parameters({'PARAM': ''})
|
||||||
|
|
||||||
|
We can also parse parameter strings
|
||||||
|
>>> Parameters.from_string('MEMBER="MAILTO:projectA@host.com","MAILTO:projectB@host.com"')
|
||||||
|
Parameters({'MEMBER': ['MAILTO:projectA@host.com', 'MAILTO:projectB@host.com']})
|
||||||
|
|
||||||
|
We can also parse parameter strings
|
||||||
|
>>> Parameters.from_string('ALTREP="http://www.wiz.org",value4;PARAMETER1=Value1;PARAMETER2=Value2,Value3')
|
||||||
|
Parameters({'PARAMETER1': 'Value1', 'ALTREP': ['http://www.wiz.org', 'value4'], 'PARAMETER2': ['Value2', 'Value3']})
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def params(self):
|
||||||
|
"""
|
||||||
|
In RFC 2445 keys are called parameters, so this is to be consistent with
|
||||||
|
the naming conventions
|
||||||
|
"""
|
||||||
|
return self.keys()
|
||||||
|
|
||||||
|
### Later, when I get more time... need to finish this off now. The last major thing missing.
|
||||||
|
### def _encode(self, name, value, cond=1):
|
||||||
|
### # internal, for conditional conversion of values.
|
||||||
|
### if cond:
|
||||||
|
### klass = types_factory.for_property(name)
|
||||||
|
### return klass(value)
|
||||||
|
### return value
|
||||||
|
###
|
||||||
|
### def add(self, name, value, encode=0):
|
||||||
|
### "Add a parameter value and optionally encode it."
|
||||||
|
### if encode:
|
||||||
|
### value = self._encode(name, value, encode)
|
||||||
|
### self[name] = value
|
||||||
|
###
|
||||||
|
### def decoded(self, name):
|
||||||
|
### "returns a decoded value, or list of same"
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return 'Parameters(' + dict.__repr__(self) + ')'
|
||||||
|
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
result = []
|
||||||
|
items = self.items()
|
||||||
|
items.sort() # To make doctests work
|
||||||
|
for key, value in items:
|
||||||
|
value = paramVal(value)
|
||||||
|
result.append('%s=%s' % (key.upper(), value))
|
||||||
|
return ';'.join(result)
|
||||||
|
|
||||||
|
|
||||||
|
def from_string(st, strict=False):
|
||||||
|
"Parses the parameter format from ical text format"
|
||||||
|
try:
|
||||||
|
# parse into strings
|
||||||
|
result = Parameters()
|
||||||
|
for param in q_split(st, ';'):
|
||||||
|
key, val = q_split(param, '=')
|
||||||
|
validate_token(key)
|
||||||
|
param_values = [v for v in q_split(val, ',')]
|
||||||
|
# Property parameter values that are not in quoted
|
||||||
|
# strings are case insensitive.
|
||||||
|
vals = []
|
||||||
|
for v in param_values:
|
||||||
|
if v.startswith('"') and v.endswith('"'):
|
||||||
|
v = v.strip('"')
|
||||||
|
validate_param_value(v, quoted=True)
|
||||||
|
vals.append(v)
|
||||||
|
else:
|
||||||
|
validate_param_value(v, quoted=False)
|
||||||
|
if strict:
|
||||||
|
vals.append(v.upper())
|
||||||
|
else:
|
||||||
|
vals.append(v)
|
||||||
|
if not vals:
|
||||||
|
result[key] = val
|
||||||
|
else:
|
||||||
|
if len(vals) == 1:
|
||||||
|
result[key] = vals[0]
|
||||||
|
else:
|
||||||
|
result[key] = vals
|
||||||
|
return result
|
||||||
|
except:
|
||||||
|
raise ValueError, 'Not a valid parameter string'
|
||||||
|
from_string = staticmethod(from_string)
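# A small round-trip sketch for Parameters, mirroring the doctests above; it
# assumes the package imports as icalendar.parser.
from icalendar.parser import Parameters

p = Parameters({'ALTREP': 'http://www.wiz.org', 'ROLE': 'REQ-PARTICIPANT'})
text = str(p)
print text                               # ALTREP="http://www.wiz.org";ROLE=REQ-PARTICIPANT
print Parameters.from_string(text) == p  # True - parsing undoes rendering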
|
||||||
|
|
||||||
|
|
||||||
|
#########################################
|
||||||
|
# parsing and generation of content lines
|
||||||
|
|
||||||
|
class Contentline(str):
|
||||||
|
"""
|
||||||
|
A content line is basically a string that can be folded and parsed into
|
||||||
|
parts.
|
||||||
|
|
||||||
|
>>> c = Contentline('Si meliora dies, ut vina, poemata reddit')
|
||||||
|
>>> str(c)
|
||||||
|
'Si meliora dies, ut vina, poemata reddit'
|
||||||
|
|
||||||
|
A long line gets folded
|
||||||
|
>>> c = Contentline(''.join(['123456789 ']*10))
|
||||||
|
>>> str(c)
|
||||||
|
'123456789 123456789 123456789 123456789 123456789 123456789 123456789 1234\\r\\n 56789 123456789 123456789 '
|
||||||
|
|
||||||
|
A folded line gets unfolded
|
||||||
|
>>> c = Contentline.from_string(str(c))
|
||||||
|
>>> c
|
||||||
|
'123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 '
|
||||||
|
|
||||||
|
We do not fold within a UTF-8 character:
|
||||||
|
>>> c = Contentline('This line has a UTF-8 character where it should be folded. Make sure it g\xc3\xabts folded before that character.')
|
||||||
|
>>> '\xc3\xab' in str(c)
|
||||||
|
True
|
||||||
|
|
||||||
|
Don't fail if we fold a line that is exactly X times 74 characters long:
|
||||||
|
>>> c = str(Contentline(''.join(['x']*148)))
|
||||||
|
|
||||||
|
It can parse itself into parts, which is a tuple of (name, params, vals)
|
||||||
|
|
||||||
|
>>> c = Contentline('dtstart:20050101T120000')
|
||||||
|
>>> c.parts()
|
||||||
|
('dtstart', Parameters({}), '20050101T120000')
|
||||||
|
|
||||||
|
>>> c = Contentline('dtstart;value=datetime:20050101T120000')
|
||||||
|
>>> c.parts()
|
||||||
|
('dtstart', Parameters({'VALUE': 'datetime'}), '20050101T120000')
|
||||||
|
|
||||||
|
>>> c = Contentline('ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:MAILTO:maxm@example.com')
|
||||||
|
>>> c.parts()
|
||||||
|
('ATTENDEE', Parameters({'ROLE': 'REQ-PARTICIPANT', 'CN': 'Max Rasmussen'}), 'MAILTO:maxm@example.com')
|
||||||
|
>>> str(c)
|
||||||
|
'ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:MAILTO:maxm@example.com'
|
||||||
|
|
||||||
|
and back again
|
||||||
|
>>> parts = ('ATTENDEE', Parameters({'ROLE': 'REQ-PARTICIPANT', 'CN': 'Max Rasmussen'}), 'MAILTO:maxm@example.com')
|
||||||
|
>>> Contentline.from_parts(parts)
|
||||||
|
'ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:MAILTO:maxm@example.com'
|
||||||
|
|
||||||
|
and again
|
||||||
|
>>> parts = ('ATTENDEE', Parameters(), 'MAILTO:maxm@example.com')
|
||||||
|
>>> Contentline.from_parts(parts)
|
||||||
|
'ATTENDEE:MAILTO:maxm@example.com'
|
||||||
|
|
||||||
|
A value can also be any of the types defined in PropertyValues
|
||||||
|
>>> from icalendar.prop import vText
|
||||||
|
>>> parts = ('ATTENDEE', Parameters(), vText('MAILTO:test@example.com'))
|
||||||
|
>>> Contentline.from_parts(parts)
|
||||||
|
'ATTENDEE:MAILTO:test@example.com'
|
||||||
|
|
||||||
|
A value can also be unicode
|
||||||
|
>>> from icalendar.prop import vText
|
||||||
|
>>> parts = ('SUMMARY', Parameters(), vText(u'INternational char æ ø å'))
|
||||||
|
>>> Contentline.from_parts(parts)
|
||||||
|
'SUMMARY:INternational char \\xc3\\xa6 \\xc3\\xb8 \\xc3\\xa5'
|
||||||
|
|
||||||
|
Traversing could look like this.
|
||||||
|
>>> name, params, vals = c.parts()
|
||||||
|
>>> name
|
||||||
|
'ATTENDEE'
|
||||||
|
>>> vals
|
||||||
|
'MAILTO:maxm@example.com'
|
||||||
|
>>> for key, val in params.items():
|
||||||
|
... (key, val)
|
||||||
|
('ROLE', 'REQ-PARTICIPANT')
|
||||||
|
('CN', 'Max Rasmussen')
|
||||||
|
|
||||||
|
And the traditional failure
|
||||||
|
>>> c = Contentline('ATTENDEE;maxm@example.com')
|
||||||
|
>>> c.parts()
|
||||||
|
Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
ValueError: Content line could not be parsed into parts
|
||||||
|
|
||||||
|
Another failure:
|
||||||
|
>>> c = Contentline(':maxm@example.com')
|
||||||
|
>>> c.parts()
|
||||||
|
Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
ValueError: Content line could not be parsed into parts
|
||||||
|
|
||||||
|
>>> c = Contentline('key;param=:value')
|
||||||
|
>>> c.parts()
|
||||||
|
('key', Parameters({'PARAM': ''}), 'value')
|
||||||
|
|
||||||
|
>>> c = Contentline('key;param="pvalue":value')
|
||||||
|
>>> c.parts()
|
||||||
|
('key', Parameters({'PARAM': 'pvalue'}), 'value')
|
||||||
|
|
||||||
|
Should bomb on missing param:
|
||||||
|
>>> c = Contentline.from_string("k;:no param")
|
||||||
|
>>> c.parts()
|
||||||
|
Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
ValueError: Content line could not be parsed into parts
|
||||||
|
|
||||||
|
>>> c = Contentline('key;param=pvalue:value', strict=False)
|
||||||
|
>>> c.parts()
|
||||||
|
('key', Parameters({'PARAM': 'pvalue'}), 'value')
|
||||||
|
|
||||||
|
If strict is set to True, param values that are not
|
||||||
|
double-quoted are uppercased, because the spec says non-quoted params are
|
||||||
|
case-insensitive.
|
||||||
|
|
||||||
|
>>> c = Contentline('key;param=pvalue:value', strict=True)
|
||||||
|
>>> c.parts()
|
||||||
|
('key', Parameters({'PARAM': 'PVALUE'}), 'value')
|
||||||
|
|
||||||
|
>>> c = Contentline('key;param="pValue":value', strict=True)
|
||||||
|
>>> c.parts()
|
||||||
|
('key', Parameters({'PARAM': 'pValue'}), 'value')
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __new__(cls, st, strict=False):
|
||||||
|
self = str.__new__(cls, st)
|
||||||
|
setattr(self, 'strict', strict)
|
||||||
|
return self
|
||||||
|
|
||||||
|
def from_parts(parts):
|
||||||
|
"Turns a tuple of parts into a content line"
|
||||||
|
(name, params, values) = [str(p) for p in parts]
|
||||||
|
try:
|
||||||
|
if params:
|
||||||
|
return Contentline('%s;%s:%s' % (name, params, values))
|
||||||
|
return Contentline('%s:%s' % (name, values))
|
||||||
|
except:
|
||||||
|
raise ValueError(
|
||||||
|
'Property: %s Wrong values "%s" or "%s"' % (repr(name),
|
||||||
|
repr(params),
|
||||||
|
repr(values)))
|
||||||
|
from_parts = staticmethod(from_parts)
|
||||||
|
|
||||||
|
def parts(self):
|
||||||
|
""" Splits the content line up into (name, parameters, values) parts
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
name_split = None
|
||||||
|
value_split = None
|
||||||
|
inquotes = 0
|
||||||
|
for i in range(len(self)):
|
||||||
|
ch = self[i]
|
||||||
|
if not inquotes:
|
||||||
|
if ch in ':;' and not name_split:
|
||||||
|
name_split = i
|
||||||
|
if ch == ':' and not value_split:
|
||||||
|
value_split = i
|
||||||
|
if ch == '"':
|
||||||
|
inquotes = not inquotes
|
||||||
|
name = self[:name_split]
|
||||||
|
if not name:
|
||||||
|
raise ValueError, 'Key name is required'
|
||||||
|
validate_token(name)
|
||||||
|
if name_split+1 == value_split:
|
||||||
|
raise ValueError, 'Invalid content line'
|
||||||
|
params = Parameters.from_string(self[name_split+1:value_split],
|
||||||
|
strict=self.strict)
|
||||||
|
values = self[value_split+1:]
|
||||||
|
return (name, params, values)
|
||||||
|
except:
|
||||||
|
raise ValueError, 'Content line could not be parsed into parts'
|
||||||
|
|
||||||
|
def from_string(st, strict=False):
|
||||||
|
"Unfolds the content lines in an iCalendar into long content lines"
|
||||||
|
try:
|
||||||
|
# a fold is carriage return followed by either a space or a tab
|
||||||
|
return Contentline(FOLD.sub('', st), strict=strict)
|
||||||
|
except:
|
||||||
|
raise ValueError, 'Expected StringType with content line'
|
||||||
|
from_string = staticmethod(from_string)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
"Long content lines are folded so they are less than 75 characters wide"
|
||||||
|
l_line = len(self)
|
||||||
|
new_lines = []
|
||||||
|
start = 0
|
||||||
|
end = 74
|
||||||
|
while True:
|
||||||
|
if end >= l_line:
|
||||||
|
end = l_line
|
||||||
|
else:
|
||||||
|
# Check that we don't fold in the middle of a UTF-8 character:
|
||||||
|
# http://lists.osafoundation.org/pipermail/ietf-calsify/2006-August/001126.html
|
||||||
|
while True:
|
||||||
|
char_value = ord(self[end])
|
||||||
|
if char_value < 128 or char_value >= 192:
|
||||||
|
# This is not in the middle of a UTF-8 character, so we
|
||||||
|
# can fold here:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
end -= 1
|
||||||
|
|
||||||
|
new_lines.append(self[start:end])
|
||||||
|
if end == l_line:
|
||||||
|
# Done
|
||||||
|
break
|
||||||
|
start = end
|
||||||
|
end = start + 74
|
||||||
|
return '\r\n '.join(new_lines)
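# A folding/unfolding sketch, mirroring the doctests above; it assumes the
# package imports as icalendar.parser and the property text is illustrative.
from icalendar.parser import Contentline

long_line = Contentline('DESCRIPTION:' + 'x' * 100)
folded = str(long_line)
print '\r\n ' in folded                              # True - folded with CRLF plus a space
print Contentline.from_string(folded) == long_line   # True - unfolding restores the line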
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class Contentlines(list):
|
||||||
|
"""
|
||||||
|
I assume that iCalendar files generally are a few kilobytes in size. Then
|
||||||
|
this should be efficient. For huge files, an iterator should probably be
|
||||||
|
used instead.
|
||||||
|
|
||||||
|
>>> c = Contentlines([Contentline('BEGIN:VEVENT\\r\\n')])
|
||||||
|
>>> str(c)
|
||||||
|
'BEGIN:VEVENT\\r\\n'
|
||||||
|
|
||||||
|
Let's try appending a 100 character wide string
|
||||||
|
>>> c.append(Contentline(''.join(['123456789 ']*10)+'\\r\\n'))
|
||||||
|
>>> str(c)
|
||||||
|
'BEGIN:VEVENT\\r\\n\\r\\n123456789 123456789 123456789 123456789 123456789 123456789 123456789 1234\\r\\n 56789 123456789 123456789 \\r\\n'
|
||||||
|
|
||||||
|
Notice that there is an extra empty string in the end of the content lines.
|
||||||
|
That is so they can be easily joined with '\r\n'.join(contentlines).
|
||||||
|
>>> Contentlines.from_string('A short line\\r\\n')
|
||||||
|
['A short line', '']
|
||||||
|
>>> Contentlines.from_string('A faked\\r\\n long line\\r\\n')
|
||||||
|
['A faked long line', '']
|
||||||
|
>>> Contentlines.from_string('A faked\\r\\n long line\\r\\nAnd another lin\\r\\n\\te that is folded\\r\\n')
|
||||||
|
['A faked long line', 'And another line that is folded', '']
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
"Simply join self."
|
||||||
|
return '\r\n'.join(map(str, self))
|
||||||
|
|
||||||
|
def from_string(st):
|
||||||
|
"Parses a string into content lines"
|
||||||
|
try:
|
||||||
|
# a fold is carriage return followed by either a space or a tab
|
||||||
|
unfolded = FOLD.sub('', st)
|
||||||
|
lines = [Contentline(line) for line in unfolded.splitlines() if line]
|
||||||
|
lines.append('') # we need a '\r\n' in the end of every content line
|
||||||
|
return Contentlines(lines)
|
||||||
|
except:
|
||||||
|
raise ValueError, 'Expected StringType with content lines'
|
||||||
|
from_string = staticmethod(from_string)
|
||||||
|
|
||||||
|
|
||||||
|
# ran this:
|
||||||
|
# sample = open('./samples/test.ics', 'rb').read() # binary file in windows!
|
||||||
|
# lines = Contentlines.from_string(sample)
|
||||||
|
# for line in lines[:-1]:
|
||||||
|
# print line.parts()
|
||||||
|
|
||||||
|
# got this:
|
||||||
|
#('BEGIN', Parameters({}), 'VCALENDAR')
|
||||||
|
#('METHOD', Parameters({}), 'Request')
|
||||||
|
#('PRODID', Parameters({}), '-//My product//mxm.dk/')
|
||||||
|
#('VERSION', Parameters({}), '2.0')
|
||||||
|
#('BEGIN', Parameters({}), 'VEVENT')
|
||||||
|
#('DESCRIPTION', Parameters({}), 'This is a very long description that ...')
|
||||||
|
#('PARTICIPANT', Parameters({'CN': 'Max M'}), 'MAILTO:maxm@mxm.dk')
|
||||||
|
#('DTEND', Parameters({}), '20050107T160000')
|
||||||
|
#('DTSTART', Parameters({}), '20050107T120000')
|
||||||
|
#('SUMMARY', Parameters({}), 'A second event')
|
||||||
|
#('END', Parameters({}), 'VEVENT')
|
||||||
|
#('BEGIN', Parameters({}), 'VEVENT')
|
||||||
|
#('DTEND', Parameters({}), '20050108T235900')
|
||||||
|
#('DTSTART', Parameters({}), '20050108T230000')
|
||||||
|
#('SUMMARY', Parameters({}), 'A single event')
|
||||||
|
#('UID', Parameters({}), '42')
|
||||||
|
#('END', Parameters({}), 'VEVENT')
|
||||||
|
#('END', Parameters({}), 'VCALENDAR')
|
Webcal/icalendar/prop.py (new file, 1513 lines)
File diff suppressed because it is too large
Webcal/icalendar/tests/__init__.py (new file, 1 line)
@ -0,0 +1 @@
|
|||||||
|
# this is a package
|
Webcal/icalendar/tests/test_icalendar.py (new file, 16 lines)
@ -0,0 +1,16 @@
|
|||||||
|
import unittest, doctest, os
|
||||||
|
from icalendar import cal, caselessdict, parser, prop
|
||||||
|
|
||||||
|
def test_suite():
|
||||||
|
suite = unittest.TestSuite()
|
||||||
|
|
||||||
|
suite.addTest(doctest.DocTestSuite(caselessdict))
|
||||||
|
suite.addTest(doctest.DocTestSuite(parser))
|
||||||
|
suite.addTest(doctest.DocTestSuite(prop))
|
||||||
|
suite.addTest(doctest.DocTestSuite(cal))
|
||||||
|
doc_dir = '../../../doc'
|
||||||
|
for docfile in ['example.txt', 'groupscheduled.txt',
|
||||||
|
'small.txt', 'multiple.txt', 'recurrence.txt']:
|
||||||
|
suite.addTest(doctest.DocFileSuite(os.path.join(doc_dir, docfile),
|
||||||
|
optionflags=doctest.ELLIPSIS),)
|
||||||
|
return suite
|
Webcal/icalendar/tools.py (new file, 53 lines)
@ -0,0 +1,53 @@
|
|||||||
|
from string import ascii_letters, digits
|
||||||
|
import random
|
||||||
|
|
||||||
|
"""
|
||||||
|
This module contains non-essential tools for iCalendar. Pretty thin so far eh?
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
class UIDGenerator:
|
||||||
|
|
||||||
|
"""
|
||||||
|
If you are too lazy to create real uids. Notice, this doctest is disabled!
|
||||||
|
|
||||||
|
Automatic semi-random uid
|
||||||
|
>> g = UIDGenerator()
|
||||||
|
>> uid = g.uid()
|
||||||
|
>> uid.ical()
|
||||||
|
'20050109T153222-7ekDDHKcw46QlwZK@example.com'
|
||||||
|
|
||||||
|
You should at least insert your own hostname to be more compliant
|
||||||
|
>> g = UIDGenerator()
|
||||||
|
>> uid = g.uid('Example.ORG')
|
||||||
|
>> uid.ical()
|
||||||
|
'20050109T153549-NbUItOPDjQj8Ux6q@Example.ORG'
|
||||||
|
|
||||||
|
You can also insert a path or similar
|
||||||
|
>> g = UIDGenerator()
|
||||||
|
>> uid = g.uid('Example.ORG', '/path/to/content')
|
||||||
|
>> uid.ical()
|
||||||
|
'20050109T153415-/path/to/content@Example.ORG'
|
||||||
|
"""
|
||||||
|
|
||||||
|
chars = list(ascii_letters + digits)
|
||||||
|
|
||||||
|
def rnd_string(self, length=16):
|
||||||
|
"Generates a string with random characters of length"
|
||||||
|
return ''.join([random.choice(self.chars) for i in range(length)])
|
||||||
|
|
||||||
|
def uid(self, host_name='example.com', unique=''):
|
||||||
|
"""
|
||||||
|
Generates a unique id consisting of:
|
||||||
|
datetime-uniquevalue@host. Like:
|
||||||
|
20050105T225746Z-HKtJMqUgdO0jDUwm@example.com
|
||||||
|
"""
|
||||||
|
from icalendar.prop import vText, vDatetime
|
||||||
|
unique = unique or self.rnd_string()
|
||||||
|
return vText('%s-%s@%s' % (vDatetime.today().ical(), unique, host_name))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
import os.path, doctest, tools
|
||||||
|
# import and test this file
|
||||||
|
doctest.testmod(tools)
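# A hedged usage sketch of UIDGenerator (the doctest above is disabled because
# the uid value is random, so we only check its shape); it assumes the module
# imports as icalendar.tools and that uid()'s vText/vDatetime import resolves
# against icalendar.prop. The hostname and path are illustrative.
from icalendar.tools import UIDGenerator

g = UIDGenerator()
uid = g.uid('example.org', '/calendars/team')
print uid.ical().endswith('-/calendars/team@example.org')   # True
print len(g.rnd_string(8))                                   # 8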
|
Webcal/icalendar/util.py (new file, 50 lines)
@ -0,0 +1,50 @@
|
|||||||
|
from string import ascii_letters, digits
|
||||||
|
import random
|
||||||
|
|
||||||
|
"""
|
||||||
|
This module contains non-essential tools for iCalendar. Pretty thin so far eh?
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
class UIDGenerator:
|
||||||
|
|
||||||
|
"""
|
||||||
|
If you are too lazy to create real uids.
|
||||||
|
|
||||||
|
NOTE: this doctest is disabled
|
||||||
|
(only two > instead of three)
|
||||||
|
|
||||||
|
Automatic semi-random uid
|
||||||
|
>> g = UIDGenerator()
|
||||||
|
>> uid = g.uid()
|
||||||
|
>> uid.ical()
|
||||||
|
'20050109T153222-7ekDDHKcw46QlwZK@example.com'
|
||||||
|
|
||||||
|
You should at least insert your own hostname to be more compliant
|
||||||
|
>> g = UIDGenerator()
|
||||||
|
>> uid = g.uid('Example.ORG')
|
||||||
|
>> uid.ical()
|
||||||
|
'20050109T153549-NbUItOPDjQj8Ux6q@Example.ORG'
|
||||||
|
|
||||||
|
You can also insert a path or similar
|
||||||
|
>> g = UIDGenerator()
|
||||||
|
>> uid = g.uid('Example.ORG', '/path/to/content')
|
||||||
|
>> uid.ical()
|
||||||
|
'20050109T153415-/path/to/content@Example.ORG'
|
||||||
|
"""
|
||||||
|
|
||||||
|
chars = list(ascii_letters + digits)
|
||||||
|
|
||||||
|
def rnd_string(self, length=16):
|
||||||
|
"Generates a string with random characters of length"
|
||||||
|
return ''.join([random.choice(self.chars) for i in range(length)])
|
||||||
|
|
||||||
|
def uid(self, host_name='example.com', unique=''):
|
||||||
|
"""
|
||||||
|
Generates a unique id consisting of:
|
||||||
|
datetime-uniquevalue@host. Like:
|
||||||
|
20050105T225746Z-HKtJMqUgdO0jDUwm@example.com
|
||||||
|
"""
|
||||||
|
from icalendar.prop import vText, vDatetime
|
||||||
|
unique = unique or self.rnd_string()
|
||||||
|
return vText('%s-%s@%s' % (vDatetime.today().ical(), unique, host_name))
|
@ -19,11 +19,14 @@ import supybot.ircutils as ircutils
|
|||||||
import supybot.callbacks as callbacks
|
import supybot.callbacks as callbacks
|
||||||
import supybot.schedule as schedule
|
import supybot.schedule as schedule
|
||||||
import supybot.ircmsgs as ircmsgs
|
import supybot.ircmsgs as ircmsgs
|
||||||
|
import supybot.ircdb as ircdb
|
||||||
import supybot.conf as conf
|
import supybot.conf as conf
|
||||||
import pytz
|
|
||||||
import ical
|
try:
|
||||||
import datetime, shelve, re
|
import supybot.plugin as plugin
|
||||||
import cPickle as pickle
|
LoggerWrapper = plugin.loadPluginModule("IRCLog", False).LoggerWrapper
|
||||||
|
except Exception, e:
|
||||||
|
def LoggerWrapper(self): return self.log
|
||||||
|
|
||||||
def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.channels):
|
def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.channels):
|
||||||
if ircdb.ignores.checkIgnored(hostmask):
|
if ircdb.ignores.checkIgnored(hostmask):
|
||||||
@ -58,25 +61,38 @@ def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.chann
|
|||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
import pytz
|
||||||
|
import ical
|
||||||
|
reload(ical)
|
||||||
|
import datetime, shelve, re
|
||||||
|
import cPickle as pickle
|
||||||
|
|
||||||
class Webcal(callbacks.Plugin):
|
class Webcal(callbacks.Plugin):
|
||||||
"""@schedule <timezone>: display the schedule in your timezone"""
|
"""@schedule <timezone>
|
||||||
|
display the schedule in your timezone
|
||||||
|
"""
|
||||||
threaded = True
|
threaded = True
|
||||||
|
noIgnore = True
|
||||||
|
|
||||||
def __init__(self, irc):
|
def __init__(self, irc):
|
||||||
callbacks.Privmsg.__init__(self, irc)
|
parent = super(Webcal, self)
|
||||||
|
parent.__init__(irc)
|
||||||
|
self.log = LoggerWrapper(self)
|
||||||
self.irc = irc
|
self.irc = irc
|
||||||
|
self.cache = {}
|
||||||
|
self.firstevent = {}
|
||||||
try:
|
try:
|
||||||
schedule.removeEvent(self.name())
|
schedule.removeEvent(self.name())
|
||||||
schedule.removeEvent(self.name() + 'b')
|
schedule.removeEvent(self.name() + 'b')
|
||||||
except Exception: # Oh well
|
except AssertionError:
|
||||||
|
pass
|
||||||
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
try:
|
try:
|
||||||
schedule.addPeriodicEvent(self.refresh_cache, 60 * 20, name=self.name())
|
schedule.addPeriodicEvent(self.refresh_cache, 60 * 20, name=self.name())
|
||||||
schedule.addPeriodicEvent(self.autotopics, 60, name=self.name() + 'b')
|
schedule.addPeriodicEvent(self.autotopics, 60, name=self.name() + 'b')
|
||||||
except Exception: # Just work
|
except AssertionError:
|
||||||
pass
|
pass
|
||||||
self.cache = {}
|
|
||||||
self.firstevent = {}
|
|
||||||
|
|
||||||
def die(self):
|
def die(self):
|
||||||
try:
|
try:
|
||||||
@ -116,22 +132,38 @@ class Webcal(callbacks.Plugin):
|
|||||||
def filter(self, events, channel):
|
def filter(self, events, channel):
|
||||||
now = datetime.datetime.now(pytz.UTC)
|
now = datetime.datetime.now(pytz.UTC)
|
||||||
fword = self.registryValue('filter', channel)
|
fword = self.registryValue('filter', channel)
|
||||||
return [x for x in events if fword.lower() in x.raw_data.lower() and x.seconds_ago() < 1800]
|
ret = [x for x in events if fword.lower() in x.raw_data.lower() and x.seconds_ago() < 1800]
|
||||||
|
ret = [x for x in ret if self.filterChannel(x)]
|
||||||
|
ret.sort()
|
||||||
|
ret.sort()
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def filterChannel(self, event):
|
||||||
|
desc = event['description']
|
||||||
|
where = u'#ubuntu-meeting'
|
||||||
|
if "Location\\:" in desc:
|
||||||
|
where = desc.split('<')[1].split()[-1]
|
||||||
|
if where[0] != u'#':
|
||||||
|
where = u'#ubuntu-meeting'
|
||||||
|
return where == u'#ubuntu-meeting'
|
||||||
|
|
||||||
def maketopic(self, c, tz=None, template='%s', num_events=6):
|
def maketopic(self, c, tz=None, template='%s', num_events=6):
|
||||||
url = self.registryValue('url',c)
|
url = self.registryValue('url',c)
|
||||||
|
if not tz:
|
||||||
|
tz = 'UTC'
|
||||||
if url not in self.cache.keys():
|
if url not in self.cache.keys():
|
||||||
self.update(url)
|
self.update(url)
|
||||||
|
|
||||||
now = datetime.datetime.now(pytz.UTC)
|
now = datetime.datetime.now(pytz.UTC)
|
||||||
events = self.filter(self.cache[url],c)[:num_events]
|
events = self.filter(self.cache[url],c)[:num_events]
|
||||||
|
# events.sort()
|
||||||
preamble = ''
|
preamble = ''
|
||||||
if not len(events):
|
if not len(events):
|
||||||
return template % "No meetings scheduled"
|
return template % "No meetings scheduled"
|
||||||
# The standard slack of 30 minutes after the meeting will be an
|
# The standard slack of 30 minutes after the meeting will be an
|
||||||
# error if there are 2 consecutive meetings, so remove the first
||||||
# error if there are 2 consecutive meetings, so remove the first
|
||||||
# one in that case
|
# one in that case
|
||||||
if len(events) > 1 and events[1].startDate < now:
|
if len(events) > 1 and events[1].startTime() < now:
|
||||||
events = events[1:]
|
events = events[1:]
|
||||||
ev0 = events[0]
|
ev0 = events[0]
|
||||||
if ev0.seconds_to_go() < 600:
|
if ev0.seconds_to_go() < 600:
|
||||||
@ -149,6 +181,9 @@ class Webcal(callbacks.Plugin):
|
|||||||
|
|
||||||
# Now the commands
|
# Now the commands
|
||||||
def topic(self, irc, msg, args):
|
def topic(self, irc, msg, args):
|
||||||
|
"""No args
|
||||||
|
Updates the topics in the channel
|
||||||
|
"""
|
||||||
c = msg.args[0]
|
c = msg.args[0]
|
||||||
url = self.registryValue('url', c)
|
url = self.registryValue('url', c)
|
||||||
if not url or not self.registryValue('doTopic',channel=c):
|
if not url or not self.registryValue('doTopic',channel=c):
|
||||||
@ -156,14 +191,14 @@ class Webcal(callbacks.Plugin):
            self.update(url)

        events = self.filter(self.cache[url], c)
-        if events[0].is_on():
+        if events and events[0].is_on():
            irc.error("Won't update topic while a meeting is in progress")
            return

        newtopic = self.maketopic(c, template=self.registryValue('topic',c))
        if not (newtopic.strip() == irc.state.getTopic(c).strip()):
            irc.queueMsg(ircmsgs.topic(c, newtopic))
-    topic = wrap(topic)
+    topic = wrap(topic, [('checkCapability', 'admin')])

    def _tzfilter(self, tz, ud):
        if tz == ud:
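The wrap() change above restricts who may trigger a topic refresh. As a rough sketch of the same pattern on a toy command, with the converter spec taken from the diff itself and the surrounding plugin boilerplate assumed:

    # Hypothetical minimal supybot plugin command guarded the same way as
    # Webcal.topic above: the ('checkCapability', 'admin') converter makes the
    # bot reject callers without the 'admin' capability before the command
    # body ever runs.
    import supybot.callbacks as callbacks
    from supybot.commands import wrap

    class Demo(callbacks.Plugin):
        def refresh(self, irc, msg, args):
            """No args
            Refreshes some cached state; admin-only.
            """
            irc.replySuccess()
        refresh = wrap(refresh, [('checkCapability', 'admin')])

    Class = Demo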
@ -186,7 +221,9 @@ class Webcal(callbacks.Plugin):
        return False

    def schedule(self, irc, msg, args, tz):
-        """ Retrieve the date/time of scheduled meetings in a specific timezone """
+        """[<timezone>]
+        Retrieve the date/time of scheduled meetings in a specific timezone, defaults to UTC
+        """
        if not tz:
            tz = 'utc'
        if irc.isChannel(msg.args[0]):
@ -196,15 +233,18 @@ class Webcal(callbacks.Plugin):
        if not c:
            return
        url = self.registryValue('url', c)
+        if not url:
+            c = self.registryValue('defaultChannel')
+            url = self.registryValue('url', c)
        if not url:
            return
        tzs = filter(lambda x: self._tzfilter(x.lower(),tz.lower()), pytz.all_timezones)
        if not tzs:
-            irc.error('Unknown timezone: %s - Full list: %s' % (tz, self.config.registryValue('tzUrl') or 'Value not set'))
+            irc.error('Unknown timezone: %s - Full list: %s' % (tz, self.registryValue('tzUrl') or 'Value not set'))
            return
        newtopic = self.maketopic(c,tz=tzs[0])
        events = self.filter(self.cache[url], msg.args[0])
-        if events[0].is_on(): # FIXME channel filter
+        if events and events[0].is_on(): # FIXME channel filter
            irc.error('Please don\'t use @schedule during a meeting')
            irc.reply('Schedule for %s: %s' % (tzs[0], newtopic), private=True)
        else:
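Both @schedule and @now resolve the user-supplied timezone name against pytz before rendering times. A rough standalone equivalent is sketched below; the plain substring match is a simplification of _tzfilter(), whose full matching rules are not shown in this hunk:

    # Simplified stand-in for the _tzfilter() lookup: case-insensitive match of
    # a user string against pytz's timezone database, then conversion of "now".
    import datetime
    import pytz

    def find_timezones(name):
        name = name.lower()
        return [tz for tz in pytz.all_timezones if name in tz.lower()]

    tzs = find_timezones('amsterdam')            # ['Europe/Amsterdam']
    now = datetime.datetime.now(pytz.UTC)
    print(now.astimezone(pytz.timezone(tzs[0])).strftime("%B %d %Y, %H:%M:%S"))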
@ -212,7 +252,9 @@ class Webcal(callbacks.Plugin):
    schedule = wrap(schedule, [additional('text')])

    def now(self, irc, msg, args, tz):
-        """ Display the current time """
+        """[<timezone>]
+        Display the current time, <timezone> defaults to UTC
+        """
        if not tz:
            tz = 'utc'
        if irc.isChannel(msg.args[0]):
@ -222,11 +264,14 @@ class Webcal(callbacks.Plugin):
        if not c:
            return
        url = self.registryValue('url', c)
+        if not url:
+            c = self.registryValue('defaultChannel')
+            url = self.registryValue('url', c)
        if not url:
            return
        tzs = filter(lambda x: self._tzfilter(x.lower(),tz.lower()), pytz.all_timezones)
        if not tzs:
-            irc.error('Unknown timezone: %s - Full list: %s' % (tz, self.config.registryValue('tzUrl') or 'Value not set'))
+            irc.error('Unknown timezone: %s - Full list: %s' % (tz, self.registryValue('tzUrl') or 'Value not set'))
            return
        now = datetime.datetime.now(pytz.UTC)
        newtopic = self.maketopic(c,tz=tzs[0],num_events=1)
@ -234,7 +279,7 @@ class Webcal(callbacks.Plugin):
        newtopic = 'Current time in %s: %s - %s' % \
            (tzs[0], datetime.datetime.now(pytz.UTC).astimezone(pytz.timezone(tzs[0])).strftime("%B %d %Y, %H:%M:%S"), newtopic)

-        if events[0].is_on(): # Fixme -- channel filter
+        if events and events[0].is_on(): # Fixme -- channel filter
            irc.error('Please don\'t use @schedule during a meeting')
            irc.reply(newtopic, private=True)
        else:
@ -270,7 +315,7 @@ class Webcal(callbacks.Plugin):
            return msg
        try:
            id = ircdb.users.getUserId(msg.prefix)
-            user = users.getUser(id)
+            user = ircdb.users.getUser(id)
            return msg
        except:
            pass
@ -281,6 +326,5 @@ class Webcal(callbacks.Plugin):
        tokens = callbacks.tokenize(s, channel=msg.args[0])
        self.Proxy(irc, msg, tokens)
        return msg
-        # self._callCommand([cmd], irc, msg, [])

Class = Webcal
Webcal/rruler.py (new file, 61 lines)
@ -0,0 +1,61 @@
#!/usr/bin/python
from dateutil import rrule
import re, datetime

#wk_days = ('MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU')
wk_days = re.compile("([0-9]?)([M|T|W|F|S][O|U|E|H|R|A])")

rrule_map = {
    'SECONDLY': rrule.SECONDLY,
    'MINUTELY': rrule.MINUTELY,
    'HOURLY': rrule.HOURLY,
    'DAILY': rrule.DAILY,
    'WEEKLY': rrule.WEEKLY,
    'MONTHLY': rrule.MONTHLY,
    'YEARLY': rrule.YEARLY,
    'MO': rrule.MO,
    'TU': rrule.TU,
    'WE': rrule.WE,
    'TH': rrule.TH,
    'FR': rrule.FR,
    'SA': rrule.SA,
    'SU': rrule.SU }

def rrule_wrapper(*args, **kwargs):
    for k, v in kwargs.iteritems():
        if k == 'byday' or k == 'BYDAY':
            del kwargs[k]
            groups = wk_days.match(v[0]).groups()
            if groups[0]:
                kwargs['byweekday'] = rrule_map[groups[1]](int(groups[0]))
            else:
                kwargs['byweekday'] = rrule_map[groups[1]]

        else:
            del kwargs[k]
            k = k.lower()
            if isinstance(v, list):
                if len(v) > 1:
                    res = []
                    for x in v:
                        if isinstance(x, basestring) and wk_days.match(x):
                            res.append(rrule_map[wk_days.match(x).group(1)])
                        elif v in rrule_map:
                            res.append(rrule_map[x])
                        elif isinstance(x, datetime.datetime):
                            res.append(datetime.datetime.fromordinal(x.toordinal()))
                        else:
                            res.append(v)
                    kwargs[k] = tuple(res)
                else:
                    if isinstance(v[0], basestring) and wk_days.match(v[0]):
                        kwargs[k] = rrule_map[wk_days.match(v[0]).group(0)]
                    elif v[0] in rrule_map:
                        kwargs[k] = rrule_map[v[0]]
                    elif isinstance(v[0], datetime.datetime):
                        kwargs[k] = datetime.datetime.fromordinal(v[0].toordinal())
                    else:
                        kwargs[k] = v[0]
            else:
                kwargs[k] = v
    return rrule.rrule(*args, **kwargs)
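rruler.py exists to translate iCal-style recurrence parameters (FREQ, BYDAY, ...) into dateutil.rrule arguments. What the wrapper produces for a typical monthly BYDAY rule can also be written directly against dateutil; a hand-translated sketch, with made-up dtstart and count values:

    # Direct dateutil equivalent of what rrule_wrapper() builds for an iCal
    # rule like RRULE:FREQ=MONTHLY;BYDAY=2TU ("second Tuesday of every month").
    # dtstart/count are arbitrary example values.
    import datetime
    from dateutil import rrule

    rule = rrule.rrule(rrule.MONTHLY,
                       byweekday=rrule.TU(2),     # "2TU" -> rrule.TU with ordinal 2
                       dtstart=datetime.datetime(2008, 10, 1),
                       count=3)
    for occurrence in rule:
        print(occurrence)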
Webcal/testical.py (new file, 14 lines)
@ -0,0 +1,14 @@
#!/usr/bin/python
import datetime, pytz, urllib2, ical
def filter(events):
    ret = [x for x in events if x.seconds_ago() < 1800]
    ret.sort()
    ret.sort() # Needs this twice for some reason
    return ret

data = urllib2.urlopen("http://tinyurl.com/6mzmbr").read()
parser = ical.ICalReader(data)

events = filter(parser.events)

print "\n".join([x.schedule() for x in events])
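Both the plugin and this test script drop events whose seconds_ago() exceeds 1800, i.e. a meeting keeps showing for 30 minutes after it starts. A tiny illustration of that cut-off with plain datetimes; the helper below merely stands in for the event objects' seconds_ago(), which lives in ical.py and is not part of this diff:

    # Stand-in for Event.seconds_ago(): how long ago a meeting started, in seconds.
    import datetime
    import pytz

    def seconds_ago(start, now=None):
        now = now or datetime.datetime.now(pytz.UTC)
        return (now - start).total_seconds()

    started_20_min_ago = datetime.datetime.now(pytz.UTC) - datetime.timedelta(minutes=20)
    print(seconds_ago(started_20_min_ago) < 1800)   # True: still within the 30-minute slack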