import os
import sys
import socket
import httplib
import magic
+import logging

# Set up for accessing django
from django.core.management import setup_environ
import settings
setup_environ(settings)

from django.db import connection

from commitfest.models import MailThreadAttachment
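+ # Downloads every attachment that has not yet been classified from the
+ # archives and records whether it looks like a plain patch.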
if __name__ == "__main__":
+ debug = "--debug" in sys.argv
+
+ # Logging always done to stdout, but we can turn on/off how much
+ logging.basicConfig(format='%(asctime)s %(levelname)s: %(msg)s',
+ level=debug and logging.DEBUG or logging.INFO)
+
socket.setdefaulttimeout(settings.ARCHIVES_TIMEOUT)
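+ # Open a libmagic handle in MIME mode and load the default magic database;
+ # it is used below to classify the downloaded attachment contents.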
mag = magic.open(magic.MIME)
mag.load()
-
+
+ logging.debug("Updating attachment metadata from archives")
+
# Try to fetch/scan all attachments that haven't already been scanned.
# If they have already been scanned, we don't bother.
# We will hit the archives without delay when doing this, but that
# should not be a problem, since we're not going to be
# downloading a lot...
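+ # (ispatch=None in the database means the attachment has not been scanned yet)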
for a in MailThreadAttachment.objects.filter(ispatch=None):
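+ # The archives serve the raw attachment contents directly by attachment id.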
url = "/message-id/attachment/%s/attach" % a.attachmentid
+ logging.debug("Checking attachment %s" % a.attachmentid)
+
h = httplib.HTTPConnection(settings.ARCHIVES_SERVER,
                           settings.ARCHIVES_PORT,
                           True,
                           settings.ARCHIVES_TIMEOUT)
# Request the attachment, passing the configured Host: header for the archives
h.request('GET', url, None, {
    'Host': settings.ARCHIVES_HOST,
})
resp = h.getresponse()
if resp.status != 200:
- print "Failed to get %s: %s" % (url, resp.status)
+ logging.error("Failed to get %s: %s" % (url, resp.status))
continue
contents = resp.read()
# Attempt to identify the file using magic information
mtype = mag.buffer(contents)
+ logging.debug("Detected MIME type is %s" % mtype)
# We don't support gzipped or tarred patches or anything like
# that at this point - just plain patches.
if mtype.startswith('text/x-diff'):
    a.ispatch = True
else:
    a.ispatch = False
+ logging.info("Attachment %s is patch: %s" % (a.id, a.ispatch))
a.save()
connection.close()
+ logging.debug("Done.")
import os
import sys
+import logging

# Set up for accessing django
from django.core.management import setup_environ
import settings
setup_environ(settings)

from django.db import connection

from commitfest.models import MailThread
from commitfest.ajax import _archivesAPI, parse_and_add_attachments
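+ # Polls the archives for new mail in every watched thread and updates the
+ # cached thread metadata (and patch last-mail dates) when something changed.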
if __name__ == "__main__":
+ debug = "--debug" in sys.argv
+
+ # Logging always done to stdout, but we can turn on/off how much
+ logging.basicConfig(format='%(asctime)s %(levelname)s: %(msg)s',
+ level=debug and logging.DEBUG or logging.INFO)
+
+ logging.debug("Checking for updated mail threads in the archives")
for thread in MailThread.objects.filter(patches__commitfests__status__in=(1,2,3)).distinct():
+ logging.debug("Checking %s in the archives" % thread.messageid)
r = sorted(_archivesAPI('/message-id.json/%s' % thread.messageid), key=lambda x: x['date'])
if thread.latestmsgid != r[-1]['msgid']:
# There is now a newer mail in the thread!
- print "Thread %s updated" % thread.messageid
+ logging.info("Thread %s updated" % thread.messageid)
thread.latestmsgid = r[-1]['msgid']
thread.latestmessage = r[-1]['date']
thread.latestauthor = r[-1]['from']
thread.save()

# Update the last mail date on each patch in the thread - unless the patch
# already has a later mail date recorded, e.g. from a *different* thread.
for p in thread.patches.filter(lastmail__lt=thread.latestmessage):
+ logging.debug("Last mail time updated for %s" % thread.messageid)
p.lastmail = thread.latestmessage
p.save()
connection.close()
+ logging.debug("Done.")