path: root/scripts/queue_mover.py
#! /usr/bin/env python

"""This script simply mover events from one queue to another.

Config template::

    [queue_mover]
    job_name          = qm_sourcedb_to_targetdb

    src_db            = dbname=sourcedb
    dst_db            = dbname=targetdb

    pgq_queue_name    = source_queue
    dst_queue_name    = dest_queue

    logfile           = ~/log/%(job_name)s.log
    pidfile           = ~/pid/%(job_name)s.pid

    use_skylog        = 0
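
Typical invocation (a sketch; assumes the template above is saved as
queue_mover.ini, since skytools scripts take the config file as their
last argument)::

    python queue_mover.py queue_mover.ini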
"""

import sys, pgq

class QueueMover(pgq.SerialConsumer):
    __doc__ = __doc__   # reuse the module docstring as the class docstring

    def __init__(self, args):
        # "queue_mover" is the config section name; "src_db" and "dst_db"
        # are the config keys that hold the two connect strings.
        pgq.SerialConsumer.__init__(self, "queue_mover", "src_db", "dst_db", args)
        self.dst_queue_name = self.cf.get("dst_queue_name")

    def process_remote_batch(self, db, batch_id, ev_list, dst_db):
        # Collect the event fields into rows and tag each event as
        # processed so the batch can be finished.
        rows = []
        for ev in ev_list:
            data = [ev.type, ev.data, ev.extra1, ev.extra2, ev.extra3, ev.extra4, ev.time]
            rows.append(data)
            ev.tag_done()
        fields = ['type', 'data', 'extra1', 'extra2', 'extra3', 'extra4', 'time']

        # Bulk-insert the collected rows as new events into the destination queue.
        curs = dst_db.cursor()
        pgq.bulk_insert_events(curs, rows, fields, self.dst_queue_name)

if __name__ == '__main__':
    script = QueueMover(sys.argv[1:])
    script.start()
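
# The queues themselves are not created by this script. A minimal setup
# sketch, assuming a standard PgQ installation and the queue names from
# the config template above:
#
#   -- on sourcedb
#   SELECT pgq.create_queue('source_queue');
#   -- on targetdb
#   SELECT pgq.create_queue('dest_queue');
#
# A pgq ticker must also be running against the source database, otherwise
# no batches become available for the consumer to move.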