"""
kombu.messaging
===============

Sending and receiving messages.

:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.

"""
from itertools import count

from kombu import entity
from kombu.compression import compress
from kombu.serialization import encode
from kombu.syn import blocking as _SYN
from kombu.utils import maybe_list


Exchange = entity.Exchange
Queue = entity.Queue


class Producer(object):
    """Message Producer.

    :param channel: Connection channel.
    :keyword exchange: Default exchange.
    :keyword routing_key: Default routing key.
    :keyword serializer: Default serializer. Default is `"json"`.
    :keyword compression: Default compression method. Default is no
        compression.
    :keyword auto_declare: Automatically declare the exchange
        at instantiation. Default is :const:`True`.
    :keyword on_return: Callback to call for undeliverable messages,
        when the `mandatory` or `immediate` arguments to
        :meth:`publish` are used. This callback needs the following
        signature: `(exception, exchange, routing_key, message)`.
        Note that the producer needs to drain events to use this feature.

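    Example (an illustrative sketch, assuming an already established
    :class:`~kombu.connection.BrokerConnection` called ``connection``;
    the exchange name, routing key and message body are hypothetical)::

        from kombu.messaging import Exchange, Producer

        channel = connection.channel()

        # auto_declare is enabled by default, so the exchange is
        # declared when the producer is created.
        producer = Producer(channel,
                            exchange=Exchange("media", type="direct"),
                            routing_key="video",
                            serializer="json")
        producer.publish({"name": "/tmp/example.avi", "size": 1301013})
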
    """
    #: The connection channel used.
    channel = None

    #: Default exchange.
    exchange = None

    #: Default routing key.
    routing_key = ""

    #: Default serializer to use. Default is JSON.
    serializer = None

    #: Default compression method.  Disabled by default.
    compression = None

    #: By default the exchange is declared at instantiation.
    #: If you want to declare manually then you can set this
    #: to :const:`False`.
    auto_declare = True

    #: Basic return callback.
    on_return = None

    def __init__(self, channel, exchange=None, routing_key=None,
            serializer=None, auto_declare=None, compression=None,
            on_return=None):
        from kombu.connection import BrokerConnection
        if isinstance(channel, BrokerConnection):
            channel = channel.default_channel
        self.channel = channel
        self.connection = self.channel.connection.client

        self.exchange = exchange or self.exchange
        if self.exchange is None:
            self.exchange = Exchange("")
        self.routing_key = routing_key or self.routing_key
        self.serializer = serializer or self.serializer
        self.compression = compression or self.compression
        self.on_return = on_return or self.on_return
        if auto_declare is not None:
            self.auto_declare = auto_declare

        self.exchange = self.exchange(self.channel)
        if self.auto_declare:
            self.declare()

        if self.on_return:
            self.channel.events["basic_return"].append(self.on_return)

    def declare(self):
        """Declare the exchange.

        This is done automatically at instantiation if :attr:`auto_declare`
        is set to :const:`True`.

        """
        if self.exchange.name:
            self.exchange.declare()

    def publish(self, body, routing_key=None, delivery_mode=None,
            mandatory=False, immediate=False, priority=0, content_type=None,
            content_encoding=None, serializer=None, headers=None,
            compression=None, exchange=None, **properties):
        """Publish message to the specified exchange.

        :param body: Message body.
        :keyword routing_key: Message routing key.
        :keyword delivery_mode: See :attr:`delivery_mode`.
        :keyword mandatory: Currently not supported.
        :keyword immediate: Currently not supported.
        :keyword priority: Message priority. A number between 0 and 9.
        :keyword content_type: Content type. Default is auto-detect.
        :keyword content_encoding: Content encoding. Default is auto-detect.
        :keyword serializer: Serializer to use. Default is auto-detect.
        :keyword headers: Mapping of arbitrary headers to pass along
          with the message body.
        :keyword exchange: Override the exchange.  Note that this exchange
          must have been declared.
        :keyword properties: Additional properties, see the AMQP spec.

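        Example (illustrative; assumes ``producer`` is an existing
        :class:`Producer` and that ``"zlib"`` names a registered
        compression method; routing key and headers are hypothetical)::

            producer.publish({"hello": "world"},
                             routing_key="hipri",
                             serializer="json",
                             compression="zlib",
                             headers={"sender": "example"})
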
        """
        headers = headers or {}
        if routing_key is None:
            routing_key = self.routing_key
        if compression is None:
            compression = self.compression

        if isinstance(exchange, Exchange):
            exchange = exchange.name

        body, content_type, content_encoding = self._prepare(
                body, serializer, content_type, content_encoding,
                compression, headers)
        message = self.exchange.Message(body,
                                        delivery_mode,
                                        priority,
                                        content_type,
                                        content_encoding,
                                        headers=headers,
                                        properties=properties)
        return self.exchange.publish(message, routing_key, mandatory,
                                     immediate, exchange=exchange)

    def revive(self, channel):
        """Revive the producer after connection loss."""
        self.channel = channel
        self.connection = self.channel.connection.client
        self.exchange.revive(channel)

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.release()

    def release(self):
        pass
    close = release

    def _prepare(self, body, serializer=None,
            content_type=None, content_encoding=None, compression=None,
            headers=None):

        # No content_type? Then we're serializing the data internally.
        if not content_type:
            serializer = serializer or self.serializer
            (content_type, content_encoding,
             body) = encode(body, serializer=serializer)
        else:
            # If the programmer doesn't want us to serialize,
            # make sure content_encoding is set.
            if isinstance(body, unicode):
                if not content_encoding:
                    content_encoding = 'utf-8'
                body = body.encode(content_encoding)

            # If they passed in a string, we can't know anything
            # about it. So assume it's binary data.
            elif not content_encoding:
                content_encoding = 'binary'

        if compression:
            body, headers["compression"] = compress(body, compression)

        return body, content_type, content_encoding


class Consumer(object):
    """Message consumer.

    :param channel: see :attr:`channel`.
    :param queues: see :attr:`queues`.
    :keyword no_ack: see :attr:`no_ack`.
    :keyword auto_declare: see :attr:`auto_declare`.
    :keyword callbacks: see :attr:`callbacks`.
    :keyword on_decode_error: see :attr:`on_decode_error`.

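    Example (a minimal sketch, assuming an established connection named
    ``connection`` that provides ``channel()`` and ``drain_events()``;
    the exchange/queue names are hypothetical)::

        from kombu.messaging import Consumer, Exchange, Queue

        def handle_message(body, message):
            # ``body`` is the decoded payload, ``message`` the
            # Message instance it arrived in.
            print("received: %r" % (body, ))
            message.ack()

        queue = Queue("tasks", Exchange("tasks", type="direct"),
                      routing_key="tasks")

        # Entering the ``with`` block starts consuming,
        # leaving it cancels the consumer again.
        with Consumer(connection.channel(), [queue],
                      callbacks=[handle_message]):
            connection.drain_events(timeout=1)
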
    """
    #: The connection channel to use.
    channel = None

    #: A single :class:`~kombu.entity.Queue`, or a list of queues to
    #: consume from.
    queues = None

    #: Flag for automatic message acknowledgment.
    #: If set to :const:`True` the server does not expect message
    #: acknowledgments; acknowledgments are required by default.
    no_ack = None

    #: By default all entities will be declared at instantiation, if you
    #: want to handle this manually you can set this to :const:`False`.
    auto_declare = True

    #: List of callbacks called in order when a message is received.
    #:
    #: The signature of the callbacks must take two arguments:
    #: `(body, message)`, which is the decoded message body and
    #: the `Message` instance (a subclass of
    #: :class:`~kombu.transport.base.Message`).
    callbacks = None

    #: Callback called when a message can't be decoded.
    #:
    #: The signature of the callback must take two arguments: `(message,
    #: exc)`, which is the message that can't be decoded and the exception
    #: that occurred while trying to decode it.
    on_decode_error = None

    _next_tag = count(1).next   # global counter, shared by all instances

    def __init__(self, channel, queues, no_ack=None, auto_declare=None,
            callbacks=None, on_decode_error=None):
        from kombu.connection import BrokerConnection
        if isinstance(channel, BrokerConnection):
            channel = channel.default_channel
        self.channel = channel
        self.connection = self.channel.connection.client

        self.queues = queues
        if no_ack is not None:
            self.no_ack = no_ack
        if auto_declare is not None:
            self.auto_declare = auto_declare
        if on_decode_error is not None:
            self.on_decode_error = on_decode_error

        if callbacks is not None:
            self.callbacks = callbacks
        if self.callbacks is None:
            self.callbacks = []
        self._active_tags = {}

        self.queues = [queue(self.channel)
                            for queue in maybe_list(self.queues)]

        if self.auto_declare:
            self.declare()

    def declare(self):
        """Declare queues, exchanges and bindings.

        This is done automatically at instantiation if :attr:`auto_declare`
        is set.

        """
        for queue in self.queues:
            queue.declare()

    def register_callback(self, callback):
        """Register a new callback to be called when a message
        is received.

        The signature of the callback needs to accept two arguments:
        `(body, message)`, which is the decoded message body
        and the `Message` instance (a subclass of
        :class:`~kombu.transport.base.Message`).

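        Example (illustrative)::

            def on_message(body, message):
                print("got message: %r" % (body, ))
                message.ack()

            consumer.register_callback(on_message)
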
        """
        self.callbacks.append(callback)

    def __enter__(self):
        self.consume()
        return self

    def __exit__(self, *exc_info):
        self.cancel()

    def add_queue(self, queue):
        """Add a queue to the list of queues to consume from.

        Note that this will not start consuming from the queue;
        for that you will have to call :meth:`consume` after.

        """
        queue = queue(self.channel)
        if self.auto_declare:
            queue.declare()
        self.queues.append(queue)
        return queue

    def consume(self, no_ack=None):
        """Register a consumer on the server for every queue
        in :attr:`queues`, starting delivery of messages.

        :keyword no_ack: If set, overrides the :attr:`no_ack`
          default for this call.

        """
        if not self.queues:
            return
        if no_ack is None:
            no_ack = self.no_ack
        H, T = self.queues[:-1], self.queues[-1]
        for queue in H:
            self._basic_consume(queue, no_ack=no_ack, nowait=True)
        self._basic_consume(T, no_ack=no_ack, nowait=False)

    def cancel(self):
        """End all active queue consumers.

        This does not affect already delivered messages, but it does
        mean the server will not send any more messages for this consumer.

        """
        for tag in self._active_tags.values():
            self.channel.basic_cancel(tag)
        self._active_tags.clear()
    close = cancel

    def cancel_by_queue(self, queue):
        """Cancel consumer by queue name."""
        try:
            tag = self._active_tags.pop(queue)
        except KeyError:
            pass
        else:
            self.queues[:] = [q for q in self.queues if q.name != queue]
            self.channel.basic_cancel(tag)

    def consuming_from(self, queue):
        """Return :const:`True` if currently consuming from `queue`
        (a queue name or :class:`~kombu.entity.Queue` instance)."""
        name = queue
        if isinstance(queue, Queue):
            name = queue.name
        return any(q.name == name for q in self.queues)

    def purge(self):
        """Purge messages from all queues.

        .. warning::
            This will *delete all ready messages*, there is no
            undo operation available.

        """
        return sum(queue.purge() for queue in self.queues)

    def flow(self, active):
        """Enable/disable flow from peer.

        This is a simple flow-control mechanism that a peer can use
        to avoid overflowing its queues or otherwise finding itself
        receiving more messages than it can process.

        The peer that receives a request to stop sending content
        will finish sending the current content (if any), and then wait
        until flow is reactivated.

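        Example (illustrative; pauses and later resumes delivery)::

            consumer.flow(False)   # ask the peer to stop sending
            consumer.flow(True)    # resume delivery
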
        """
        self.channel.flow(active)

    def qos(self, prefetch_size=0, prefetch_count=0, apply_global=False):
        """Specify quality of service.

        The client can request that messages should be sent in
        advance so that when the client finishes processing a message,
        the following message is already held locally, rather than needing
        to be sent down the channel. Prefetching gives a performance
        improvement.

        The prefetch window is ignored if the :attr:`no_ack` option is set.

        :param prefetch_size: Specify the prefetch window in octets.
          The server will send a message in advance if it is equal to
          or smaller in size than the available prefetch size (and
          also falls within other prefetch limits). May be set to zero,
          meaning "no specific limit", although other prefetch limits
          may still apply.

        :param prefetch_count: Specify the prefetch window in terms of
          whole messages.

        :param apply_global: Apply new settings globally on all channels.
          Currently not supported by RabbitMQ.

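        Example (illustrative; keep at most ten unacknowledged messages
        buffered for this channel)::

            consumer.qos(prefetch_count=10)
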
        """
        return _SYN(self.channel.basic_qos, prefetch_size,
                                            prefetch_count,
                                            apply_global)

    def recover(self, requeue=False):
        """Redeliver unacknowledged messages.

        Asks the broker to redeliver all unacknowledged messages
        on the specified channel.

        :keyword requeue: By default the messages will be redelivered
          to the original recipient. With `requeue` set to true, the
          server will attempt to requeue the message, potentially then
          delivering it to an alternative subscriber.

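        Example (illustrative)::

            # Requeue everything that is still unacknowledged, possibly
            # delivering it to other consumers.
            consumer.recover(requeue=True)
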
        """
        return _SYN(self.channel.basic_recover, requeue=requeue)

    def receive(self, body, message):
        """Method called when a message is received.

        This dispatches to the registered :attr:`callbacks`.

        :param body: The decoded message body.
        :param message: The `Message` instance.

        :raises NotImplementedError: If no consumer callbacks have been
          registered.

        """
        if not self.callbacks:
            raise NotImplementedError("No consumer callbacks registered")
        for callback in self.callbacks:
            callback(body, message)

    def revive(self, channel):
        """Revive consumer after connection loss."""
        for queue in self.queues:
            queue.revive(channel)
        self.channel = channel
        self.connection = self.channel.connection.client

    def _basic_consume(self, queue, consumer_tag=None,
            no_ack=no_ack, nowait=True):
        tag = self._active_tags.get(queue.name)
        if tag is None:
            tag = self._add_tag(queue, consumer_tag)
            queue.consume(tag,
                          self._receive_callback,
                          no_ack=no_ack,
                          nowait=nowait)
        return tag

    def _add_tag(self, queue, consumer_tag=None):
        tag = consumer_tag or str(self._next_tag())
        self._active_tags[queue.name] = tag
        return tag

    def _receive_callback(self, raw_message):
        # Convert to a native message first, so the message object is
        # available to ``on_decode_error`` if payload decoding fails.
        message = self.channel.message_to_python(raw_message)
        try:
            decoded = message.payload
        except Exception, exc:
            if not self.on_decode_error:
                raise
            self.on_decode_error(message, exc)
        else:
            self.receive(decoded, message)

    def __repr__(self):
        return "<Consumer: %s>" % (self.queues, )