This repository has no description.

support coroutine callbacks

+11 -13
src/atkafka_consumer/consumer.py
··· 1 import asyncio 2 import json 3 import logging 4 - from typing import Any, Callable, List, Optional 5 - 6 from aiokafka import AIOKafkaConsumer, ConsumerRecord 7 - 8 from .models import AtKafkaEvent 9 10 logger = logging.getLogger(__name__) ··· 16 bootstrap_servers: List[str], 17 input_topic: str, 18 group_id: str, 19 - on_event: Callable[[AtKafkaEvent], None], 20 offset: str = "earliest", 21 max_concurrent_tasks: int = 100, 22 ): ··· 26 self._offset = offset 27 self._max_concurrent_tasks = max_concurrent_tasks 28 self._on_event = on_event 29 - 30 self._consumer: Optional[AIOKafkaConsumer] = None 31 - 32 self._semaphore: Optional[asyncio.Semaphore] = None 33 self._shutdown_event: Optional[asyncio.Event] = None 34 35 async def stop(self): 36 assert self._consumer is not None 37 - 38 if self._shutdown_event: 39 self._shutdown_event.set() 40 - 41 await self._consumer.stop() 42 logger.info("stopped kafka consumer") 43 44 async def _handle_event(self, message: ConsumerRecord[Any, Any]): 45 assert self._semaphore is not None 46 - 47 async with self._semaphore: 48 try: 49 evt = AtKafkaEvent.model_validate(message.value) ··· 51 logger.error(f"Failed to handle event: {e}") 52 raise e 53 54 - self._on_event(evt) 55 56 async def run(self): 57 self._semaphore = asyncio.Semaphore(self._max_concurrent_tasks) 58 self._shutdown_event = asyncio.Event() 59 - 60 self._consumer = AIOKafkaConsumer( 61 self._input_topic, 62 bootstrap_servers=",".join(self._bootstrap_servers), ··· 68 max_poll_interval_ms=300000, 69 value_deserializer=lambda m: json.loads(m.decode("utf-8")), 70 ) 71 - 72 await self._consumer.start() 73 logger.info("started kafka consumer") 74 ··· 88 for t in done: 89 if t.exception(): 90 logger.error(f"Task failed with exception: {t.exception()}") 91 - 92 except Exception as e: 93 logger.error(f"Error consuming messages: {e}") 94 raise
··· 1 import asyncio 2 import json 3 import logging 4 + from typing import Any, Callable, List, Optional, Union, Awaitable 5 from aiokafka import AIOKafkaConsumer, ConsumerRecord 6 from .models import AtKafkaEvent 7 8 logger = logging.getLogger(__name__) ··· 14 bootstrap_servers: List[str], 15 input_topic: str, 16 group_id: str, 17 + on_event: Union[ 18 + Callable[[AtKafkaEvent], None], Callable[[AtKafkaEvent], Awaitable[None]] 19 + ], 20 offset: str = "earliest", 21 max_concurrent_tasks: int = 100, 22 ): ··· 26 self._offset = offset 27 self._max_concurrent_tasks = max_concurrent_tasks 28 self._on_event = on_event 29 self._consumer: Optional[AIOKafkaConsumer] = None 30 self._semaphore: Optional[asyncio.Semaphore] = None 31 self._shutdown_event: Optional[asyncio.Event] = None 32 33 async def stop(self): 34 assert self._consumer is not None 35 if self._shutdown_event: 36 self._shutdown_event.set() 37 await self._consumer.stop() 38 logger.info("stopped kafka consumer") 39 40 async def _handle_event(self, message: ConsumerRecord[Any, Any]): 41 assert self._semaphore is not None 42 async with self._semaphore: 43 try: 44 evt = AtKafkaEvent.model_validate(message.value) ··· 46 logger.error(f"Failed to handle event: {e}") 47 raise e 48 49 + try: 50 + result = self._on_event(evt) 51 + if asyncio.iscoroutine(result): 52 + await result 53 + except Exception as e: 54 + logger.error(f"Error in on_event callback: {e}") 55 + raise 56 57 async def run(self): 58 self._semaphore = asyncio.Semaphore(self._max_concurrent_tasks) 59 self._shutdown_event = asyncio.Event() 60 self._consumer = AIOKafkaConsumer( 61 self._input_topic, 62 bootstrap_servers=",".join(self._bootstrap_servers), ··· 68 max_poll_interval_ms=300000, 69 value_deserializer=lambda m: json.loads(m.decode("utf-8")), 70 ) 71 await self._consumer.start() 72 logger.info("started kafka consumer") 73 ··· 87 for t in done: 88 if t.exception(): 89 logger.error(f"Task failed with exception: {t.exception()}") 90 except Exception 
as e: 91 logger.error(f"Error consuming messages: {e}") 92 raise