Refactor connector hooks to use DTOs, update manager and tests

Because:
- Connector hooks needed safe, serializable DTOs instead of live ORM instances
- The connector manager and its tests required updates for the new DTO interface
- Code style and clarity improvements were needed in the search tests
- Redundant date-test logic could be removed

Refactors the connector interface and manager to pass UserDTO and ShoppingListEntryDTO instead of model instances.
Updates the connector manager logic and tests to match the new DTO signatures.
Improves code style in test_recipe_full_text_search.py and removes an unnecessary date-test block.
Breaking change: downstream connectors must update their hook signatures.
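
For downstream connectors, the breaking change boils down to the hook signatures: each hook now receives only a serializable DTO instead of a Space plus a live ShoppingListEntry. A minimal sketch of an updated connector follows; LoggingConnector is hypothetical, and only the hook signatures and DTO fields are taken from the diff below.

from cookbook.connectors.connector import Connector, ShoppingListEntryDTO
from cookbook.models import ConnectorConfig


class LoggingConnector(Connector):
    # Hypothetical downstream connector updated for the DTO-based hooks.
    def __init__(self, config: ConnectorConfig):
        self._config = config

    async def on_shopping_list_entry_created(self, instance: ShoppingListEntryDTO) -> None:
        # The DTO exposes plain fields, so no ORM access is needed here.
        print(f"created: {instance.amount or ''} {instance.unit_name or ''} {instance.food_name}")

    async def on_shopping_list_entry_updated(self, instance: ShoppingListEntryDTO) -> None:
        print(f"updated: {instance.food_name} by {instance.created_by.username}")

    async def on_shopping_list_entry_deleted(self, instance: ShoppingListEntryDTO) -> None:
        print(f"deleted: {instance.food_name}")

    async def close(self) -> None:
        pass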
smilerz
2025-08-27 20:58:47 -05:00
parent 41fd7334dc
commit 2c186ab72e
4 changed files with 213 additions and 134 deletions

View File

@@ -1,29 +1,62 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Optional
from cookbook.models import ShoppingListEntry, Space, ConnectorConfig
from cookbook.models import ConnectorConfig, ShoppingListEntry, User
@dataclass
class UserDTO:
username: str
first_name: Optional[str]
@staticmethod
def create_from_user(instance: User) -> 'UserDTO':
return UserDTO(username=instance.username, first_name=instance.first_name if instance.first_name else None)
@dataclass
class ShoppingListEntryDTO:
food_name: str
amount: Optional[float]
base_unit: Optional[str]
unit_name: Optional[str]
created_by: UserDTO
@staticmethod
def try_create_from_entry(instance: ShoppingListEntry) -> Optional['ShoppingListEntryDTO']:
if instance.food is None or instance.created_by is None:
return None
return ShoppingListEntryDTO(
food_name=instance.food.name,
amount=instance.amount if instance.amount else None,
unit_name=instance.unit.name if instance.unit else None,
base_unit=instance.unit.base_unit if instance.unit and instance.unit.base_unit else None,
created_by=UserDTO.create_from_user(instance.created_by),
)
# A Connector is 'destroyed' & recreated each time 'any' ConnectorConfig in a space changes.
class Connector(ABC):
@abstractmethod
def __init__(self, config: ConnectorConfig):
pass
@abstractmethod
async def on_shopping_list_entry_created(self, space: Space, instance: ShoppingListEntry) -> None:
async def on_shopping_list_entry_created(self, instance: ShoppingListEntryDTO) -> None:
pass
# This method might not trigger on 'direct' entry updates: https://stackoverflow.com/a/35238823
@abstractmethod
async def on_shopping_list_entry_updated(self, space: Space, instance: ShoppingListEntry) -> None:
async def on_shopping_list_entry_updated(self, instance: ShoppingListEntryDTO) -> None:
pass
@abstractmethod
async def on_shopping_list_entry_deleted(self, space: Space, instance: ShoppingListEntry) -> None:
async def on_shopping_list_entry_deleted(self, instance: ShoppingListEntryDTO) -> None:
pass
@abstractmethod
async def close(self) -> None:
pass
# TODO: Add Recipes & possibly Meal Place listeners/hooks (And maybe more?)

View File

@@ -7,14 +7,14 @@ from dataclasses import dataclass
from enum import Enum
from logging import Logger
from types import UnionType
from typing import List, Any, Dict, Optional, Type
from typing import Any, Dict, List, Optional, Type
from django.conf import settings
from django_scopes import scope
from cookbook.connectors.connector import Connector
from cookbook.connectors.connector import Connector, ShoppingListEntryDTO
from cookbook.connectors.homeassistant import HomeAssistant
from cookbook.models import ShoppingListEntry, Space, ConnectorConfig
from cookbook.models import ConnectorConfig, ShoppingListEntry, Space
REGISTERED_CLASSES: UnionType | Type = ShoppingListEntry
@@ -56,15 +56,16 @@ class ConnectorManager(metaclass=Singleton):
def __init__(self):
self._logger = logging.getLogger("recipes.connector")
self._logger.debug("ConnectorManager initializing")
self._queue = queue.Queue(maxsize=settings.EXTERNAL_CONNECTORS_QUEUE_SIZE)
self._worker = threading.Thread(target=self.worker, args=(0, self._queue,), daemon=True)
self._worker = threading.Thread(target=self.worker, args=(
0,
self._queue,
), daemon=True)
self._worker.start()
# Called by post save & post delete signals
def __call__(self, instance: Any, **kwargs) -> None:
if not isinstance(instance, self._listening_to_classes) or not hasattr(instance, "space"):
return
action_type: ActionType
if "created" in kwargs and kwargs["created"]:
action_type = ActionType.CREATED
@@ -75,16 +76,37 @@ class ConnectorManager(metaclass=Singleton):
else:
return
try:
self._queue.put_nowait(Work(instance, action_type))
except queue.Full:
self._logger.info(f"queue was full, so skipping {action_type} of type {type(instance)}")
return
self._add_work(action_type, instance)
def _add_work(self, action_type: ActionType, *instances: REGISTERED_CLASSES):
for instance in instances:
if not isinstance(instance, self._listening_to_classes) or not hasattr(instance, "space"):
continue
try:
_force_load_instance(instance)
self._queue.put_nowait(Work(instance, action_type))
except queue.Full:
self._logger.info(f"queue was full, so skipping {action_type} of type {type(instance)}")
def stop(self):
self._queue.join()
self._worker.join()
@classmethod
def is_initialized(cls):
return cls in cls._instances
@staticmethod
def add_work(action_type: ActionType, *instances: REGISTERED_CLASSES):
"""
Manually inject work that failed to come in through the __call__ (aka Django signal)
Before the work is processed, we check if the connectionManager is initialized, because if it's not, we don't want to accidentally initialize it.
Be careful calling it, because it might result in a instance being processed twice.
"""
if not ConnectorManager.is_initialized():
return
ConnectorManager()._add_work(action_type, *instances)
@staticmethod
def worker(worker_id: int, worker_queue: queue.Queue):
logger = logging.getLogger("recipes.connector.worker")
@@ -116,7 +138,7 @@ class ConnectorManager(metaclass=Singleton):
if connectors is None or refresh_connector_cache:
if connectors is not None:
loop.run_until_complete(close_connectors(connectors))
loop.run_until_complete(_close_connectors(connectors))
with scope(space=space):
connectors: List[Connector] = list()
@@ -142,7 +164,7 @@ class ConnectorManager(metaclass=Singleton):
logger.debug(f"running {len(connectors)} connectors for {item.instance=} with {item.actionType=}")
loop.run_until_complete(run_connectors(connectors, space, item.instance, item.actionType))
loop.run_until_complete(run_connectors(connectors, item.instance, item.actionType))
worker_queue.task_done()
logger.info(f"terminating ConnectionManager worker {worker_id}")
@@ -159,7 +181,14 @@ class ConnectorManager(metaclass=Singleton):
return None
async def close_connectors(connectors: List[Connector]):
def _force_load_instance(instance: REGISTERED_CLASSES):
if isinstance(instance, ShoppingListEntry):
_ = instance.food # Force load food
_ = instance.unit # Force load unit
_ = instance.created_by # Force load created_by
async def _close_connectors(connectors: List[Connector]):
tasks: List[Task] = [asyncio.create_task(connector.close()) for connector in connectors]
if len(tasks) == 0:
@@ -171,22 +200,24 @@ async def close_connectors(connectors: List[Connector]):
logging.exception("received an exception while closing one of the connectors")
async def run_connectors(connectors: List[Connector], space: Space, instance: REGISTERED_CLASSES, action_type: ActionType):
async def run_connectors(connectors: List[Connector], instance: REGISTERED_CLASSES, action_type: ActionType):
tasks: List[Task] = list()
if isinstance(instance, ShoppingListEntry):
shopping_list_entry: ShoppingListEntry = instance
shopping_list_entry = ShoppingListEntryDTO.try_create_from_entry(instance)
if shopping_list_entry is None:
return
match action_type:
case ActionType.CREATED:
for connector in connectors:
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_created(space, shopping_list_entry)))
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_created(shopping_list_entry)))
case ActionType.UPDATED:
for connector in connectors:
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_updated(space, shopping_list_entry)))
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_updated(shopping_list_entry)))
case ActionType.DELETED:
for connector in connectors:
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_deleted(space, shopping_list_entry)))
tasks.append(asyncio.create_task(connector.on_shopping_list_entry_deleted(shopping_list_entry)))
if len(tasks) == 0:
return
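
The new ConnectorManager.add_work entry point exists for code paths that bypass Django signals, such as queryset-level bulk updates; it only enqueues work if the manager singleton was already initialized. A minimal caller-side sketch, assuming a hypothetical helper and an illustrative checked field on ShoppingListEntry:

from cookbook.connectors.connector_manager import ActionType, ConnectorManager
from cookbook.models import ShoppingListEntry


def mark_entries_checked(entry_ids):
    # Hypothetical helper: queryset.update() does not fire post_save signals,
    # so the affected entries are handed to the manager explicitly.
    entries = list(ShoppingListEntry.objects.filter(id__in=entry_ids))
    ShoppingListEntry.objects.filter(id__in=entry_ids).update(checked=True)  # 'checked' is illustrative
    # No-op if the ConnectorManager singleton has not been created yet.
    ConnectorManager.add_work(ActionType.UPDATED, *entries)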

View File

@@ -2,9 +2,9 @@ import pytest
from django.contrib import auth
from mock.mock import Mock
from cookbook.connectors.connector import Connector
from cookbook.connectors.connector_manager import run_connectors, ActionType
from cookbook.models import ShoppingListEntry, Food
from cookbook.connectors.connector import Connector, ShoppingListEntryDTO
from cookbook.connectors.connector_manager import ActionType, run_connectors
from cookbook.models import Food, ShoppingListEntry
@pytest.fixture()
@@ -13,13 +13,13 @@ def obj_1(space_1, u1_s1):
return e
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_run_connectors(space_1, u1_s1, obj_1) -> None:
expected_dto = ShoppingListEntryDTO.try_create_from_entry(obj_1)
connector_mock = Mock(spec=Connector)
await run_connectors([connector_mock], space_1, obj_1, ActionType.DELETED)
await run_connectors([connector_mock], obj_1, ActionType.DELETED)
assert not connector_mock.on_shopping_list_entry_updated.called
assert not connector_mock.on_shopping_list_entry_created.called
connector_mock.on_shopping_list_entry_deleted.assert_called_once_with(space_1, obj_1)
connector_mock.on_shopping_list_entry_deleted.assert_called_once_with(expected_dto)

View File

@@ -3,7 +3,6 @@ import json
from datetime import timedelta
import pytest
from django.conf import settings
from django.contrib import auth
from django.urls import reverse
@@ -12,9 +11,7 @@ from django_scopes import scope
from cookbook.models import Recipe, SearchFields
from cookbook.tests.conftest import transpose
from cookbook.tests.factories import (CookLogFactory, FoodFactory, IngredientFactory,
KeywordFactory, RecipeBookEntryFactory, RecipeFactory,
UnitFactory, ViewLogFactory)
from cookbook.tests.factories import CookLogFactory, FoodFactory, IngredientFactory, KeywordFactory, RecipeBookEntryFactory, RecipeFactory, UnitFactory, ViewLogFactory
# TODO test combining any/all of the above
# TODO test sort_by
@@ -122,8 +119,7 @@ def found_recipe(request, space_1, accent, unaccent, u1_s1, u2_s1):
obj2 = FoodFactory.create(name=accent, space=space_1)
recipe1.steps.first().ingredients.add(IngredientFactory.create(food=obj1))
recipe2.steps.first().ingredients.add(IngredientFactory.create(food=obj2))
recipe3.steps.first().ingredients.add(IngredientFactory.create(
food=obj1), IngredientFactory.create(food=obj2))
recipe3.steps.first().ingredients.add(IngredientFactory.create(food=obj1), IngredientFactory.create(food=obj2))
if request.param.get('keyword', None):
obj1 = KeywordFactory.create(name=unaccent, space=space_1)
obj2 = KeywordFactory.create(name=accent, space=space_1)
@@ -144,8 +140,7 @@ def found_recipe(request, space_1, accent, unaccent, u1_s1, u2_s1):
obj2 = UnitFactory.create(name=accent, space=space_1)
recipe1.steps.first().ingredients.add(IngredientFactory.create(unit=obj1))
recipe2.steps.first().ingredients.add(IngredientFactory.create(unit=obj2))
recipe3.steps.first().ingredients.add(IngredientFactory.create(
unit=obj1), IngredientFactory.create(unit=obj2))
recipe3.steps.first().ingredients.add(IngredientFactory.create(unit=obj1), IngredientFactory.create(unit=obj2))
if request.param.get('name', None):
recipe1.name = unaccent
recipe2.name = accent
@@ -165,42 +160,40 @@ def found_recipe(request, space_1, accent, unaccent, u1_s1, u2_s1):
i2.save()
if request.param.get('viewedon', None):
ViewLogFactory.create(recipe=recipe1, created_by=user1,
created_at=days_3, space=space_1)
ViewLogFactory.create(recipe=recipe2, created_by=user1,
created_at=days_30, space=space_1)
ViewLogFactory.create(recipe=recipe3, created_by=user2,
created_at=days_15, space=space_1)
ViewLogFactory.create(recipe=recipe1, created_by=user1, created_at=days_3, space=space_1)
ViewLogFactory.create(recipe=recipe2, created_by=user1, created_at=days_30, space=space_1)
ViewLogFactory.create(recipe=recipe3, created_by=user2, created_at=days_15, space=space_1)
if request.param.get('cookedon', None):
CookLogFactory.create(recipe=recipe1, created_by=user1,
created_at=days_3, space=space_1)
CookLogFactory.create(recipe=recipe2, created_by=user1,
created_at=days_30, space=space_1)
CookLogFactory.create(recipe=recipe3, created_by=user2,
created_at=days_15, space=space_1)
CookLogFactory.create(recipe=recipe1, created_by=user1, created_at=days_3, space=space_1)
CookLogFactory.create(recipe=recipe2, created_by=user1, created_at=days_30, space=space_1)
CookLogFactory.create(recipe=recipe3, created_by=user2, created_at=days_15, space=space_1)
if request.param.get('timescooked', None):
CookLogFactory.create_batch(
5, recipe=recipe1, created_by=user1, space=space_1)
CookLogFactory.create_batch(5, recipe=recipe1, created_by=user1, space=space_1)
CookLogFactory.create(recipe=recipe2, created_by=user1, space=space_1)
CookLogFactory.create_batch(
3, recipe=recipe3, created_by=user2, space=space_1)
CookLogFactory.create_batch(3, recipe=recipe3, created_by=user2, space=space_1)
if request.param.get('rating', None):
CookLogFactory.create(
recipe=recipe1, created_by=user1, rating=5.0, space=space_1)
CookLogFactory.create(
recipe=recipe2, created_by=user1, rating=1.0, space=space_1)
CookLogFactory.create(
recipe=recipe3, created_by=user2, rating=3.0, space=space_1)
CookLogFactory.create(recipe=recipe1, created_by=user1, rating=5.0, space=space_1)
CookLogFactory.create(recipe=recipe2, created_by=user1, rating=1.0, space=space_1)
CookLogFactory.create(recipe=recipe3, created_by=user2, rating=3.0, space=space_1)
return (recipe1, recipe2, recipe3, obj1, obj2, request.param)
@pytest.mark.parametrize("found_recipe, param_type", [
({'food': True}, 'foods'),
({'keyword': True}, 'keywords'),
({'book': True}, 'books'),
({
'food': True
}, 'foods'),
({
'keyword': True
}, 'keywords'),
({
'book': True
}, 'books'),
], indirect=['found_recipe'])
@pytest.mark.parametrize('operator', [('_or', 3, 0), ('_and', 1, 2), ])
@pytest.mark.parametrize('operator', [
('_or', 3, 0),
('_and', 1, 2),
])
def test_search_or_and_not(found_recipe, param_type, operator, recipes, u1_s1, space_1):
with scope(space=space_1):
param1 = f"{param_type}{operator[0]}={found_recipe[3].id}"
@@ -219,8 +212,7 @@ def test_search_or_and_not(found_recipe, param_type, operator, recipes, u1_s1, s
assert found_recipe[1].id in [x['id'] for x in r['results']]
assert found_recipe[2].id in [x['id'] for x in r['results']]
r = json.loads(u1_s1.get(reverse(LIST_URL) +
f'?{param1}&{param2}').content)
r = json.loads(u1_s1.get(reverse(LIST_URL) + f'?{param1}&{param2}').content)
assert r['count'] == operator[1]
assert found_recipe[2].id in [x['id'] for x in r['results']]
@@ -235,14 +227,15 @@ def test_search_or_and_not(found_recipe, param_type, operator, recipes, u1_s1, s
assert found_recipe[1].id not in [x['id'] for x in r['results']]
assert found_recipe[2].id not in [x['id'] for x in r['results']]
r = json.loads(u1_s1.get(reverse(LIST_URL) +
f'?{param1_not}&{param2_not}').content)
r = json.loads(u1_s1.get(reverse(LIST_URL) + f'?{param1_not}&{param2_not}').content)
assert r['count'] == 10 + operator[2]
assert found_recipe[2].id not in [x['id'] for x in r['results']]
@pytest.mark.parametrize("found_recipe", [
({'unit': True}),
({
'unit': True
}),
], indirect=['found_recipe'])
def test_search_units(found_recipe, recipes, u1_s1, space_1):
with scope(space=space_1):
@@ -260,24 +253,28 @@ def test_search_units(found_recipe, recipes, u1_s1, space_1):
assert found_recipe[1].id in [x['id'] for x in r['results']]
assert found_recipe[2].id in [x['id'] for x in r['results']]
r = json.loads(u1_s1.get(reverse(LIST_URL) +
f'?{param1}&{param2}').content)
r = json.loads(u1_s1.get(reverse(LIST_URL) + f'?{param1}&{param2}').content)
assert r['count'] == 3
assert found_recipe[2].id in [x['id'] for x in r['results']]
@pytest.mark.skipif(sqlite, reason="requires PostgreSQL")
@pytest.mark.parametrize("user1", itertools.product(
[
('fuzzy_search', True), ('fuzzy_search', False),
('fuzzy_lookups', True), ('fuzzy_lookups', False)
],
[('unaccent', True), ('unaccent', False)]
), indirect=['user1'], ids=str)
@pytest.mark.parametrize(
"user1",
itertools.product([('fuzzy_search', True), ('fuzzy_search', False), ('fuzzy_lookups', True), ('fuzzy_lookups', False)], [('unaccent', True), ('unaccent', False)]),
indirect=['user1'],
ids=str
)
@pytest.mark.parametrize("found_recipe, param_type", [
({'unit': True}, 'unit'),
({'keyword': True}, 'keyword'),
({'food': True}, 'food'),
({
'unit': True
}, 'unit'),
({
'keyword': True
}, 'keyword'),
({
'food': True
}, 'food'),
], indirect=['found_recipe'], ids=str)
def test_fuzzy_lookup(found_recipe, recipes, param_type, user1, space_1):
with scope(space=space_1):
@@ -285,35 +282,50 @@ def test_fuzzy_lookup(found_recipe, recipes, param_type, user1, space_1):
param1 = f"query={user1[3]}"
param2 = f"query={user1[4]}"
r = json.loads(user1[0].get(reverse(list_url) +
f'?{param1}&limit=2').content)
assert len([x['id'] for x in r['results'] if x['id'] in [
found_recipe[3].id, found_recipe[4].id]]) == user1[1]
r = json.loads(user1[0].get(reverse(list_url) + f'?{param1}&limit=2').content)
assert len([x['id'] for x in r['results'] if x['id'] in [found_recipe[3].id, found_recipe[4].id]]) == user1[1]
r = json.loads(user1[0].get(reverse(list_url) + f'?{param2}&limit=10').content)
assert len([x['id'] for x in r['results'] if x['id'] in [found_recipe[3].id, found_recipe[4].id]]) == user1[2]
r = json.loads(user1[0].get(reverse(list_url) +
f'?{param2}&limit=10').content)
assert len([x['id'] for x in r['results'] if x['id'] in [
found_recipe[3].id, found_recipe[4].id]]) == user1[2]
# commenting this out for general use - it is really slow
# it should be run on occasion to ensure everything still works
@pytest.mark.skipif(sqlite and True, reason="requires PostgreSQL")
@pytest.mark.parametrize("user1", itertools.product(
[
('fuzzy_search', True), ('fuzzy_search', False),
('fulltext', True), ('fulltext', False),
('icontains', True), ('icontains', False),
('istartswith', True), ('istartswith', False),
],
[('unaccent', True), ('unaccent', False)]
), indirect=['user1'], ids=str)
@pytest.mark.parametrize("found_recipe", [
({'name': True}),
({'description': True}),
({'instruction': True}),
({'keyword': True}),
({'food': True}),
], indirect=['found_recipe'], ids=str)
@pytest.mark.parametrize(
"user1",
itertools.product([
('fuzzy_search', True),
('fuzzy_search', False),
('fulltext', True),
('fulltext', False),
('icontains', True),
('icontains', False),
('istartswith', True),
('istartswith', False),
], [('unaccent', True), ('unaccent', False)]),
indirect=['user1'],
ids=str
)
@pytest.mark.parametrize(
"found_recipe", [
({
'name': True
}),
({
'description': True
}),
({
'instruction': True
}),
({
'keyword': True
}),
({
'food': True
}),
], indirect=['found_recipe'], ids=str
)
# user array contains: user client, expected count of search, expected count of mispelled search, search string, mispelled search string, user search preferences
def test_search_string(found_recipe, recipes, user1, space_1):
with scope(space=space_1):
@@ -321,28 +333,37 @@ def test_search_string(found_recipe, recipes, user1, space_1):
param2 = f"query={user1[4]}"
r = json.loads(user1[0].get(reverse(LIST_URL) + f'?{param1}').content)
assert len([x['id'] for x in r['results'] if x['id'] in [
found_recipe[0].id, found_recipe[1].id]]) == user1[1]
assert len([x['id'] for x in r['results'] if x['id'] in [found_recipe[0].id, found_recipe[1].id]]) == user1[1]
r = json.loads(user1[0].get(reverse(LIST_URL) + f'?{param2}').content)
assert len([x['id'] for x in r['results'] if x['id'] in [
found_recipe[0].id, found_recipe[1].id]]) == user1[2]
assert len([x['id'] for x in r['results'] if x['id'] in [found_recipe[0].id, found_recipe[1].id]]) == user1[2]
@pytest.mark.parametrize("found_recipe, param_type, result", [
({'viewedon': True}, 'viewedon', (1, 1)),
({'cookedon': True}, 'cookedon', (1, 1)),
# created dates are not filtered by user
({'createdon': True}, 'createdon', (2, 12)),
# updated dates are not filtered by user
({'createdon': True}, 'updatedon', (2, 12)),
], indirect=['found_recipe'])
@pytest.mark.parametrize(
"found_recipe, param_type, result",
[
({
'viewedon': True
}, 'viewedon', (1, 1)),
({
'cookedon': True
}, 'cookedon', (1, 1)),
# created dates are not filtered by user
({
'createdon': True
}, 'createdon', (1, 12)),
# updated dates are not filtered by user
({
'createdon': True
}, 'updatedon', (1, 12))
],
indirect=['found_recipe']
)
def test_search_date(found_recipe, recipes, param_type, result, u1_s1, u2_s1, space_1):
# force updated_at to equal created_at datetime
with scope(space=space_1):
for recipe in Recipe.objects.all():
Recipe.objects.filter(id=recipe.id).update(
updated_at=recipe.created_at)
Recipe.objects.filter(id=recipe.id).update(updated_at=recipe.created_at)
date = (timezone.now() - timedelta(days=15)).strftime("%Y-%m-%d")
param1 = f"?{param_type}={date}"
@@ -355,19 +376,14 @@ def test_search_date(found_recipe, recipes, param_type, result, u1_s1, u2_s1, sp
assert r['count'] == result[1]
assert found_recipe[1].id in [x['id'] for x in r['results']]
# test today's date returns for lte and gte searches
r = json.loads(u2_s1.get(reverse(LIST_URL) + f'{param1}').content)
assert r['count'] == result[0]
assert found_recipe[2].id in [x['id'] for x in r['results']]
r = json.loads(u2_s1.get(reverse(LIST_URL) + f'{param2}').content)
assert r['count'] == result[1]
assert found_recipe[2].id in [x['id'] for x in r['results']]
@pytest.mark.parametrize("found_recipe, param_type", [
({'rating': True}, 'rating'),
({'timescooked': True}, 'timescooked'),
({
'rating': True
}, 'rating'),
({
'timescooked': True
}, 'timescooked'),
], indirect=['found_recipe'])
def test_search_count(found_recipe, recipes, param_type, u1_s1, u2_s1, space_1):
param1 = f'?{param_type}=3'
@@ -385,8 +401,7 @@ def test_search_count(found_recipe, recipes, param_type, u1_s1, u2_s1, space_1):
# test search for not rated/cooked
r = json.loads(u1_s1.get(reverse(LIST_URL) + param3).content)
assert r['count'] == 11
assert (found_recipe[0].id or found_recipe[1].id) not in [
x['id'] for x in r['results']]
assert (found_recipe[0].id or found_recipe[1].id) not in [x['id'] for x in r['results']]
# test matched returns for lte and gte searches
r = json.loads(u2_s1.get(reverse(LIST_URL) + param1).content)