mirror of https://github.com/mealie-recipes/mealie.git synced 2025-08-05 13:35:23 +02:00

fix: misused update over patch in last_update call (#2168)

* fixed mealplan timeline event task
fixed indentation to only look at one group at a time
changed grumpy update to happy patch

* updated pytest to catch this error

* I don't know how this got past the pre-commit
Michael Genson authored 2023-02-23 16:10:47 -06:00, committed by GitHub
parent 6a1503d1f6
commit 6418a10428
2 changed files with 52 additions and 33 deletions
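
The substance of the change: the task previously wrote the whole recipe back with repos.recipes.update(recipe.slug, recipe.cast(Recipe)), built from a RecipeSummary, while the fix writes only the changed field with repos.recipes.patch(recipe.slug, {"last_made": event_time}). A minimal, runnable sketch of why that distinction matters is below; FakeRecipeStore is an illustrative stand-in, not mealie's repository layer.

# Illustrative sketch only -- a toy in-memory store, not mealie's repository layer.
# It shows the general hazard fixed here: a full "update" driven by a partial view
# of the record can clobber fields, while a "patch" of one key leaves the rest alone.
from dataclasses import dataclass, field


@dataclass
class FakeRecipeStore:
    rows: dict[str, dict] = field(default_factory=dict)

    def update(self, slug: str, new_data: dict) -> None:
        # full replacement: anything missing from new_data is lost
        self.rows[slug] = dict(new_data)

    def patch(self, slug: str, partial: dict) -> None:
        # partial update: only the supplied keys change
        self.rows[slug] = {**self.rows[slug], **partial}


store = FakeRecipeStore({"pasta": {"name": "Pasta", "notes": "family favorite", "last_made": None}})

# an "update" built from a partial view that lacks `notes` wipes that field
store.update("pasta", {"name": "Pasta", "last_made": "2023-02-23"})
assert "notes" not in store.rows["pasta"]

# a "patch" with just the changed field preserves everything else
store.rows["pasta"]["notes"] = "family favorite"
store.patch("pasta", {"last_made": "2023-02-24"})
assert store.rows["pasta"]["notes"] == "family favorite"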


@@ -5,7 +5,7 @@ from pydantic import UUID4
 from mealie.db.db_setup import session_context
 from mealie.repos.all_repositories import get_repositories
 from mealie.schema.meal_plan.new_meal import PlanEntryType
-from mealie.schema.recipe.recipe import Recipe, RecipeSummary
+from mealie.schema.recipe.recipe import RecipeSummary
 from mealie.schema.recipe.recipe_timeline_events import (
     RecipeTimelineEventCreate,
     TimelineEventType,
@@ -26,12 +26,6 @@ def create_mealplan_timeline_events(group_id: UUID4 | None = None):
 
     with session_context() as session:
         repos = get_repositories(session)
-        event_bus_service = EventBusService(session=session, group_id=group_id)
-
-        timeline_events_to_create: list[RecipeTimelineEventCreate] = []
-        recipes_to_update: dict[UUID4, RecipeSummary] = {}
-        recipe_id_to_slug_map: dict[UUID4, str] = {}
-
         if group_id is None:
             # if not specified, we check all groups
             groups_data = repos.groups.page_all(PaginationQuery(page=1, per_page=-1))
@@ -41,6 +35,12 @@ def create_mealplan_timeline_events(group_id: UUID4 | None = None):
             group_ids = [group_id]
 
         for group_id in group_ids:
+            event_bus_service = EventBusService(session=session, group_id=group_id)
+
+            timeline_events_to_create: list[RecipeTimelineEventCreate] = []
+            recipes_to_update: dict[UUID4, RecipeSummary] = {}
+            recipe_id_to_slug_map: dict[UUID4, str] = {}
+
             mealplans = repos.meals.get_today(group_id)
             for mealplan in mealplans:
                 if not (mealplan.recipe and mealplan.user_id):
@@ -92,29 +92,28 @@ def create_mealplan_timeline_events(group_id: UUID4 | None = None):
 
                 recipe_id_to_slug_map[mealplan.recipe_id] = mealplan.recipe.slug
 
-        if not timeline_events_to_create:
-            return
+            if not timeline_events_to_create:
+                return
 
-        # TODO: use bulk operations
-        for event in timeline_events_to_create:
-            new_event = repos.recipe_timeline_events.create(event)
-            event_bus_service.dispatch(
-                integration_id=DEFAULT_INTEGRATION_ID,
-                group_id=group_id,  # type: ignore
-                event_type=EventTypes.recipe_updated,
-                document_data=EventRecipeTimelineEventData(
-                    operation=EventOperation.create,
-                    recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
-                    recipe_timeline_event_id=new_event.id,
-                ),
-            )
+            # TODO: use bulk operations
+            for event in timeline_events_to_create:
+                new_event = repos.recipe_timeline_events.create(event)
+                event_bus_service.dispatch(
+                    integration_id=DEFAULT_INTEGRATION_ID,
+                    group_id=group_id,
+                    event_type=EventTypes.recipe_updated,
+                    document_data=EventRecipeTimelineEventData(
+                        operation=EventOperation.create,
+                        recipe_slug=recipe_id_to_slug_map[new_event.recipe_id],
+                        recipe_timeline_event_id=new_event.id,
+                    ),
+                )
 
-        for recipe in recipes_to_update.values():
-            recipe.last_made = event_time
-            repos.recipes.update(recipe.slug, recipe.cast(Recipe))
-            event_bus_service.dispatch(
-                integration_id=DEFAULT_INTEGRATION_ID,
-                group_id=group_id,  # type: ignore
-                event_type=EventTypes.recipe_updated,
-                document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
-            )
+            for recipe in recipes_to_update.values():
+                repos.recipes.patch(recipe.slug, {"last_made": event_time})
+                event_bus_service.dispatch(
+                    integration_id=DEFAULT_INTEGRATION_ID,
+                    group_id=group_id,
+                    event_type=EventTypes.recipe_updated,
+                    document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
+                )


@@ -25,7 +25,8 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
     assert response.status_code == 201
 
     response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
-    recipe = RecipeSummary.parse_obj(response.json())
+    original_recipe_data: dict = response.json()
+    recipe = RecipeSummary.parse_obj(original_recipe_data)
     recipe_id = recipe.id
 
     assert recipe.last_made is None
@@ -57,9 +58,28 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
 
     # make sure the recipe's last made date was updated
     response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
-    recipe = RecipeSummary.parse_obj(response.json())
+    new_recipe_data: dict = response.json()
+    recipe = RecipeSummary.parse_obj(new_recipe_data)
     assert recipe.last_made.date() == date.today()  # type: ignore
 
+    # make sure nothing else was updated
+    for data in [original_recipe_data, new_recipe_data]:
+        data.pop("dateUpdated")
+        data.pop("updateAt")
+        data.pop("lastMade")
+
+    # instructions ids are generated randomly and aren't consistent between get requests
+    old_instructions: list[dict] = original_recipe_data.pop("recipeInstructions")
+    new_instructions: list[dict] = new_recipe_data.pop("recipeInstructions")
+    assert len(old_instructions) == len(new_instructions)
+
+    for old, new in zip(old_instructions, new_instructions, strict=True):
+        old.pop("id")
+        new.pop("id")
+        assert old == new
+
+    assert original_recipe_data == new_recipe_data
+
 
 def test_new_mealplan_event_duplicates(api_client: TestClient, unique_user: TestUser):
     recipe_name = random_string(length=25)
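
The added assertions follow a general pattern worth noting: fetch the record before and after the background task runs, pop the fields that are legitimately allowed to change (plus ids that are regenerated on every read), and require everything else to be identical. A self-contained illustration with made-up payloads (not real mealie API responses):

# Made-up payloads for illustration; field names mirror the test, not a full API response.
original = {
    "name": "Pasta",
    "lastMade": None,
    "dateUpdated": "2023-01-01",
    "recipeInstructions": [{"id": "a1", "text": "Boil water"}],
}
updated = {
    "name": "Pasta",
    "lastMade": "2023-02-23",
    "dateUpdated": "2023-02-23",
    "recipeInstructions": [{"id": "b2", "text": "Boil water"}],
}

# drop the fields that are expected to change between the two reads
for data in (original, updated):
    data.pop("dateUpdated")
    data.pop("lastMade")

# instruction ids are regenerated per request, so compare instructions without them
old_steps = original.pop("recipeInstructions")
new_steps = updated.pop("recipeInstructions")
for old, new in zip(old_steps, new_steps, strict=True):
    old.pop("id")
    new.pop("id")
    assert old == new

# whatever remains must be untouched by the background task
assert original == updated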