feat: Move "on hand" and "last made" to household (#4616)
Co-authored-by: Kuchenpirat <24235032+Kuchenpirat@users.noreply.github.com>
This commit is contained in: parent e565b919df, commit e9892aba89
53 changed files with 1618 additions and 400 deletions
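The change replaces the recipe-level "last made" timestamp and the boolean "on hand" flags with per-household state: each household records its own last-made date for a recipe, and foods/tools carry the list of households that have them on hand. The tests below read the household-scoped view through the new `/api/households/self/recipes/{recipe_slug}` endpoint. As a minimal sketch only, the same flow over plain HTTP might look like the snippet below; the base URL and token are placeholders, and the PATCH path is inferred from the `api_routes.recipes_slug_last_made` helper used in the tests rather than confirmed by this diff.

from datetime import UTC, datetime

import requests  # third-party HTTP client, used only for this sketch

BASE_URL = "http://localhost:9000"  # placeholder: your Mealie instance
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder: a valid API token
slug = "my-recipe"  # placeholder slug

# Record "last made" for the caller's household only (timestamp payload as in the tests).
requests.patch(
    f"{BASE_URL}/api/recipes/{slug}/last-made",  # path inferred from api_routes.recipes_slug_last_made
    json={"timestamp": datetime.now(UTC).isoformat()},
    headers=HEADERS,
)

# Read the household-scoped summary; this route is defined in tests/utils/api_routes below.
response = requests.get(f"{BASE_URL}/api/households/self/recipes/{slug}", headers=HEADERS)
print(response.json()["lastMade"])  # set for this household; other households still see None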
@@ -1,6 +1,13 @@
from datetime import datetime, timezone
from uuid import UUID

from dateutil.parser import parse as parse_dt
from fastapi.testclient import TestClient

from mealie.db.models.household import HouseholdToRecipe
from mealie.schema.recipe.recipe import Recipe
from tests.utils import api_routes
from tests.utils.factories import random_string
from tests.utils.fixture_schemas import TestUser

@@ -18,3 +25,61 @@ def test_get_household_members(api_client: TestClient, user_tuple: list[TestUser
    assert str(usr_1.user_id) in all_ids
    assert str(usr_2.user_id) in all_ids
    assert str(h2_user.user_id) not in all_ids


def test_get_household_recipe_default(api_client: TestClient, unique_user: TestUser):
    recipe = unique_user.repos.recipes.create(
        Recipe(
            user_id=unique_user.user_id,
            group_id=UUID(unique_user.group_id),
            name=random_string(),
        )
    )
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=unique_user.token)
    assert response.status_code == 200
    assert response.json()["recipeId"] == str(recipe.id)
    assert response.json()["lastMade"] is None


def test_get_household_recipe(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
    dt_now = datetime.now(tz=timezone.utc)
    recipe = unique_user.repos.recipes.create(
        Recipe(
            user_id=unique_user.user_id,
            group_id=UUID(unique_user.group_id),
            name=random_string(),
        )
    )

    session = unique_user.repos.session
    session.add(
        HouseholdToRecipe(
            session=session,
            household_id=UUID(unique_user.household_id),
            recipe_id=recipe.id,
            last_made=dt_now,
        )
    )
    session.commit()

    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=unique_user.token)
    assert response.status_code == 200

    data = response.json()
    assert data["recipeId"] == str(recipe.id)
    assert data["lastMade"]
    assert parse_dt(data["lastMade"]) == dt_now

    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=h2_user.token)
    assert response.status_code == 200

    h2_data = response.json()
    assert h2_data["recipeId"] == str(recipe.id)
    assert h2_data["lastMade"] is None


def test_get_household_recipe_invalid_recipe(api_client: TestClient, unique_user: TestUser):
    response = api_client.get(
        api_routes.households_self_recipes_recipe_slug(random_string()), headers=unique_user.token
    )
    assert response.status_code == 404
@@ -1,6 +1,7 @@
from datetime import UTC, datetime
from datetime import UTC, datetime, timedelta

import pytest
from dateutil.parser import parse as parse_dt
from fastapi.testclient import TestClient

from mealie.schema.cookbook.cookbook import SaveCookBook

@@ -233,28 +234,50 @@ def test_user_can_update_last_made_on_other_household(
    assert response.status_code == 201
    h2_recipe = h2_user.repos.recipes.get_one(response.json())
    assert h2_recipe and h2_recipe.id
    h2_recipe_id = h2_recipe.id
    h2_recipe_slug = h2_recipe.slug

    response = api_client.get(api_routes.recipes_slug(h2_recipe_slug), headers=unique_user.token)
    assert response.status_code == 200
    recipe = response.json()
    assert recipe["id"] == str(h2_recipe_id)
    old_last_made = recipe["lastMade"]
    dt_1 = datetime.now(tz=UTC)
    dt_2 = dt_1 + timedelta(days=2)

    now = datetime.now(UTC).isoformat().replace("+00:00", "Z")
    # set last made for unique_user and make sure it only updates globally and for unique_user
    response = api_client.patch(
        api_routes.recipes_slug_last_made(h2_recipe_slug), json={"timestamp": now}, headers=unique_user.token
        api_routes.recipes_slug_last_made(h2_recipe.slug),
        json={"timestamp": dt_2.isoformat()},
        headers=unique_user.token,
    )
    assert response.status_code == 200

    # confirm the last made date was updated
    response = api_client.get(api_routes.recipes_slug(h2_recipe_slug), headers=unique_user.token)
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(h2_recipe_slug), headers=unique_user.token)
    assert response.status_code == 200
    recipe = response.json()
    assert recipe["id"] == str(h2_recipe_id)
    new_last_made = recipe["lastMade"]
    assert new_last_made == now != old_last_made
    assert (last_made_json := response.json()["lastMade"])
    assert parse_dt(last_made_json) == dt_2

    response = api_client.get(api_routes.households_self_recipes_recipe_slug(h2_recipe_slug), headers=h2_user.token)
    assert response.status_code == 200
    assert response.json()["lastMade"] is None

    recipe = h2_user.repos.recipes.get_one(h2_recipe_slug)
    assert recipe
    assert recipe.last_made == dt_2

    # set last made for h2_user and make sure it only updates globally and for h2_user
    response = api_client.patch(
        api_routes.recipes_slug_last_made(h2_recipe.slug), json={"timestamp": dt_1.isoformat()}, headers=h2_user.token
    )
    assert response.status_code == 200
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(h2_recipe_slug), headers=h2_user.token)
    assert response.status_code == 200
    assert (last_made_json := response.json()["lastMade"])
    assert parse_dt(last_made_json) == dt_1

    response = api_client.get(api_routes.households_self_recipes_recipe_slug(h2_recipe_slug), headers=unique_user.token)
    assert response.status_code == 200
    assert (last_made_json := response.json()["lastMade"])
    assert parse_dt(last_made_json) == dt_2

    # this shouldn't have updated since dt_2 is newer than dt_1
    recipe = h2_user.repos.recipes.get_one(h2_recipe_slug)
    assert recipe
    assert recipe.last_made == dt_2


def test_cookbook_recipes_includes_all_households(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
@@ -14,14 +14,32 @@ from tests.utils.fixture_schemas import TestUser


def create_food(user: TestUser, on_hand: bool = False):
    if on_hand:
        household = user.repos.households.get_by_slug_or_id(user.household_id)
        assert household
        households = [household.slug]
    else:
        households = []

    return user.repos.ingredient_foods.create(
        SaveIngredientFood(id=uuid4(), name=random_string(), group_id=user.group_id, on_hand=on_hand)
        SaveIngredientFood(
            id=uuid4(), name=random_string(), group_id=user.group_id, households_with_ingredient_food=households
        )
    )


def create_tool(user: TestUser, on_hand: bool = False):
    if on_hand:
        household = user.repos.households.get_by_slug_or_id(user.household_id)
        assert household
        households = [household.slug]
    else:
        households = []

    return user.repos.tools.create(
        RecipeToolSave(id=uuid4(), name=random_string(), group_id=user.group_id, on_hand=on_hand)
        RecipeToolSave(
            id=uuid4(), name=random_string(), group_id=user.group_id, on_hand=on_hand, households_with_tool=households
        )
    )

@@ -568,7 +586,7 @@ def test_include_cross_household_recipes(api_client: TestClient, unique_user: Te
    try:
        response = api_client.get(
            api_routes.recipes_suggestions,
            params={"maxMissingFoods": 0, "foods": [str(known_food.id)], "includeCrossHousehold": True},
            params={"maxMissingFoods": 0, "foods": [str(known_food.id)]},
            headers=h2_user.token,
        )
        response.raise_for_status()

@@ -579,3 +597,61 @@ def test_include_cross_household_recipes(api_client: TestClient, unique_user: Te
    finally:
        unique_user.repos.recipes.delete(recipe.slug)
        h2_user.repos.recipes.delete(other_recipe.slug)


def test_respect_cross_household_on_hand_food(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
    on_hand_food = create_food(unique_user, on_hand=True)  # only on-hand for unique_user
    other_food = create_food(unique_user)

    recipe = create_recipe(unique_user, foods=[on_hand_food, other_food])
    try:
        response = api_client.get(
            api_routes.recipes_suggestions,
            params={"maxMissingFoods": 0, "foods": [str(other_food.id)]},
            headers=unique_user.token,
        )
        response.raise_for_status()
        data = response.json()
        assert len(data["items"]) == 1
        assert data["items"][0]["recipe"]["id"] == str(recipe.id)

        response = api_client.get(
            api_routes.recipes_suggestions,
            params={"maxMissingFoods": 0, "foods": [str(other_food.id)]},
            headers=h2_user.token,
        )
        response.raise_for_status()
        data = response.json()
        assert len(data["items"]) == 0

    finally:
        unique_user.repos.recipes.delete(recipe.slug)


def test_respect_cross_household_on_hand_tool(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
    on_hand_tool = create_tool(unique_user, on_hand=True)  # only on-hand for unique_user
    other_tool = create_tool(unique_user)

    recipe = create_recipe(unique_user, tools=[on_hand_tool, other_tool])
    try:
        response = api_client.get(
            api_routes.recipes_suggestions,
            params={"maxMissingTools": 0, "tools": [str(other_tool.id)]},
            headers=unique_user.token,
        )
        response.raise_for_status()
        data = response.json()
        assert len(data["items"]) == 1
        assert data["items"][0]["recipe"]["id"] == str(recipe.id)

        response = api_client.get(
            api_routes.recipes_suggestions,
            params={"maxMissingTools": 0, "tools": [str(other_tool.id)]},
            headers=h2_user.token,
        )
        response.raise_for_status()
        data = response.json()
        assert len(data["items"]) == 0

    finally:
        unique_user.repos.recipes.delete(recipe.slug)
@@ -33,6 +33,7 @@ from mealie.schema.response.pagination import (
    OrderDirection,
    PaginationQuery,
)
from mealie.schema.user.user import UserRatingUpdate
from mealie.services.seeder.seeder_service import SeederService
from tests.utils import api_routes
from tests.utils.factories import random_int, random_string

@@ -1320,3 +1321,105 @@ def test_pagination_filter_nested(api_client: TestClient, user_tuple: list[TestU
        recipe_id = event_data["recipeId"]
        assert recipe_id in recipe_ids[i]
        assert recipe_id not in recipe_ids[(i + 1) % len(user_tuple)]


def test_pagination_filter_by_custom_last_made(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
    recipe_1, recipe_2 = (
        unique_user.repos.recipes.create(
            Recipe(user_id=unique_user.user_id, group_id=unique_user.group_id, name=random_string())
        )
        for _ in range(2)
    )
    dt_1 = "2023-02-25"
    dt_2 = "2023-03-25"

    r = api_client.patch(
        api_routes.recipes_slug_last_made(recipe_1.slug),
        json={"timestamp": dt_1},
        headers=unique_user.token,
    )
    assert r.status_code == 200
    r = api_client.patch(
        api_routes.recipes_slug_last_made(recipe_2.slug),
        json={"timestamp": dt_2},
        headers=unique_user.token,
    )
    assert r.status_code == 200
    r = api_client.patch(
        api_routes.recipes_slug_last_made(recipe_1.slug),
        json={"timestamp": dt_2},
        headers=h2_user.token,
    )
    assert r.status_code == 200
    r = api_client.patch(
        api_routes.recipes_slug_last_made(recipe_2.slug),
        json={"timestamp": dt_1},
        headers=h2_user.token,
    )
    assert r.status_code == 200

    params = {"page": 1, "perPage": -1, "queryFilter": "lastMade > 2023-03-01"}

    # User 1 should fetch Recipe 2
    response = api_client.get(api_routes.recipes, params=params, headers=unique_user.token)
    assert response.status_code == 200
    recipes_data = response.json()["items"]
    assert len(recipes_data) == 1
    assert recipes_data[0]["id"] == str(recipe_2.id)

    # User 2 should fetch Recipe 1
    response = api_client.get(api_routes.recipes, params=params, headers=h2_user.token)
    assert response.status_code == 200
    recipes_data = response.json()["items"]
    assert len(recipes_data) == 1
    assert recipes_data[0]["id"] == str(recipe_1.id)


def test_pagination_filter_by_custom_rating(api_client: TestClient, user_tuple: list[TestUser]):
    user_1, user_2 = user_tuple
    recipe_1, recipe_2 = (
        user_1.repos.recipes.create(Recipe(user_id=user_1.user_id, group_id=user_1.group_id, name=random_string()))
        for _ in range(2)
    )

    r = api_client.post(
        api_routes.users_id_ratings_slug(user_1.user_id, recipe_1.slug),
        json=UserRatingUpdate(rating=5).model_dump(),
        headers=user_1.token,
    )
    assert r.status_code == 200
    r = api_client.post(
        api_routes.users_id_ratings_slug(user_1.user_id, recipe_2.slug),
        json=UserRatingUpdate(rating=1).model_dump(),
        headers=user_1.token,
    )
    assert r.status_code == 200
    r = api_client.post(
        api_routes.users_id_ratings_slug(user_2.user_id, recipe_1.slug),
        json=UserRatingUpdate(rating=1).model_dump(),
        headers=user_2.token,
    )
    assert r.status_code == 200
    r = api_client.post(
        api_routes.users_id_ratings_slug(user_2.user_id, recipe_2.slug),
        json=UserRatingUpdate(rating=5).model_dump(),
        headers=user_2.token,
    )
    assert r.status_code == 200

    qf = "rating > 3"
    params = {"page": 1, "perPage": -1, "queryFilter": qf}

    # User 1 should fetch Recipe 1
    response = api_client.get(api_routes.recipes, params=params, headers=user_1.token)
    assert response.status_code == 200
    recipes_data = response.json()["items"]
    assert len(recipes_data) == 1
    assert recipes_data[0]["id"] == str(recipe_1.id)

    # User 2 should fetch Recipe 2
    response = api_client.get(api_routes.recipes, params=params, headers=user_2.token)
    assert response.status_code == 200
    recipes_data = response.json()["items"]
    assert len(recipes_data) == 1
    assert recipes_data[0]["id"] == str(recipe_2.id)
@@ -1,4 +1,4 @@
from datetime import UTC, datetime
from datetime import UTC, datetime, timedelta
from typing import cast
from uuid import UUID

@@ -8,7 +8,7 @@ from sqlalchemy.orm import Session
from mealie.repos.all_repositories import get_repositories
from mealie.repos.repository_factory import AllRepositories
from mealie.repos.repository_recipes import RepositoryRecipes
from mealie.schema.household.household import HouseholdCreate
from mealie.schema.household.household import HouseholdCreate, HouseholdRecipeCreate
from mealie.schema.recipe import RecipeIngredient, SaveIngredientFood
from mealie.schema.recipe.recipe import Recipe, RecipeCategory, RecipeSummary
from mealie.schema.recipe.recipe_category import CategoryOut, CategorySave, TagSave

@@ -706,6 +706,63 @@ def test_random_order_recipe_search(
    assert not all(i == random_ordered[0] for i in random_ordered)


def test_order_by_last_made(unique_user: TestUser, h2_user: TestUser):
    dt_1 = datetime.now(UTC)
    dt_2 = dt_1 + timedelta(days=2)

    recipe_1, recipe_2 = (
        unique_user.repos.recipes.create(
            Recipe(user_id=unique_user.user_id, group_id=unique_user.group_id, name=random_string())
        )
        for _ in range(2)
    )

    # In ascending order:
    # unique_user: recipe_1, recipe_2
    # h2_user: recipe_2, recipe_1
    unique_user.repos.household_recipes.create(
        HouseholdRecipeCreate(recipe_id=recipe_1.id, household_id=unique_user.household_id, last_made=dt_1)
    )
    h2_user.repos.household_recipes.create(
        HouseholdRecipeCreate(recipe_id=recipe_1.id, household_id=h2_user.household_id, last_made=dt_2)
    )
    unique_user.repos.household_recipes.create(
        HouseholdRecipeCreate(recipe_id=recipe_2.id, household_id=unique_user.household_id, last_made=dt_2)
    )
    h2_user.repos.household_recipes.create(
        HouseholdRecipeCreate(recipe_id=recipe_2.id, household_id=h2_user.household_id, last_made=dt_1)
    )

    h1_recipes = get_repositories(
        unique_user.repos.session, group_id=unique_user.group_id, household_id=None
    ).recipes.by_user(unique_user.user_id)
    h2_recipes = get_repositories(h2_user.repos.session, group_id=h2_user.group_id, household_id=None).recipes.by_user(
        h2_user.user_id
    )

    h1_query = h1_recipes.page_all(
        PaginationQuery(
            page=1,
            per_page=-1,
            order_by="last_made",
            order_direction=OrderDirection.asc,
            query_filter=f"id IN [{recipe_1.id}, {recipe_2.id}]",
        )
    )
    assert [item.id for item in h1_query.items] == [recipe_1.id, recipe_2.id]

    h2_query = h2_recipes.page_all(
        PaginationQuery(
            page=1,
            per_page=-1,
            order_by="lastMade",
            order_direction=OrderDirection.asc,
            query_filter=f"id IN [{recipe_1.id}, {recipe_2.id}]",
        )
    )
    assert [item.id for item in h2_query.items] == [recipe_2.id, recipe_1.id]


def test_order_by_rating(user_tuple: tuple[TestUser, TestUser]):
    user_1, user_2 = user_tuple
    database = user_1.repos
@@ -1,9 +1,8 @@
import filecmp
import statistics
from pathlib import Path
from typing import Any, cast
from typing import Any

import pytest
from sqlalchemy.orm import Session

import tests.data as test_data

@@ -12,11 +11,14 @@ from mealie.db.db_setup import session_context
from mealie.db.models._model_utils.guid import GUID
from mealie.db.models.group import Group
from mealie.db.models.household.cookbook import CookBook
from mealie.db.models.household.household import Household
from mealie.db.models.household.household_to_recipe import HouseholdToRecipe
from mealie.db.models.household.mealplan import GroupMealPlanRules
from mealie.db.models.household.shopping_list import ShoppingList
from mealie.db.models.labels import MultiPurposeLabel
from mealie.db.models.recipe.ingredient import IngredientFoodModel, IngredientUnitModel
from mealie.db.models.recipe.recipe import RecipeModel
from mealie.db.models.recipe.tool import Tool
from mealie.db.models.users.user_to_recipe import UserToRecipe
from mealie.db.models.users.users import User
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter

@@ -74,30 +76,148 @@ def test_database_restore():
    assert snapshop_1[s1].sort(key=dict_sorter) == snapshop_2[s2].sort(key=dict_sorter)


@pytest.mark.parametrize(
    "backup_path",
    [
        test_data.backup_version_44e8d670719d_1,
        test_data.backup_version_44e8d670719d_2,
        test_data.backup_version_44e8d670719d_3,
        test_data.backup_version_44e8d670719d_4,
        test_data.backup_version_ba1e4a6cfe99_1,
        test_data.backup_version_bcfdad6b7355_1,
        test_data.backup_version_09aba125b57a_1,
        test_data.backup_version_86054b40fd06_1,
    ],
    ids=[
        "44e8d670719d_1: add extras to shopping lists, list items, and ingredient foods",
        "44e8d670719d_2: add extras to shopping lists, list items, and ingredient foods",
        "44e8d670719d_3: add extras to shopping lists, list items, and ingredient foods",
        "44e8d670719d_4: add extras to shopping lists, list items, and ingredient foods",
        "bcfdad6b7355_1: remove tool name and slug unique contraints",
        "ba1e4a6cfe99_1: added plural names and alias tables for foods and units",
        "09aba125b57a_1: add OIDC auth method (Safari-mangled ZIP structure)",
        "86054b40fd06_1: added query_filter_string to cookbook and mealplan",
    ],
)
def test_database_restore_data(backup_path: Path):
def _5ab195a474eb_add_normalized_search_properties(session: Session):
    recipes = session.query(RecipeModel).all()

    for recipe in recipes:
        if recipe.name:
            assert recipe.name_normalized
        if recipe.description:
            assert recipe.description_normalized

        for ingredient in recipe.recipe_ingredient:
            if ingredient.note:
                assert ingredient.note_normalized
            if ingredient.original_text:
                assert ingredient.original_text_normalized


def _b04a08da2108_added_shopping_list_label_settings(session: Session):
    shopping_lists = session.query(ShoppingList).all()
    labels = session.query(MultiPurposeLabel).all()

    for shopping_list in shopping_lists:
        group_labels = [label for label in labels if label.group_id == shopping_list.group_id]
        assert len(shopping_list.label_settings) == len(group_labels)
        for label_setting, label in zip(
            sorted(shopping_list.label_settings, key=lambda x: x.label.id),
            sorted(group_labels, key=lambda x: x.id),
            strict=True,
        ):
            assert label_setting.label == label


def _04ac51cbe9a4_added_group_slug(session: Session):
    groups = session.query(Group).all()

    for group in groups:
        assert group.slug


def _0341b154f79a_added_normalized_unit_and_food_names(session: Session):
    foods = session.query(IngredientFoodModel).all()
    units = session.query(IngredientUnitModel).all()

    for food in foods:
        if food.name:
            assert food.name_normalized

    for unit in units:
        assert unit.name_normalized
        if unit.abbreviation:
            assert unit.abbreviation_normalized


def _d7c6efd2de42_migrate_favorites_and_ratings_to_user_ratings(session: Session):
    recipes = session.query(RecipeModel).all()

    users_by_group_id: dict[GUID, list[User]] = {}
    for recipe in recipes:
        users = users_by_group_id.get(recipe.group_id)
        if users is None:
            users = session.query(User).filter(User.group_id == recipe.group_id).all()
            users_by_group_id[recipe.group_id] = users

        user_to_recipes = session.query(UserToRecipe).filter(UserToRecipe.recipe_id == recipe.id).all()
        user_ratings = [x.rating for x in user_to_recipes if x.rating]
        assert recipe.rating == (statistics.mean(user_ratings) if user_ratings else None)


def _86054b40fd06_added_query_filter_string_to_cookbook_and_mealplan(session: Session):
    cookbooks = session.query(CookBook).all()
    mealplan_rules = session.query(GroupMealPlanRules).all()

    for cookbook in cookbooks:
        parts = []
        if cookbook.categories:
            relop = "CONTAINS ALL" if cookbook.require_all_categories else "IN"
            vals = ",".join([f'"{cat.id}"' for cat in cookbook.categories])
            parts.append(f"recipe_category.id {relop} [{vals}]")
        if cookbook.tags:
            relop = "CONTAINS ALL" if cookbook.require_all_tags else "IN"
            vals = ",".join([f'"{tag.id}"' for tag in cookbook.tags])
            parts.append(f"tags.id {relop} [{vals}]")
        if cookbook.tools:
            relop = "CONTAINS ALL" if cookbook.require_all_tools else "IN"
            vals = ",".join([f'"{tool.id}"' for tool in cookbook.tools])
            parts.append(f"tools.id {relop} [{vals}]")

        expected_query_filter_string = " AND ".join(parts)
        assert cookbook.query_filter_string == expected_query_filter_string

    for rule in mealplan_rules:
        parts = []
        if rule.categories:
            vals = ",".join([f'"{cat.id}"' for cat in rule.categories])
            parts.append(f"recipe_category.id CONTAINS ALL [{vals}]")
        if rule.tags:
            vals = ",".join([f'"{tag.id}"' for tag in rule.tags])
            parts.append(f"tags.id CONTAINS ALL [{vals}]")
        if rule.households:
            vals = ",".join([f'"{household.id}"' for household in rule.households])
            parts.append(f"household_id IN [{vals}]")

        expected_query_filter_string = " AND ".join(parts)
        assert rule.query_filter_string == expected_query_filter_string


def _b9e516e2d3b3_add_household_to_recipe_last_made_household_to_foods_and_tools(session: Session):
    groups = session.query(Group).all()

    for group in groups:
        households = session.query(Household).filter(Household.group_id == group.id).all()
        household_ids = {household.id for household in households}
        recipes = session.query(RecipeModel).filter(RecipeModel.group_id == group.id).all()
        for recipe in recipes:
            for household in households:
                household_to_recipe = (
                    session.query(HouseholdToRecipe)
                    .filter(HouseholdToRecipe.recipe_id == recipe.id, HouseholdToRecipe.household_id == household.id)
                    .one_or_none()
                )

                if recipe.last_made:
                    assert household_to_recipe
                    assert household_to_recipe.last_made == recipe.last_made
                else:
                    assert not household_to_recipe

        foods = session.query(IngredientFoodModel).filter(IngredientFoodModel.group_id == group.id).all()
        for food in foods:
            if food.on_hand:
                assert {hh.id for hh in food.households_with_ingredient_food} == household_ids
            else:
                assert not food.households_with_ingredient_food

        tools = session.query(Tool).filter(Tool.group_id == group.id).all()
        for tool in tools:
            if tool.on_hand:
                assert {hh.id for hh in tool.households_with_tool} == household_ids
            else:
                assert not tool.households_with_tool


def test_database_restore_data():
    """
    This tests real user backups to make sure the data is restored correctly. The data has been anonymized, but
    relationships and data types should be preserved.

@@ -106,114 +226,45 @@ def test_database_restore_data(backup_path: Path):
    If a new migration is added that does any sort of data manipulation, this test should be updated.
    """

    backup_paths = [
        test_data.backup_version_44e8d670719d_1,
        test_data.backup_version_44e8d670719d_2,
        test_data.backup_version_44e8d670719d_3,
        test_data.backup_version_44e8d670719d_4,
        test_data.backup_version_ba1e4a6cfe99_1,
        test_data.backup_version_bcfdad6b7355_1,
        test_data.backup_version_09aba125b57a_1,
        test_data.backup_version_86054b40fd06_1,
    ]

    migration_funcs = [
        _5ab195a474eb_add_normalized_search_properties,
        _b04a08da2108_added_shopping_list_label_settings,
        _04ac51cbe9a4_added_group_slug,
        _0341b154f79a_added_normalized_unit_and_food_names,
        _d7c6efd2de42_migrate_favorites_and_ratings_to_user_ratings,
        _86054b40fd06_added_query_filter_string_to_cookbook_and_mealplan,
        _b9e516e2d3b3_add_household_to_recipe_last_made_household_to_foods_and_tools,
    ]

    settings = get_app_settings()
    backup_v2 = BackupV2(settings.DB_URL)

    # create a backup of the existing data so we can restore it later
    original_data_backup = backup_v2.backup()

    try:
        assert backup_path.exists()
        backup_v2.restore(backup_path)
        for backup_path in backup_paths:
            assert backup_path.exists()
            backup_v2.restore(backup_path)

        # make sure migrations populated data successfully
        with session_context() as session:
            session = cast(Session, session)

            groups = session.query(Group).all()
            recipes = session.query(RecipeModel).all()
            shopping_lists = session.query(ShoppingList).all()
            labels = session.query(MultiPurposeLabel).all()

            foods = session.query(IngredientFoodModel).all()
            units = session.query(IngredientUnitModel).all()

            cookbooks = session.query(CookBook).all()
            mealplan_rules = session.query(GroupMealPlanRules).all()

            # 2023-02-14-20.45.41_5ab195a474eb_add_normalized_search_properties
            for recipe in recipes:
                if recipe.name:
                    assert recipe.name_normalized
                if recipe.description:
                    assert recipe.description_normalized

                for ingredient in recipe.recipe_ingredient:
                    if ingredient.note:
                        assert ingredient.note_normalized
                    if ingredient.original_text:
                        assert ingredient.original_text_normalized

            # 2023-02-21-22.03.19_b04a08da2108_added_shopping_list_label_settings
            for shopping_list in shopping_lists:
                group_labels = [label for label in labels if label.group_id == shopping_list.group_id]
                assert len(shopping_list.label_settings) == len(group_labels)
                for label_setting, label in zip(
                    sorted(shopping_list.label_settings, key=lambda x: x.label.id),
                    sorted(group_labels, key=lambda x: x.id),
                    strict=True,
                ):
                    assert label_setting.label == label

            # 2023-08-06-21.00.34_04ac51cbe9a4_added_group_slug
            for group in groups:
                assert group.slug

            # 2023-09-01-14.55.42_0341b154f79a_added_normalized_unit_and_food_names
            for food in foods:
                if food.name:
                    assert food.name_normalized

            for unit in units:
                assert unit.name_normalized
                if unit.abbreviation:
                    assert unit.abbreviation_normalized

            # 2024-03-18-02.28.15_d7c6efd2de42_migrate_favorites_and_ratings_to_user_ratings
            users_by_group_id: dict[GUID, list[User]] = {}
            for recipe in recipes:
                users = users_by_group_id.get(recipe.group_id)
                if users is None:
                    users = session.query(User).filter(User.group_id == recipe.group_id).all()
                    users_by_group_id[recipe.group_id] = users

                user_to_recipes = session.query(UserToRecipe).filter(UserToRecipe.recipe_id == recipe.id).all()
                user_ratings = [x.rating for x in user_to_recipes if x.rating]
                assert recipe.rating == (statistics.mean(user_ratings) if user_ratings else None)

            # 2024-10-08-21.17.31_86054b40fd06_added_query_filter_string_to_cookbook_and_mealplan
            for cookbook in cookbooks:
                parts = []
                if cookbook.categories:
                    relop = "CONTAINS ALL" if cookbook.require_all_categories else "IN"
                    vals = ",".join([f'"{cat.id}"' for cat in cookbook.categories])
                    parts.append(f"recipe_category.id {relop} [{vals}]")
                if cookbook.tags:
                    relop = "CONTAINS ALL" if cookbook.require_all_tags else "IN"
                    vals = ",".join([f'"{tag.id}"' for tag in cookbook.tags])
                    parts.append(f"tags.id {relop} [{vals}]")
                if cookbook.tools:
                    relop = "CONTAINS ALL" if cookbook.require_all_tools else "IN"
                    vals = ",".join([f'"{tool.id}"' for tool in cookbook.tools])
                    parts.append(f"tools.id {relop} [{vals}]")

                expected_query_filter_string = " AND ".join(parts)
                assert cookbook.query_filter_string == expected_query_filter_string

            for rule in mealplan_rules:
                parts = []
                if rule.categories:
                    vals = ",".join([f'"{cat.id}"' for cat in rule.categories])
                    parts.append(f"recipe_category.id CONTAINS ALL [{vals}]")
                if rule.tags:
                    vals = ",".join([f'"{tag.id}"' for tag in rule.tags])
                    parts.append(f"tags.id CONTAINS ALL [{vals}]")
                if rule.households:
                    vals = ",".join([f'"{household.id}"' for household in rule.households])
                    parts.append(f"household_id IN [{vals}]")

                expected_query_filter_string = " AND ".join(parts)
                assert rule.query_filter_string == expected_query_filter_string
            with session_context() as session:
                for migration_func in migration_funcs:
                    try:
                        migration_func(session)
                    except Exception as e:
                        session.rollback()
                        raise Exception(
                            f'Migration "{migration_func.__name__}" failed on backup "{backup_path}"'
                        ) from e

    finally:
        backup_v2.restore(original_data_backup)
@@ -1,12 +1,13 @@
from datetime import UTC, datetime, timedelta

from dateutil.parser import parse as parse_dt
from fastapi.testclient import TestClient
from pydantic import UUID4

from mealie.schema.household.household import HouseholdRecipeSummary
from mealie.schema.meal_plan.new_meal import CreatePlanEntry
from mealie.schema.recipe.recipe import RecipeSummary
from mealie.schema.recipe.recipe import RecipeLastMade, RecipeSummary
from mealie.services.scheduler.tasks.create_timeline_events import create_mealplan_timeline_events
from tests import utils
from tests.utils import api_routes
from tests.utils.factories import random_int, random_string
from tests.utils.fixture_schemas import TestUser

@@ -17,7 +18,7 @@ def test_no_mealplans():
    create_mealplan_timeline_events()


def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
    recipe_name = random_string(length=25)
    response = api_client.post(api_routes.recipes, json={"name": recipe_name}, headers=unique_user.token)
    assert response.status_code == 201

@@ -65,7 +66,7 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
    new_recipe_data: dict = response.json()
    recipe = RecipeSummary.model_validate(new_recipe_data)
    assert recipe.last_made.date() == datetime.now(UTC).date()  # type: ignore
    assert recipe.last_made and recipe.last_made.date() == datetime.now(UTC).date()

    # make sure nothing else was updated
    for data in [original_recipe_data, new_recipe_data]:

@@ -85,6 +86,19 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):

    assert original_recipe_data == new_recipe_data

    # make sure the user's last made date was updated
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe_name), headers=unique_user.token)
    assert response.status_code == 200
    response_json = response.json()
    assert response_json["lastMade"]
    assert parse_dt(response_json["lastMade"]).date() == datetime.now(UTC).date()

    # make sure the other user's last made date was not updated
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe_name), headers=h2_user.token)
    assert response.status_code == 200
    response_json = response.json()
    assert response_json["lastMade"] is None


def test_new_mealplan_event_duplicates(api_client: TestClient, unique_user: TestUser):
    recipe_name = random_string(length=25)

@@ -191,7 +205,7 @@ def test_new_mealplan_events_with_multiple_recipes(api_client: TestClient, uniqu
    assert len(response_json["items"]) == target_count


def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser):
def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser, h2_user: TestUser):
    recipe_name = random_string(length=25)
    response = api_client.post(api_routes.recipes, json={"name": recipe_name}, headers=unique_user.token)
    assert response.status_code == 201

@@ -201,12 +215,22 @@ def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser
    recipe_id = str(recipe.id)

    future_dt = datetime.now(UTC) + timedelta(days=random_int(1, 10))
    recipe.last_made = future_dt
    response = api_client.put(
        api_routes.recipes_slug(recipe.slug), json=utils.jsonify(recipe), headers=unique_user.token
    response = api_client.patch(
        api_routes.recipes_slug_last_made(recipe.slug),
        data=RecipeLastMade(timestamp=future_dt).model_dump_json(),
        headers=unique_user.token,
    )
    assert response.status_code == 200

    # verify the last made date was updated only on unique_user
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=unique_user.token)
    household_recipe = HouseholdRecipeSummary.model_validate(response.json())
    assert household_recipe.last_made == future_dt

    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=h2_user.token)
    household_recipe = HouseholdRecipeSummary.model_validate(response.json())
    assert household_recipe.last_made is None

    new_plan = CreatePlanEntry(date=datetime.now(UTC).date(), entry_type="dinner", recipe_id=recipe_id).model_dump(
        by_alias=True
    )

@@ -216,9 +240,14 @@ def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser
    response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
    assert response.status_code == 201

    # run the task and make sure the recipe's last made date was not updated
    # run the task and make sure the recipe's last made date was not updated for either user
    create_mealplan_timeline_events()

    response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
    recipe = RecipeSummary.model_validate(response.json())
    assert recipe.last_made == future_dt
    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=unique_user.token)
    assert response.status_code == 200
    household_recipe = HouseholdRecipeSummary.model_validate(response.json())
    assert household_recipe.last_made == future_dt

    response = api_client.get(api_routes.households_self_recipes_recipe_slug(recipe.slug), headers=h2_user.token)
    household_recipe = HouseholdRecipeSummary.model_validate(response.json())
    assert household_recipe.last_made is None
@@ -370,6 +370,11 @@ def households_recipe_actions_item_id_trigger_recipe_slug(item_id, recipe_slug):
    return f"{prefix}/households/recipe-actions/{item_id}/trigger/{recipe_slug}"


def households_self_recipes_recipe_slug(recipe_slug):
    """`/api/households/self/recipes/{recipe_slug}`"""
    return f"{prefix}/households/self/recipes/{recipe_slug}"


def households_shopping_items_item_id(item_id):
    """`/api/households/shopping/items/{item_id}`"""
    return f"{prefix}/households/shopping/items/{item_id}"