1
0
Fork 0
mirror of https://github.com/mealie-recipes/mealie.git synced 2025-08-05 21:45:25 +02:00

feat: Add Households to Mealie (#3970)

This commit is contained in:
Michael Genson 2024-08-22 10:14:32 -05:00 committed by GitHub
parent 0c29cef17d
commit eb170cc7e5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
315 changed files with 6975 additions and 3577 deletions

View file

@@ -11,7 +11,7 @@ from mealie.core.config import get_app_settings
from mealie.db.db_setup import session_context
from mealie.db.models._model_utils.guid import GUID
from mealie.db.models.group import Group
from mealie.db.models.group.shopping_list import ShoppingList
from mealie.db.models.household.shopping_list import ShoppingList
from mealie.db.models.labels import MultiPurposeLabel
from mealie.db.models.recipe.ingredient import IngredientFoodModel, IngredientUnitModel
from mealie.db.models.recipe.recipe import RecipeModel

View file

@@ -40,7 +40,7 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
new_plan["recipeId"] = str(recipe_id)
response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
assert response.status_code == 201
# run the task and check to make sure a new event was created from the mealplan
@@ -70,7 +70,7 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
# make sure nothing else was updated
for data in [original_recipe_data, new_recipe_data]:
data.pop("dateUpdated")
data.pop("updateAt")
data.pop("updatedAt")
data.pop("lastMade")
# instructions ids are generated randomly and aren't consistent between get requests
@@ -107,7 +107,7 @@ def test_new_mealplan_event_duplicates(api_client: TestClient, unique_user: Test
new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
new_plan["recipeId"] = str(recipe_id)
response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
assert response.status_code == 201
# run the task multiple times and make sure we only create one event
@@ -153,7 +153,7 @@ def test_new_mealplan_events_with_multiple_recipes(api_client: TestClient, uniqu
new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
new_plan["recipeId"] = str(recipe.id)
response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
assert response.status_code == 201
mealplan_count_by_recipe_id[recipe.id] += 1 # type: ignore
@@ -213,7 +213,7 @@ def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser
new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
new_plan["recipeId"] = str(recipe_id)
response = api_client.post(api_routes.groups_mealplans, json=new_plan, headers=unique_user.token)
response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
assert response.status_code == 201
# run the task and make sure the recipe's last made date was not updated

View file

@@ -1,7 +1,6 @@
from datetime import datetime, timezone
from mealie.repos.repository_factory import AllRepositories
from mealie.schema.group.group_shopping_list import ShoppingListItemCreate, ShoppingListItemOut, ShoppingListSave
from mealie.schema.household.group_shopping_list import ShoppingListItemCreate, ShoppingListItemOut, ShoppingListSave
from mealie.services.scheduler.tasks.delete_old_checked_shopping_list_items import (
MAX_CHECKED_ITEMS,
delete_old_checked_list_items,
@@ -10,12 +9,17 @@ from tests.utils.factories import random_int, random_string
from tests.utils.fixture_schemas import TestUser
def test_cleanup(database: AllRepositories, unique_user: TestUser):
list_repo = database.group_shopping_lists.by_group(unique_user.group_id)
def test_cleanup(unique_user: TestUser):
database = unique_user.repos
list_repo = database.group_shopping_lists
list_item_repo = database.group_shopping_list_item
shopping_list = list_repo.create(
ShoppingListSave(name=random_string(), group_id=unique_user.group_id, user_id=unique_user.user_id)
ShoppingListSave(
name=random_string(),
group_id=unique_user.group_id,
user_id=unique_user.user_id,
)
)
unchecked_items = list_item_repo.create_many(
[
@@ -40,12 +44,13 @@ def test_cleanup(database: AllRepositories, unique_user: TestUser):
for item in unchecked_items + checked_items:
assert item in shopping_list.list_items
checked_items.sort(key=lambda x: x.update_at or datetime.now(timezone.utc), reverse=True)
checked_items.sort(key=lambda x: x.updated_at or datetime.now(timezone.utc), reverse=True)
expected_kept_items = unchecked_items + checked_items[:MAX_CHECKED_ITEMS]
expected_deleted_items = checked_items[MAX_CHECKED_ITEMS:]
# make sure we only see the expected items
delete_old_checked_list_items()
database.session.commit()
shopping_list = list_repo.get_one(shopping_list.id) # type: ignore
assert shopping_list
assert len(shopping_list.list_items) == len(expected_kept_items)
@@ -55,12 +60,17 @@ def test_cleanup(database: AllRepositories, unique_user: TestUser):
assert item not in shopping_list.list_items
def test_no_cleanup(database: AllRepositories, unique_user: TestUser):
list_repo = database.group_shopping_lists.by_group(unique_user.group_id)
def test_no_cleanup(unique_user: TestUser):
database = unique_user.repos
list_repo = database.group_shopping_lists
list_item_repo = database.group_shopping_list_item
shopping_list = list_repo.create(
ShoppingListSave(name=random_string(), group_id=unique_user.group_id, user_id=unique_user.user_id)
ShoppingListSave(
name=random_string(),
group_id=unique_user.group_id,
user_id=unique_user.user_id,
)
)
unchecked_items = list_item_repo.create_many(
[
@@ -87,6 +97,7 @@ def test_no_cleanup(database: AllRepositories, unique_user: TestUser):
# make sure we still see all items
delete_old_checked_list_items()
database.session.commit()
shopping_list = list_repo.get_one(shopping_list.id) # type: ignore
assert shopping_list
assert len(shopping_list.list_items) == len(unchecked_items) + len(checked_items)

View file

@@ -1,9 +1,9 @@
from datetime import datetime, timedelta, timezone
from uuid import UUID
from pydantic import UUID4
from mealie.repos.repository_factory import AllRepositories
from mealie.schema.group.webhook import SaveWebhook, WebhookType
from mealie.schema.household.webhook import SaveWebhook, WebhookType
from mealie.services.event_bus_service.event_bus_listeners import WebhookEventListener
from tests.utils import random_string
from tests.utils.factories import random_bool
@@ -12,6 +12,7 @@ from tests.utils.fixture_schemas import TestUser
def webhook_factory(
group_id: str | UUID4,
household_id: str | UUID4,
enabled: bool = True,
name: str = "",
url: str = "",
@@ -25,22 +26,27 @@
webhook_type=webhook_type,
scheduled_time=scheduled_time.time() if scheduled_time else datetime.now(timezone.utc).time(),
group_id=group_id,
household_id=household_id,
)
def test_get_scheduled_webhooks_filter_query(database: AllRepositories, unique_user: TestUser):
def test_get_scheduled_webhooks_filter_query(unique_user: TestUser):
"""
get_scheduled_webhooks_test tests the get_scheduled_webhooks function on the webhook event bus listener.
"""
database = unique_user.repos
expected: list[SaveWebhook] = []
start = datetime.now(timezone.utc)
for _ in range(5):
new_item = webhook_factory(group_id=unique_user.group_id, enabled=random_bool())
new_item = webhook_factory(
group_id=unique_user.group_id, household_id=unique_user.household_id, enabled=random_bool()
)
out_of_range_item = webhook_factory(
group_id=unique_user.group_id,
household_id=unique_user.household_id,
enabled=random_bool(),
scheduled_time=(start - timedelta(minutes=20)),
)
@@ -51,7 +57,7 @@ def test_get_scheduled_webhooks_filter_query(database: AllRepositories, unique_u
if new_item.enabled:
expected.append(new_item)
event_bus_listener = WebhookEventListener(unique_user.group_id) # type: ignore
event_bus_listener = WebhookEventListener(UUID(unique_user.group_id), UUID(unique_user.household_id))
results = event_bus_listener.get_scheduled_webhooks(start, datetime.now(timezone.utc) + timedelta(minutes=5))
assert len(results) == len(expected)

View file

@@ -1,7 +1,7 @@
import tempfile
from pathlib import Path
from uuid import UUID
from mealie.repos.repository_factory import AllRepositories
from mealie.schema.recipe.recipe import Recipe
from mealie.services.recipe.recipe_bulk_service import RecipeBulkActionsService
from mealie.services.scheduler.tasks.purge_group_exports import purge_group_data_exports
@@ -9,7 +9,9 @@ from tests.utils.factories import random_int, random_string
from tests.utils.fixture_schemas import TestUser
def test_purge_group_exports(database: AllRepositories, unique_user: TestUser):
def test_purge_group_exports(unique_user: TestUser):
database = unique_user.repos
# create the export
group = database.groups.get_one(unique_user.group_id)
assert group
@@ -17,7 +19,14 @@ def test_purge_group_exports(database: AllRepositories, unique_user: TestUser):
assert user
recipe_exporter = RecipeBulkActionsService(database, user, group)
recipes = [
database.recipes.create(Recipe(name=random_string(), group_id=group.id)) for _ in range(random_int(2, 5))
database.recipes.create(
Recipe(
name=random_string(),
group_id=UUID(unique_user.group_id),
user_id=unique_user.user_id,
)
)
for _ in range(random_int(2, 5))
]
with tempfile.NamedTemporaryFile() as tmpfile:

View file

@@ -5,14 +5,16 @@ from mealie.services.user_services.user_service import UserService
from tests.utils.fixture_schemas import TestUser
def test_get_locked_users(database: AllRepositories, user_tuple: list[TestUser]) -> None:
def test_get_locked_users(user_tuple: list[TestUser]) -> None:
usr_1, usr_2 = user_tuple
database = usr_1.repos
# Setup
user_service = UserService(database)
user_1 = database.users.get_one(usr_1.user_id)
user_2 = database.users.get_one(usr_2.user_id)
assert user_1 and user_2
locked_users = user_service.get_locked_users()
assert len(locked_users) == 0
@@ -41,11 +43,13 @@ def test_get_locked_users(database: AllRepositories, user_tuple: list[TestUser])
user_service.unlock_user(user_2)
def test_lock_unlocker_user(database: AllRepositories, unique_user: TestUser) -> None:
def test_lock_unlocker_user(unique_user: TestUser) -> None:
database = unique_user.repos
user_service = UserService(database)
# Test that the user is unlocked
user = database.users.get_one(unique_user.user_id)
assert user
assert not user.locked_at
# Test that the user is locked
@@ -63,11 +67,13 @@ def test_lock_unlocker_user(database: AllRepositories, unique_user: TestUser) ->
assert not user.is_locked
def test_reset_locked_users(database: AllRepositories, unique_user: TestUser) -> None:
def test_reset_locked_users(unique_user: TestUser) -> None:
database = unique_user.repos
user_service = UserService(database)
# Test that the user is unlocked
user = database.users.get_one(unique_user.user_id)
assert user
assert not user.is_locked
assert not user.locked_at
@@ -80,6 +86,7 @@ def test_reset_locked_users(database: AllRepositories, unique_user: TestUser) ->
# Test that the locked user is not unlocked by reset
unlocked = user_service.reset_locked_users()
user = database.users.get_one(unique_user.user_id)
assert user
assert unlocked == 0
assert user.is_locked
assert user.login_attemps == 5
@@ -89,6 +96,7 @@ def test_reset_locked_users(database: AllRepositories, unique_user: TestUser) ->
database.users.update(user.id, user)
unlocked = user_service.reset_locked_users()
user = database.users.get_one(unique_user.user_id)
assert user
assert unlocked == 1
assert not user.is_locked
assert user.login_attemps == 0