mirror of https://github.com/mealie-recipes/mealie.git
synced 2025-08-05 13:35:23 +02:00

fix(deps): update dependency recipe-scrapers to v14.57.0 (#3804)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Michael Genson <71845777+michael-genson@users.noreply.github.com>
Co-authored-by: Kuchenpirat <24235032+Kuchenpirat@users.noreply.github.com>

This commit is contained in: parent aabab73310 · commit dc64484b8e

3 changed files with 52 additions and 64 deletions
poetry.lock (generated): 8 lines changed
@@ -2609,18 +2609,18 @@ tests = ["html5lib", "pytest", "pytest-cov"]

 [[package]]
 name = "recipe-scrapers"
-version = "14.56.0"
+version = "14.57.0"
 description = "Python package, scraping recipes from all over the internet"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "recipe_scrapers-14.56.0-py3-none-any.whl", hash = "sha256:77ad1a1d21077877dd5d29ae0423a488bdf9084fb495d7f8a8b20dd2545160cf"},
-    {file = "recipe_scrapers-14.56.0.tar.gz", hash = "sha256:97b40b33a2e29416a7348d86db784a1e21568b78d978c3148b572244ff85ca00"},
+    {file = "recipe_scrapers-14.57.0-py3-none-any.whl", hash = "sha256:6e45de0ca6fdb634a319799973940ab70fae02821ca525e6b3917a146b86a99f"},
+    {file = "recipe_scrapers-14.57.0.tar.gz", hash = "sha256:e6a83cb82519f9730d6deebe44219db28e29f9738a497ab0a60bfa67135e775c"},
 ]

 [package.dependencies]
 beautifulsoup4 = ">=4.12.3"
-extruct = ">=0.15.0"
+extruct = ">=0.17.0"
 isodate = ">=0.6.1"
 requests = ">=2.31.0"
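For context on the package being bumped: recipe-scrapers parses recipe pages (largely via their schema.org markup) into structured fields. A minimal usage sketch, not taken from this repository and using a placeholder URL, looks roughly like this:

# Minimal sketch of recipe-scrapers 14.x usage; the URL is a placeholder and this
# snippet is illustrative, not code from this commit.
from recipe_scrapers import scrape_me

scraper = scrape_me("https://example.com/some-recipe-page")

print(scraper.title())         # recipe title
print(scraper.yields())        # e.g. "4 servings"
print(scraper.total_time())    # total time in minutes
print(scraper.ingredients())   # list of ingredient strings
print(scraper.instructions())  # instruction text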
@@ -1,3 +1,4 @@
+import inspect
 import json
 import os
 import random
@@ -8,14 +9,17 @@ from typing import Generator
 from uuid import uuid4
 from zipfile import ZipFile

+from httpx import Response
 import pytest
 from bs4 import BeautifulSoup
 from fastapi.testclient import TestClient
 from pytest import MonkeyPatch
 from recipe_scrapers._abstract import AbstractScraper
 from recipe_scrapers._schemaorg import SchemaOrg
+from recipe_scrapers.plugins import SchemaOrgFillPlugin
 from slugify import slugify

+from mealie.pkgs.safehttp.transport import AsyncSafeTransport
 from mealie.repos.repository_factory import AllRepositories
 from mealie.schema.recipe.recipe import Recipe, RecipeCategory, RecipeSummary, RecipeTag
 from mealie.schema.recipe.recipe_category import CategorySave, TagSave
@@ -72,6 +76,14 @@ def get_init(html_path: Path):
         self.url = url
         self.schema = SchemaOrg(page_data)

+        # attach the SchemaOrgFill plugin
+        if not hasattr(self.__class__, "plugins_initialized"):
+            for name, _ in inspect.getmembers(self, inspect.ismethod):  # type: ignore
+                current_method = getattr(self.__class__, name)
+                current_method = SchemaOrgFillPlugin.run(current_method)
+                setattr(self.__class__, name, current_method)
+            setattr(self.__class__, "plugins_initialized", True)
+
     return init_override
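The lines added to get_init wrap each method of the scraper class with recipe_scrapers' SchemaOrgFillPlugin, guarded so the wrapping happens only once per class. A stripped-down sketch of that wrap-every-method-once pattern, with a hypothetical plugin standing in for SchemaOrgFillPlugin (whose internals are not shown in this diff), is:

# Sketch of the "wrap every bound method once per class" pattern used above.
# ExamplePlugin and ExampleScraper are hypothetical stand-ins; only the shape of
# plugin.run(method) mirrors the SchemaOrgFillPlugin call in the test.
import inspect


class ExamplePlugin:
    @classmethod
    def run(cls, method):
        def wrapper(self, *args, **kwargs):
            # a real plugin could fall back to schema.org data here
            return method(self, *args, **kwargs)

        return wrapper


class ExampleScraper:
    def title(self):
        return "placeholder title"


def attach_plugin_once(instance, plugin=ExamplePlugin):
    klass = instance.__class__
    if getattr(klass, "plugins_initialized", False):
        return
    for name, _ in inspect.getmembers(instance, inspect.ismethod):
        setattr(klass, name, plugin.run(getattr(klass, name)))
    klass.plugins_initialized = True


scraper = ExampleScraper()
attach_plugin_once(scraper)
print(scraper.title())  # still "placeholder title", now routed through the wrapper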
@@ -102,6 +114,16 @@ def test_create_by_url(
             "get_html",
             open_graph_override(recipe_data.html_file.read_text()),
         )
+
+    # Skip AsyncSafeTransport requests
+    async def return_empty_response(*args, **kwargs):
+        return Response(200, content=b"")
+
+    monkeypatch.setattr(
+        AsyncSafeTransport,
+        "handle_async_request",
+        return_empty_response,
+    )
     # Skip image downloader
     monkeypatch.setattr(
         RecipeDataService,
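The new monkeypatch stubs AsyncSafeTransport.handle_async_request so the test never makes a real network request; the handler just hands back an empty 200 response. The same idea expressed against httpx's public transport API, as a self-contained sketch (class and function names here are illustrative):

# Sketch: a custom httpx async transport that short-circuits every request to an
# empty 200 response, mirroring the stub used by the monkeypatch above.
import asyncio

import httpx


class EmptyResponseTransport(httpx.AsyncBaseTransport):
    async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
        # no network I/O: every request gets a canned empty response
        return httpx.Response(200, content=b"")


async def main() -> None:
    async with httpx.AsyncClient(transport=EmptyResponseTransport()) as client:
        response = await client.get("https://example.com")
        assert response.status_code == 200
        assert response.content == b""


asyncio.run(main())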
@@ -112,7 +134,9 @@ def test_create_by_url(
     api_client.delete(api_routes.recipes_slug(recipe_data.expected_slug), headers=unique_user.token)

     response = api_client.post(
-        api_routes.recipes_create_url, json={"url": recipe_data.url, "include_tags": False}, headers=unique_user.token
+        api_routes.recipes_create_url,
+        json={"url": recipe_data.url, "include_tags": recipe_data.include_tags},
+        headers=unique_user.token,
     )

     assert response.status_code == 201
@@ -128,67 +152,13 @@ def test_create_by_url(
     assert len(recipe_dict["recipeInstructions"]) == recipe_data.num_steps
     assert len(recipe_dict["recipeIngredient"]) == recipe_data.num_ingredients

+    if not recipe_data.include_tags:
+        return

-def test_create_by_url_with_tags(
-    api_client: TestClient,
-    unique_user: TestUser,
-    monkeypatch: MonkeyPatch,
-):
-    html_file = data.html_nutty_umami_noodles_with_scallion_brown_butter_and_snow_peas_recipe
+    expected_tags = recipe_data.expected_tags or set()
+    assert len(recipe_dict["tags"]) == len(expected_tags)

-    # Override init function for AbstractScraper to use the test html instead of calling the url
-    monkeypatch.setattr(
-        AbstractScraper,
-        "__init__",
-        get_init(html_file),
-    )
-    # Override the get_html method of all scraper strategies to return the test html
-    for scraper_cls in DEFAULT_SCRAPER_STRATEGIES:
-        monkeypatch.setattr(
-            scraper_cls,
-            "get_html",
-            open_graph_override(html_file.read_text()),
-        )
-    # Skip image downloader
-    monkeypatch.setattr(
-        RecipeDataService,
-        "scrape_image",
-        lambda *_: "TEST_IMAGE",
-    )
-
-    response = api_client.post(
-        api_routes.recipes_create_url,
-        json={"url": "https://google.com", "include_tags": True},  # URL Doesn't matter
-        headers=unique_user.token,
-    )
-    assert response.status_code == 201
-    slug = "nutty-umami-noodles-with-scallion-brown-butter-and-snow-peas"
-
-    # Get the recipe
-    response = api_client.get(api_routes.recipes_slug(slug), headers=unique_user.token)
-    assert response.status_code == 200
-
-    # Verifiy the tags are present and title cased
-    expected_tags = {
-        "Sauté",
-        "Pea",
-        "Noodle",
-        "Udon Noodle",
-        "Ramen Noodle",
-        "Dinner",
-        "Main",
-        "Vegetarian",
-        "Easy",
-        "Quick",
-        "Weeknight Meals",
-        "Web",
-    }
-
-    recipe = json.loads(response.text)
-
-    assert len(recipe["tags"]) == len(expected_tags)
-
-    for tag in recipe["tags"]:
+    for tag in recipe_dict["tags"]:
         assert tag["name"] in expected_tags
@@ -13,6 +13,9 @@ class RecipeSiteTestCase:
     num_steps: int
     html_file: Path

+    include_tags: bool = False
+    expected_tags: set[str] | None = None
+

 def get_recipe_test_cases():
     return [
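Putting the new fields in context, the attributes referenced across this commit suggest a test-case record roughly like the sketch below; the field order and any fields not visible in the diff are assumptions:

# Partial reconstruction of RecipeSiteTestCase from the fields referenced in this
# commit only; the real class in the repo may declare more fields or a different order.
from dataclasses import dataclass
from pathlib import Path


@dataclass
class RecipeSiteTestCase:
    url: str
    expected_slug: str
    num_ingredients: int
    num_steps: int
    html_file: Path

    include_tags: bool = False
    expected_tags: set[str] | None = None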
@@ -63,6 +66,21 @@ def get_recipe_test_cases():
             expected_slug="detroit-style-pepperoni-pizza",
             num_ingredients=8,
             num_steps=5,
+            include_tags=True,
+            expected_tags={
+                "Pizza",
+                "Basil",
+                "Dough",
+                "Dinner",
+                "Oregano",
+                "Mozzarella",
+                "Olive Oil",
+                "Pizza Dough",
+                "Basically",
+                "Flour",
+                "Web",
+                "Web Recipe",
+            },
         ),
     ]
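These test cases reach test_create_by_url through its recipe_data parameter; one common way to wire that up is pytest parametrization, sketched below under the assumption that the repo does something equivalent (the fixture/parametrize plumbing is not part of this diff, and the case class and values here are illustrative stand-ins):

# Sketch of driving a parametrized test from a list of test-case records, the same
# shape test_create_by_url relies on; the real repo may use a fixture instead.
from dataclasses import dataclass

import pytest


@dataclass
class ExampleCase:
    url: str
    expected_slug: str


def get_example_cases():
    return [ExampleCase(url="https://example.com/pizza", expected_slug="pizza")]


@pytest.mark.parametrize("recipe_data", get_example_cases(), ids=lambda case: case.expected_slug)
def test_slug_from_case(recipe_data):
    # the real test posts recipe_data.url to the API and asserts on the created recipe
    assert recipe_data.expected_slug in recipe_data.url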