Mirror of https://github.com/TandoorRecipes/recipes.git

Merge branch 'develop' of https://github.com/TandoorRecipes/recipes into develop
@@ -2,7 +2,7 @@
 FROM python:3.9-alpine3.15

 #Install all dependencies.
-RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap
+RUN apk add --no-cache postgresql-libs postgresql-client gettext zlib libjpeg libwebp libxml2-dev libxslt-dev py-cryptography openldap gcompat

 #Print all logs without buffering it.
 ENV PYTHONUNBUFFERED 1
@@ -15,19 +15,16 @@ RUN mkdir /opt/recipes
 WORKDIR /opt/recipes

 COPY requirements.txt ./

 RUN \
     if [ `apk --print-arch` = "armv7" ]; then \
     printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
     fi

-RUN apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev zlib-dev jpeg-dev libwebp-dev openssl-dev libffi-dev cargo openldap-dev python3-dev && \
-    echo -n "INPUT ( libldap.so )" > /usr/lib/libldap_r.so && \
+RUN apk add --no-cache --virtual .build-deps gcc musl-dev zlib-dev jpeg-dev libwebp-dev python3-dev && \
     python -m venv venv && \
     /opt/recipes/venv/bin/python -m pip install --upgrade pip && \
-    venv/bin/pip install wheel==0.37.1 && \
-    venv/bin/pip install setuptools_rust==1.1.2 && \
-    venv/bin/pip install -r requirements.txt --no-cache-dir &&\
+    venv/bin/pip install -r requirements.txt --no-cache-dir --no-binary=Pillow && \
    apk --purge del .build-deps

 #Copy project and execute it.
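Note on what the two Dockerfile hunks combine to do: `gcompat` provides a glibc compatibility layer on musl-based Alpine, which appears to be what lets the image rely on prebuilt wheels (e.g. from piwheels on armv7) and drop the long list of build dependencies and the pinned `wheel`/`setuptools_rust` installs, while `--no-binary=Pillow` forces Pillow alone to compile from source, presumably because its prebuilt wheel was the remaining problem on 32-bit ARM. A hedged Python sketch of the arch check that the `printf` line implements; the `platform.machine()` values and the helper name are assumptions for illustration (the Dockerfile itself shells out to `apk --print-arch`):

```python
# Sketch only, not project code: what the Dockerfile's armv7 branch does.
# On 32-bit ARM it points pip at piwheels.org as an extra package index.
import platform

def pip_conf_lines():
    # "armv7l"/"armv6l" are assumed 32-bit ARM machine names, not from the repo
    if platform.machine() in ("armv7l", "armv6l"):
        return ["[global]", "extra-index-url=https://www.piwheels.org/simple"]
    return []

print("\n".join(pip_conf_lines()))
```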
@@ -9,6 +9,8 @@ from recipe_scrapers._utils import get_host_name, normalize_string

 from cookbook.helper import recipe_url_import as helper
 from cookbook.helper.scrapers.scrapers import text_scraper
+from recipe_scrapers import scrape_me
+from recipe_scrapers._exceptions import NoSchemaFoundInWildMode


 def get_recipe_from_source(text, url, request):
@@ -63,34 +65,41 @@ def get_recipe_from_source(text, url, request):
     html_data = []
     images = []
     text = unquote(text)
+    scrape = None

-    try:
-        parse_list.append(remove_graph(json.loads(text)))
-        if not url and 'url' in parse_list[0]:
-            url = parse_list[0]['url']
-        scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)
+    if url:
+        try:
+            scrape = scrape_me(url_path=url, wild_mode=True)
+        except(NoSchemaFoundInWildMode):
+            pass
+    if not scrape:
+        try:
+            parse_list.append(remove_graph(json.loads(text)))
+            if not url and 'url' in parse_list[0]:
+                url = parse_list[0]['url']
+            scrape = text_scraper("<script type='application/ld+json'>" + text + "</script>", url=url)

-    except JSONDecodeError:
-        soup = BeautifulSoup(text, "html.parser")
-        html_data = get_from_html(soup)
-        images += get_images_from_source(soup, url)
-        for el in soup.find_all('script', type='application/ld+json'):
-            el = remove_graph(el)
-            if not url and 'url' in el:
-                url = el['url']
-            if type(el) == list:
-                for le in el:
-                    parse_list.append(le)
-            elif type(el) == dict:
-                parse_list.append(el)
-        for el in soup.find_all(type='application/json'):
-            el = remove_graph(el)
-            if type(el) == list:
-                for le in el:
-                    parse_list.append(le)
-            elif type(el) == dict:
-                parse_list.append(el)
-        scrape = text_scraper(text, url=url)
+        except JSONDecodeError:
+            soup = BeautifulSoup(text, "html.parser")
+            html_data = get_from_html(soup)
+            images += get_images_from_source(soup, url)
+            for el in soup.find_all('script', type='application/ld+json'):
+                el = remove_graph(el)
+                if not url and 'url' in el:
+                    url = el['url']
+                if type(el) == list:
+                    for le in el:
+                        parse_list.append(le)
+                elif type(el) == dict:
+                    parse_list.append(el)
+            for el in soup.find_all(type='application/json'):
+                el = remove_graph(el)
+                if type(el) == list:
+                    for le in el:
+                        parse_list.append(le)
+                elif type(el) == dict:
+                    parse_list.append(el)
+            scrape = text_scraper(text, url=url)

     recipe_json = helper.get_from_scraper(scrape, request)
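Taken together, the new control flow tries the generic wild-mode scraper against the live URL first and only falls back to parsing the submitted text when that fails. A condensed sketch of that fallback chain; the helper name `scrape_with_fallback` is illustrative, while `scrape_me`, its arguments, and the exception come straight from the imports in this commit:

```python
from recipe_scrapers import scrape_me
from recipe_scrapers._exceptions import NoSchemaFoundInWildMode

def scrape_with_fallback(text, url, text_scraper):
    """Condensed version of the flow above, for illustration only."""
    scrape = None
    if url:
        try:
            # wild mode attempts schema.org extraction on any host,
            # with no site-specific scraper class required
            scrape = scrape_me(url_path=url, wild_mode=True)
        except NoSchemaFoundInWildMode:
            pass  # page exposed no usable schema; fall through
    if not scrape:
        # fall back to local parsing of the pasted text/HTML
        scrape = text_scraper(text, url=url)
    return scrape
```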
@@ -114,7 +114,14 @@ def get_from_scraper(scrape, request):
     except Exception:
         pass

-    if source_url := scrape.url:
+    try:
+        source_url = scrape.canonical_url()
+    except Exception:
+        try:
+            source_url = scrape.url
+        except Exception:
+            pass
+    if source_url:
         recipe_json['source_url'] = source_url
         try:
             keywords.append(source_url.replace('http://', '').replace('https://', '').split('/')[0])
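The walrus one-liner becomes an explicit preference order: use `canonical_url()` when the scraper exposes one, otherwise the plain fetched URL. The keyword line then derives a bare host name by stripping the scheme and path; a tiny worked example with an illustrative URL:

```python
# Illustrative value only: how the keyword line recovers a bare host name.
source_url = "https://www.chefkoch.de/rezepte/123/"
keyword = source_url.replace('http://', '').replace('https://', '').split('/')[0]
assert keyword == "www.chefkoch.de"
```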
@@ -129,9 +136,11 @@ def get_from_scraper(scrape, request):
     ingredient_parser = IngredientParser(request, True)

     recipe_json['steps'] = []

-    for i in parse_instructions(scrape.instructions()):
-        recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
+    try:
+        for i in parse_instructions(scrape.instructions()):
+            recipe_json['steps'].append({'instruction': i, 'ingredients': [], })
+    except Exception:
+        pass
     if len(recipe_json['steps']) == 0:
         recipe_json['steps'].append({'instruction': '', 'ingredients': [], })
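The try/except around instruction parsing, combined with the empty-step fallback, guarantees the recipe editor always receives at least one step even when `instructions()` raises on a schema-less page. A self-contained illustration; `BrokenScrape` is a hypothetical stand-in, and the real code runs the result through `parse_instructions` instead of `split`:

```python
class BrokenScrape:  # hypothetical scraper whose schema data is missing
    def instructions(self):
        raise AttributeError("no instructions in schema")

recipe_json = {'steps': []}
try:
    for i in BrokenScrape().instructions().split("\n"):  # raises immediately
        recipe_json['steps'].append({'instruction': i, 'ingredients': []})
except Exception:
    pass
if len(recipe_json['steps']) == 0:
    # the editor needs at least one (possibly empty) step to render
    recipe_json['steps'].append({'instruction': '', 'ingredients': []})
assert recipe_json['steps'] == [{'instruction': '', 'ingredients': []}]
```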
@@ -1,6 +1,6 @@
 from bs4 import BeautifulSoup
 from json import JSONDecodeError
-from recipe_scrapers import SCRAPERS, get_host_name
+from recipe_scrapers import SCRAPERS
 from recipe_scrapers._factory import SchemaScraperFactory
 from recipe_scrapers._schemaorg import SchemaOrg
@@ -15,13 +15,7 @@ SCRAPERS.update(CUSTOM_SCRAPERS)


 def text_scraper(text, url=None):
-    domain = None
-    if url:
-        domain = get_host_name(url)
-    if domain in SCRAPERS:
-        scraper_class = SCRAPERS[domain]
-    else:
-        scraper_class = SchemaScraperFactory.SchemaScraper
+    scraper_class = SchemaScraperFactory.SchemaScraper

     class TextScraper(scraper_class):
         def __init__(
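With wild mode handling known sites upstream, `text_scraper` no longer needs the per-domain lookup (which is also why `get_host_name` drops out of the imports); every call now builds on the generic schema.org scraper. A hedged sketch of the resulting shape, assuming the recipe-scrapers internals named in the diff; the real `TextScraper.__init__` also wires up the soup/schema objects, omitted here:

```python
from recipe_scrapers._factory import SchemaScraperFactory

def text_scraper(text, url=None):
    # no SCRAPERS[domain] lookup any more: always the generic schema scraper
    scraper_class = SchemaScraperFactory.SchemaScraper

    class TextScraper(scraper_class):
        """Parses already-fetched page text instead of downloading the URL."""
        def __init__(self, page_data, url=None):
            # sketch only: the real __init__ builds BeautifulSoup/SchemaOrg
            # objects from page_data rather than fetching the url
            self.page_data = page_data
            self.url = url

    return TextScraper(text, url=url)
```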
@@ -27,7 +27,7 @@ class Paprika(Integration):
         recipe.description = '' if len(recipe_json['description'].strip()) > 500 else recipe_json['description'].strip()

         try:
-            if 'servings' in recipe_json['servings']:
+            if 'servings' in recipe_json:
                 recipe.servings = parse_servings(recipe_json['servings'])
                 recipe.servings_text = parse_servings_text(recipe_json['servings'])
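This small-looking change fixes a real bug: `recipe_json['servings']` is a value (typically a string), so the old test checked for the substring "servings" inside that value, and raised a `KeyError` (silently swallowed by the enclosing `try`) when the key was missing, instead of testing whether the key exists. A minimal demonstration with illustrative values:

```python
recipe_json = {'servings': '4'}

# old check: substring test on the value -- False here, so servings were
# silently skipped even though the data was present
assert not ('servings' in recipe_json['servings'])

# new check: the intended dict-key test
assert 'servings' in recipe_json
```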
@@ -78,7 +78,11 @@ class Plantoeat(Integration):
         current_recipe = ''

         for fl in file.readlines():
-            line = fl.decode("windows-1250")
+            try:
+                line = fl.decode("utf-8")
+            except UnicodeDecodeError:
+                line = fl.decode("windows-1250")

             if line.startswith('--------------'):
                 if current_recipe != '':
                     recipe_list.append(current_recipe)
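Decoding every line as windows-1250 mangles non-ASCII characters in UTF-8 exports, so the new per-line fallback tries UTF-8 first and keeps windows-1250 only as the legacy fallback. The same logic in isolation, with a hypothetical helper name:

```python
def decode_line(raw: bytes) -> str:
    """Per-line decode fallback as introduced above (illustrative wrapper)."""
    try:
        return raw.decode("utf-8")
    except UnicodeDecodeError:
        return raw.decode("windows-1250")

assert decode_line("Grüße".encode("utf-8")) == "Grüße"
assert decode_line(b"\xe6") == "ć"  # 0xE6 is ć in windows-1250, invalid UTF-8
```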
BIN  cookbook/locale/bg/LC_MESSAGES/django.mo  (new file; binary file not shown)
BIN  cookbook/locale/da/LC_MESSAGES/django.mo  (new file; binary file not shown)
(further binary files in this commit are not shown)
@@ -343,6 +343,11 @@ ProxyPassReverse / http://localhost:8080/ # replace port
 !!!info
     Always wait at least 2-3 minutes after the very first start, since migrations will take some time!

+!!!warning
+    If you want to use Tandoor on a Raspberry Pi running a 32-bit operating system you will need to use the following
+    docker image tags: `latest-raspi`, `beta-raspi` and the versioned `<x.y.z>-raspi`.
+    We strongly recommend using the new 64-bit Raspbian image, as the 32-bit version is not tested.
+
 If you're having issues with installing Tandoor on your Raspberry Pi or similar device,
 follow these instructions:
@@ -366,8 +366,10 @@ USE_TZ = True

 LANGUAGES = [
     ('hy', _('Armenian')),
+    ('bg', _('Bulgarian')),
     ('ca', _('Catalan')),
     ('cs', _('Czech')),
+    ('da', _('Danish')),
     ('nl', _('Dutch')),
     ('en', _('English')),
     ('fr', _('French')),
@@ -234,7 +234,7 @@
             </thead>
             <tr v-for="r in Recipes" :key="r.list_recipe">
                 <td>{{ r.recipe_mealplan.name }}</td>
-                <td>{{ r.recipe_mealplan.recipe_name }}</td>
+                <td><a :href="resolveDjangoUrl('view_recipe', r.recipe_mealplan.recipe)">{{ r.recipe_mealplan.recipe_name }}</a></td>
                 <td class="block-inline">
                     <b-form-input min="1" type="number" :debounce="300"
                                   :value="r.recipe_mealplan.servings"
@@ -648,7 +648,7 @@
                 <div class="col-6">
                     <a class="btn btn-block btn-success shadow-none" @click="entrymode = !entrymode; "
                     ><i class="fas fa-cart-plus"></i>
-                        {{ $t("New Entry") }}
+                        {{ $t("New_Entry") }}
                     </a>
                 </div>
                 <div class="col-6">
@@ -781,7 +781,7 @@ import GenericMultiselect from "@/components/GenericMultiselect"
 import LookupInput from "@/components/Modals/LookupInput"
 import ShoppingModal from "@/components/Modals/ShoppingModal"

-import {ApiMixin, getUserPreference, StandardToasts, makeToast} from "@/utils/utils"
+import {ApiMixin, getUserPreference, StandardToasts, makeToast, ResolveUrlMixin} from "@/utils/utils"
 import {ApiApiFactory} from "@/utils/openapi/api"

 Vue.use(BootstrapVue)
@@ -790,7 +790,7 @@ let SETTINGS_COOKIE_NAME = "shopping_settings"

 export default {
     name: "ShoppingListView",
-    mixins: [ApiMixin],
+    mixins: [ApiMixin, ResolveUrlMixin],
     components: {
         ContextMenu,
         ContextMenuItem,
@@ -202,6 +202,7 @@
     "Starting_Day": "Starting day of the week",
     "Meal_Types": "Meal types",
     "Meal_Type": "Meal type",
+    "New_Entry": "New Entry",
     "Clone": "Clone",
     "Drag_Here_To_Delete": "Drag here to delete",
     "Meal_Type_Required": "Meal type is required",