Merge remote-tracking branch 'origin/develop' into HomeAssistantConnector
# Conflicts:
#	cookbook/forms.py
#	requirements.txt
@@ -1,5 +1,20 @@
import traceback
from collections import defaultdict
from decimal import Decimal

from cookbook.models import (Food, FoodProperty, Property, PropertyType, Supermarket,
                             SupermarketCategory, SupermarketCategoryRelation, Unit, UnitConversion)
import re


class OpenDataImportResponse:
    total_created = 0
    total_updated = 0
    total_untouched = 0
    total_errored = 0

    def to_dict(self):
        return {'total_created': self.total_created, 'total_updated': self.total_updated, 'total_untouched': self.total_untouched, 'total_errored': self.total_errored}


class OpenDataImporter:
@@ -18,69 +33,269 @@ class OpenDataImporter:
    def _update_slug_cache(self, object_class, datatype):
        self.slug_id_cache[datatype] = dict(object_class.objects.filter(space=self.request.space, open_data_slug__isnull=False).values_list('open_data_slug', 'id', ))
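        # the cache maps open_data_slug -> pk for objects already in this space, so later
        # import steps can resolve foreign keys by slug; it is refreshed after bulk writes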

    def import_units(self):
        datatype = 'unit'
    @staticmethod
    def _is_obj_identical(field_list, obj, existing_obj):
        """
        checks if the obj meant for import is identical to an already existing one
        :param field_list: list of field names to check
        :type field_list: list[str]
        :param obj: object meant for import
        :type obj: Object
        :param existing_obj: object already in DB
        :type existing_obj: Object
        :return: if objects are identical
        :rtype: bool
        """
        for field in field_list:
            if isinstance(getattr(obj, field), float) or isinstance(getattr(obj, field), Decimal):
                if abs(float(getattr(obj, field)) - float(existing_obj[field])) > 0.001: # convert both to float and check if basically equal
                    print(f'comparing FLOAT {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
                    return False
            elif getattr(obj, field) != existing_obj[field]:
                print(f'comparing {obj} failed because field {field} is not equal ({getattr(obj, field)} != {existing_obj[field]})')
                return False
        return True

    @staticmethod
    def _merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
        """
        sometimes there might be two objects conflicting for open data import (one has the slug, the other the name)
        this function checks if that is the case and merges the two objects if possible
        :param model_type: type of model to check/merge
        :type model_type: Model
        :param obj: object that should be created/updated
        :type obj: Model
        :param existing_data_slugs: dict of open data slugs mapped to objects
        :type existing_data_slugs: dict
        :param existing_data_names: dict of names mapped to objects
        :type existing_data_names: dict
        :return: true if merge was successful or not necessary else false
        :rtype: bool
        """
        if obj.open_data_slug in existing_data_slugs and obj.name in existing_data_names and existing_data_slugs[obj.open_data_slug]['pk'] != existing_data_names[obj.name]['pk']:
            try:
                source_obj = model_type.objects.get(pk=existing_data_slugs[obj.open_data_slug]['pk'])
                del existing_data_slugs[obj.open_data_slug]
                source_obj.merge_into(model_type.objects.get(pk=existing_data_names[obj.name]['pk']))
                return True
            except RuntimeError:
                return False # in the edge case (e.g. parent/child) that an object cannot be merged don't update it for now
        else:
            return True

    @staticmethod
    def _get_existing_obj(obj, existing_data_slugs, existing_data_names):
        """
        gets the existing object from slug or name cache
        :param obj: object that should be found
        :type obj: Model
        :param existing_data_slugs: dict of open data slugs mapped to objects
        :type existing_data_slugs: dict
        :param existing_data_names: dict of names mapped to objects
        :type existing_data_names: dict
        :return: existing object
        :rtype: dict
        """
        existing_obj = None
        if obj.open_data_slug in existing_data_slugs:
            existing_obj = existing_data_slugs[obj.open_data_slug]
        elif obj.name in existing_data_names:
            existing_obj = existing_data_names[obj.name]

        return existing_obj

    def import_units(self):
        od_response = OpenDataImportResponse()
        datatype = 'unit'
        model_type = Unit
        field_list = ['name', 'plural_name', 'base_unit', 'open_data_slug']

        existing_data_slugs = {}
        existing_data_names = {}
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_slug']] = obj
            existing_data_names[obj['name']] = obj

        update_list = []
        create_list = []

        insert_list = []
        for u in list(self.data[datatype].keys()):
            insert_list.append(Unit(
            obj = model_type(
                name=self.data[datatype][u]['name'],
                plural_name=self.data[datatype][u]['plural_name'],
                base_unit=self.data[datatype][u]['base_unit'] if self.data[datatype][u]['base_unit'] != '' else None,
                base_unit=self.data[datatype][u]['base_unit'].lower() if self.data[datatype][u]['base_unit'] != '' else None,
                open_data_slug=u,
                space=self.request.space
            ))
            )
            if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
                if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
                    od_response.total_errored += 1
                    continue # if conflicting objects exist and cannot be merged skip object

        if self.update_existing:
            return Unit.objects.bulk_create(insert_list, update_conflicts=True, update_fields=(
                'name', 'plural_name', 'base_unit', 'open_data_slug'), unique_fields=('space', 'name',))
        else:
            return Unit.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
                existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)

                if not self._is_obj_identical(field_list, obj, existing_obj):
                    obj.pk = existing_obj['pk']
                    update_list.append(obj)
                else:
                    od_response.total_untouched += 1
            else:
                create_list.append(obj)

        if self.update_existing and len(update_list) > 0:
            model_type.objects.bulk_update(update_list, field_list)
            od_response.total_updated += len(update_list)

        if len(create_list) > 0:
            model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
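            # update_conflicts turns the insert into an upsert: rows that clash on the
            # (space, name) unique constraint are updated instead of raising an IntegrityError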
            od_response.total_created += len(create_list)

        return od_response

    def import_category(self):
        od_response = OpenDataImportResponse()
        datatype = 'category'
        model_type = SupermarketCategory
        field_list = ['name', 'open_data_slug']

        existing_data_slugs = {}
        existing_data_names = {}
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_slug']] = obj
            existing_data_names[obj['name']] = obj

        update_list = []
        create_list = []

        insert_list = []
        for k in list(self.data[datatype].keys()):
            insert_list.append(SupermarketCategory(
            obj = model_type(
                name=self.data[datatype][k]['name'],
                open_data_slug=k,
                space=self.request.space
            ))
            )

        return SupermarketCategory.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
            if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
                if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
                    od_response.total_errored += 1
                    continue # if conflicting objects exist and cannot be merged skip object

                existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)

                if not self._is_obj_identical(field_list, obj, existing_obj):
                    obj.pk = existing_obj['pk']
                    update_list.append(obj)
                else:
                    od_response.total_untouched += 1
            else:
                create_list.append(obj)

        if self.update_existing and len(update_list) > 0:
            model_type.objects.bulk_update(update_list, field_list)
            od_response.total_updated += len(update_list)

        if len(create_list) > 0:
            model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
            od_response.total_created += len(create_list)

        return od_response

    def import_property(self):
        od_response = OpenDataImportResponse()
        datatype = 'property'
        model_type = PropertyType
        field_list = ['name', 'unit', 'fdc_id', 'open_data_slug']

        existing_data_slugs = {}
        existing_data_names = {}
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_slug']] = obj
            existing_data_names[obj['name']] = obj

        update_list = []
        create_list = []

        insert_list = []
        for k in list(self.data[datatype].keys()):
            insert_list.append(PropertyType(
            obj = model_type(
                name=self.data[datatype][k]['name'],
                unit=self.data[datatype][k]['unit'],
                fdc_id=self.data[datatype][k]['fdc_id'],
                open_data_slug=k,
                space=self.request.space
            ))
            )

        return PropertyType.objects.bulk_create(insert_list, update_conflicts=True, update_fields=('open_data_slug',), unique_fields=('space', 'name',))
            if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
                if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
                    od_response.total_errored += 1
                    continue # if conflicting objects exist and cannot be merged skip object

                existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)

                if not self._is_obj_identical(field_list, obj, existing_obj):
                    obj.pk = existing_obj['pk']
                    update_list.append(obj)
                else:
                    od_response.total_untouched += 1
            else:
                create_list.append(obj)

        if self.update_existing and len(update_list) > 0:
            model_type.objects.bulk_update(update_list, field_list)
            od_response.total_updated += len(update_list)

        if len(create_list) > 0:
            model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
            od_response.total_created += len(create_list)

        return od_response

    def import_supermarket(self):
        od_response = OpenDataImportResponse()
        datatype = 'store'
        model_type = Supermarket
        field_list = ['name', 'open_data_slug']

        existing_data_slugs = {}
        existing_data_names = {}
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_slug']] = obj
            existing_data_names[obj['name']] = obj

        update_list = []
        create_list = []

        self._update_slug_cache(SupermarketCategory, 'category')
        insert_list = []
        for k in list(self.data[datatype].keys()):
            insert_list.append(Supermarket(
            obj = model_type(
                name=self.data[datatype][k]['name'],
                open_data_slug=k,
                space=self.request.space
            ))
            )
            if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
                if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
                    od_response.total_errored += 1
                    continue # if conflicting objects exist and cannot be merged skip object

                existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)

                if not self._is_obj_identical(field_list, obj, existing_obj):
                    obj.pk = existing_obj['pk']
                    update_list.append(obj)
                else:
                    od_response.total_untouched += 1
            else:
                create_list.append(obj)

        if self.update_existing and len(update_list) > 0:
            model_type.objects.bulk_update(update_list, field_list)
            od_response.total_updated += len(update_list)

        if len(create_list) > 0:
            model_type.objects.bulk_create(create_list, update_conflicts=True, update_fields=field_list, unique_fields=('space', 'name',))
            od_response.total_created += len(create_list)

        # always add open data slug if matching supermarket is found, otherwise relation might fail
        supermarkets = Supermarket.objects.bulk_create(insert_list, unique_fields=('space', 'name',), update_conflicts=True, update_fields=('open_data_slug',))
        self._update_slug_cache(Supermarket, 'store')

        insert_list = []
        for k in list(self.data[datatype].keys()):
            relations = []
            order = 0
@@ -96,115 +311,186 @@ class OpenDataImporter:

        SupermarketCategoryRelation.objects.bulk_create(relations, ignore_conflicts=True, unique_fields=('supermarket', 'category',))
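        # ignore_conflicts makes the relation insert idempotent: supermarket/category links that already exist are skipped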

        return supermarkets
        return od_response

    def import_food(self):
        identifier_list = []
        od_response = OpenDataImportResponse()
        datatype = 'food'
        for k in list(self.data[datatype].keys()):
            identifier_list.append(self.data[datatype][k]['name'])
            identifier_list.append(self.data[datatype][k]['plural_name'])
        model_type = Food
        field_list = ['name', 'open_data_slug']

        existing_objects_flat = []
        existing_objects = {}
        for f in Food.objects.filter(space=self.request.space).filter(name__in=identifier_list).values_list('id', 'name', 'plural_name'):
            existing_objects_flat.append(f[1])
            existing_objects_flat.append(f[2])
            existing_objects[f[1]] = f
            existing_objects[f[2]] = f
        existing_data_slugs = {}
        existing_data_names = {}
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_slug']] = obj
            existing_data_names[obj['name']] = obj

        update_list = []
        create_list = []

        self._update_slug_cache(Unit, 'unit')
        self._update_slug_cache(PropertyType, 'property')
        self._update_slug_cache(SupermarketCategory, 'category')

        unit_g = Unit.objects.filter(space=self.request.space, base_unit__iexact='g').first()
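        # when the space has a unit whose base unit is grams it is used below as the default
        # properties_food_unit for every imported food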

        insert_list = []
        insert_list_flat = []
        update_list = []
        update_field_list = []
        for k in list(self.data[datatype].keys()):
            if not (self.data[datatype][k]['name'] in existing_objects_flat or self.data[datatype][k]['plural_name'] in existing_objects_flat):
                if not (self.data[datatype][k]['name'] in insert_list_flat or self.data[datatype][k]['plural_name'] in insert_list_flat):
                    insert_list.append({'data': {
                        'name': self.data[datatype][k]['name'],
                        'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
                        'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
                        'fdc_id': self.data[datatype][k]['fdc_id'] if self.data[datatype][k]['fdc_id'] != '' else None,
                        'open_data_slug': k,
                        'space': self.request.space.id,
                    }})
                    # build a fake second flat array to prevent duplicate foods from being inserted.
                    # trying to insert a duplicate would throw a db error :(
                    insert_list_flat.append(self.data[datatype][k]['name'])
                    insert_list_flat.append(self.data[datatype][k]['plural_name'])

            obj_dict = {
                'name': self.data[datatype][k]['name'],
                'plural_name': self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
                'supermarket_category_id': self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
                'fdc_id': re.sub(r'\D', '', self.data[datatype][k]['fdc_id']) if self.data[datatype][k]['fdc_id'] != '' else None,
                'open_data_slug': k,
                'properties_food_unit_id': None,
                'space_id': self.request.space.id,
            }
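            # fdc_id values from the open data export may contain stray non-digit characters,
            # so re.sub(r'\D', '', ...) keeps only the numeric FDC identifier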

            if unit_g:
                obj_dict['properties_food_unit_id'] = unit_g.id

            obj = model_type(**obj_dict)

            if obj.open_data_slug in existing_data_slugs or obj.name in existing_data_names:
                if not self._merge_if_conflicting(model_type, obj, existing_data_slugs, existing_data_names):
                    od_response.total_errored += 1
                    continue # if conflicting objects exist and cannot be merged skip object

                existing_obj = self._get_existing_obj(obj, existing_data_slugs, existing_data_names)

                if not self._is_obj_identical(field_list, obj, existing_obj):
                    obj.pk = existing_obj['pk']
                    update_list.append(obj)
                else:
                    od_response.total_untouched += 1
            else:
                if self.data[datatype][k]['name'] in existing_objects:
                    existing_food_id = existing_objects[self.data[datatype][k]['name']][0]
                else:
                    existing_food_id = existing_objects[self.data[datatype][k]['plural_name']][0]
                create_list.append({'data': obj_dict})

                if self.update_existing:
                    update_field_list = ['name', 'plural_name', 'preferred_unit_id', 'preferred_shopping_unit_id', 'supermarket_category_id', 'fdc_id', 'open_data_slug', ]
                    update_list.append(Food(
                        id=existing_food_id,
                        name=self.data[datatype][k]['name'],
                        plural_name=self.data[datatype][k]['plural_name'] if self.data[datatype][k]['plural_name'] != '' else None,
                        supermarket_category_id=self.slug_id_cache['category'][self.data[datatype][k]['store_category']],
                        fdc_id=self.data[datatype][k]['fdc_id'] if self.data[datatype][k]['fdc_id'] != '' else None,
                        open_data_slug=k,
                    ))
                else:
                    update_field_list = ['open_data_slug', ]
                    update_list.append(Food(id=existing_food_id, open_data_slug=k, ))
        if self.update_existing and len(update_list) > 0:
            model_type.objects.bulk_update(update_list, field_list)
            od_response.total_updated += len(update_list)

        Food.load_bulk(insert_list, None)
        if len(update_list) > 0:
            Food.objects.bulk_update(update_list, update_field_list)
        if len(create_list) > 0:
            Food.load_bulk(create_list, None)
            od_response.total_created += len(create_list)

        # --------------- PROPERTY STUFF -----------------------
        model_type = Property
        field_list = ['property_type_id', 'property_amount', 'open_data_food_slug']

        existing_data_slugs = {}
        existing_data_property_types = {}
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_food_slug']] = obj
            existing_data_property_types[obj['property_type_id']] = obj

        update_list = []
        create_list = []

        self._update_slug_cache(Food, 'food')

        food_property_list = []
        # alias_list = []
        for k in list(self.data['food'].keys()):
            for fp in self.data['food'][k]['properties']['type_values']:
                obj = model_type(
                    property_type_id=self.slug_id_cache['property'][fp['property_type']],
                    property_amount=fp['property_value'],
                    open_data_food_slug=k,
                    space=self.request.space,
                )

        for k in list(self.data[datatype].keys()):
            for fp in self.data[datatype][k]['properties']['type_values']:
                # try catch here because somettimes key "k" is not set for he food cache
                try:
                    food_property_list.append(Property(
                        property_type_id=self.slug_id_cache['property'][fp['property_type']],
                        property_amount=fp['property_value'],
                        import_food_id=self.slug_id_cache['food'][k],
                        space=self.request.space,
                    ))
                except KeyError:
                    print(str(k) + ' is not in self.slug_id_cache["food"]')
                if obj.open_data_food_slug in existing_data_slugs and obj.property_type_id in existing_data_property_types and existing_data_slugs[obj.open_data_food_slug] == existing_data_property_types[obj.property_type_id]:
                    existing_obj = existing_data_slugs[obj.open_data_food_slug]

        Property.objects.bulk_create(food_property_list, ignore_conflicts=True, unique_fields=('space', 'import_food_id', 'property_type',))
                    if not self._is_obj_identical(field_list, obj, existing_obj):
                        obj.pk = existing_obj['pk']
                        update_list.append(obj)
                else:
                    create_list.append(obj)

        if self.update_existing and len(update_list) > 0:
            model_type.objects.bulk_update(update_list, field_list)

        if len(create_list) > 0:
            model_type.objects.bulk_create(create_list, ignore_conflicts=True, unique_fields=('space', 'open_data_food_slug', 'property_type',))

        linked_properties = list(FoodProperty.objects.filter(food__space=self.request.space).values_list('property_id', flat=True).all())

        property_food_relation_list = []
        for p in Property.objects.filter(space=self.request.space, import_food_id__isnull=False).values_list('import_food_id', 'id', ):
            property_food_relation_list.append(Food.properties.through(food_id=p[0], property_id=p[1]))
        for p in model_type.objects.filter(space=self.request.space, open_data_food_slug__isnull=False).values_list('open_data_food_slug', 'id', ):
            if p[1] == 147:
                pass
            # slug_id_cache should always exist, don't create relations for already linked properties (ignore_conflicts would do that as well but this is more performant)
            if p[0] in self.slug_id_cache['food'] and p[1] not in linked_properties:
                property_food_relation_list.append(Food.properties.through(food_id=self.slug_id_cache['food'][p[0]], property_id=p[1]))

        FoodProperty.objects.bulk_create(property_food_relation_list, ignore_conflicts=True, unique_fields=('food_id', 'property_id',))
        FoodProperty.objects.bulk_create(property_food_relation_list, unique_fields=('food_id', 'property_id',))
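        # the Food <-> Property links are created via the m2m through model so they can be bulk inserted in one query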

        return insert_list + update_list
        return od_response

    def import_conversion(self):
        od_response = OpenDataImportResponse()
        datatype = 'conversion'
        model_type = UnitConversion
        field_list = ['base_amount', 'base_unit_id', 'converted_amount', 'converted_unit_id', 'food_id', 'open_data_slug']

        self._update_slug_cache(Food, 'food')
        self._update_slug_cache(Unit, 'unit')

        existing_data_slugs = {}
        existing_data_foods = defaultdict(list)
        for obj in model_type.objects.filter(space=self.request.space).values('pk', *field_list):
            existing_data_slugs[obj['open_data_slug']] = obj
            existing_data_foods[obj['food_id']].append(obj)

        update_list = []
        create_list = []

        insert_list = []
        for k in list(self.data[datatype].keys()):
            # try catch here because sometimes key "k" is not set for he food cache
            # try catch here because sometimes key "k" is not set for the food cache
            try:
                insert_list.append(UnitConversion(
                    base_amount=self.data[datatype][k]['base_amount'],
                obj = model_type(
                    base_amount=Decimal(self.data[datatype][k]['base_amount']),
                    base_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['base_unit']],
                    converted_amount=self.data[datatype][k]['converted_amount'],
                    converted_amount=Decimal(self.data[datatype][k]['converted_amount']),
                    converted_unit_id=self.slug_id_cache['unit'][self.data[datatype][k]['converted_unit']],
                    food_id=self.slug_id_cache['food'][self.data[datatype][k]['food']],
                    open_data_slug=k,
                    space=self.request.space,
                    created_by=self.request.user,
                ))
            except KeyError:
                print(str(k) + ' is not in self.slug_id_cache["food"]')
                    created_by_id=self.request.user.id,
                )

        return UnitConversion.objects.bulk_create(insert_list, ignore_conflicts=True, unique_fields=('space', 'base_unit', 'converted_unit', 'food', 'open_data_slug'))
                if obj.open_data_slug in existing_data_slugs:
                    existing_obj = existing_data_slugs[obj.open_data_slug]
                    if not self._is_obj_identical(field_list, obj, existing_obj):
                        obj.pk = existing_obj['pk']
                        update_list.append(obj)
                    else:
                        od_response.total_untouched += 1
                else:
                    matching_existing_found = False
                    if obj.food_id in existing_data_foods:
                        for edf in existing_data_foods[obj.food_id]:
                            if obj.base_unit_id == edf['base_unit_id'] and obj.converted_unit_id == edf['converted_unit_id']:
                                matching_existing_found = True
                                if not self._is_obj_identical(field_list, obj, edf):
                                    obj.pk = edf['pk']
                                    update_list.append(obj)
                                else:
                                    od_response.total_untouched += 1
                    if not matching_existing_found:
                        create_list.append(obj)
            except KeyError as e:
                traceback.print_exc()
                od_response.total_errored += 1
                print(self.data[datatype][k]['food'] + ' is not in self.slug_id_cache["food"]')

        if self.update_existing and len(update_list) > 0:
            od_response.total_updated = model_type.objects.bulk_update(update_list, field_list)
            od_response.total_errored += len(update_list) - od_response.total_updated
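            # bulk_update returns the number of rows it actually updated, so any shortfall is counted as errored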

        if len(create_list) > 0:
            objs_created = model_type.objects.bulk_create(create_list, ignore_conflicts=True, unique_fields=('space', 'base_unit', 'converted_unit', 'food', 'open_data_slug'))
            od_response.total_created = len(objs_created)
            od_response.total_errored += len(create_list) - od_response.total_created

        return od_response

@@ -45,12 +45,12 @@ class FoodPropertyHelper:
            conversions = uch.get_conversions(i)
            for pt in property_types:
                found_property = False
                if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None:
                    computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
                    computed_properties[pt.id]['missing_value'] = i.food.properties_food_unit is None
                if i.food.properties_food_amount == 0 or i.food.properties_food_unit is None: # if food is configured incorrectly
                    computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': None}
                    computed_properties[pt.id]['missing_value'] = True
                else:
                    for p in i.food.properties.all():
                        if p.property_type == pt:
                        if p.property_type == pt and p.property_amount is not None:
                            for c in conversions:
                                if c.unit == i.food.properties_food_unit:
                                    found_property = True
@@ -58,13 +58,17 @@ class FoodPropertyHelper:
                                    computed_properties[pt.id]['food_values'] = self.add_or_create(
                                        computed_properties[p.property_type.id]['food_values'], c.food.id, (c.amount / i.food.properties_food_amount) * p.property_amount, c.food)
                    if not found_property:
                        computed_properties[pt.id]['missing_value'] = True
                        computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
                        if i.amount == 0: # don't count ingredients without an amount as missing
                            computed_properties[pt.id]['missing_value'] = computed_properties[pt.id]['missing_value'] or False # don't override if another food was already missing
                            computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': 0}
                        else:
                            computed_properties[pt.id]['missing_value'] = True
                            computed_properties[pt.id]['food_values'][i.food.id] = {'id': i.food.id, 'food': i.food.name, 'value': None}
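                            # foods without a usable property value are reported as None and flagged via missing_value,
                            # while ingredients with amount 0 are not counted as missing data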

        return computed_properties

    # small dict helper to add to existing key or create new, probably a better way of doing this
    # TODO move to central helper ?
    # TODO move to central helper ? --> use defaultdict
    @staticmethod
    def add_or_create(d, key, value, food):
        if key in d:

@@ -231,7 +231,7 @@ def get_recipe_properties(space, property_data):
                    'id': pt.id,
                    'name': pt.name,
                },
                'property_amount': parse_servings(property_data[properties[p]]) / float(property_data['servingSize']),
                'property_amount': parse_servings(property_data[properties[p]]) / parse_servings(property_data['servingSize']),
            })

    return recipe_properties