Changed functionality to only display a warning message

Lavanya Mehndiratta
2024-11-25 21:48:06 -05:00
parent e6087d5129
commit 9df75f551c
2 changed files with 33 additions and 2 deletions

@@ -1455,13 +1455,21 @@ class RecipeUrlImportView(APIView):
             url = serializer.validated_data.get('url', None)
             data = unquote(serializer.validated_data.get('data', None))
+            duplicate = False
+            if url:
+                # Check for existing recipes with provided url
+                existing_recipe = Recipe.objects.filter(source_url=url).first()
+                if existing_recipe:
+                    duplicate = True
             if not url and not data:
                 return Response({'error': True, 'msg': _('Nothing to do.')}, status=status.HTTP_400_BAD_REQUEST)
             elif url and not data:
                 if re.match('^(https?://)?(www\\.youtube\\.com|youtu\\.be)/.+$', url):
                     if validate_import_url(url):
-                        return Response({'recipe_json': get_from_youtube_scraper(url, request), 'recipe_images': [], }, status=status.HTTP_200_OK)
+                        return Response({'recipe_json': get_from_youtube_scraper(url, request), 'recipe_images': [], 'duplicate': duplicate}, status=status.HTTP_200_OK)
                 if re.match('^(.)*/view/recipe/[0-9]+/[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', url):
                     recipe_json = requests.get(
                         url.replace('/view/recipe/', '/api/recipe/').replace(re.split('/view/recipe/[0-9]+', url)[1], '') + '?share='
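Since the branches below only ever read the boolean, the duplicate check added in this hunk could also be written with exists(), which lets the database stop at the first match instead of materializing a Recipe instance. A minimal sketch of that variant, under the assumption that the matched recipe object is never needed elsewhere:

# Sketch of a leaner equivalent of the duplicate check above, assuming
# only the boolean is used: exists() issues a SELECT ... LIMIT 1 and
# skips constructing a model instance.
duplicate = bool(url) and Recipe.objects.filter(source_url=url).exists()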
@@ -1476,7 +1484,7 @@ class RecipeUrlImportView(APIView):
                                                      filetype=pathlib.Path(recipe_json['image']).suffix),
                                         name=f'{uuid.uuid4()}_{recipe.pk}{pathlib.Path(recipe_json["image"]).suffix}')
                     recipe.save()
-                return Response({'link': request.build_absolute_uri(reverse('view_recipe', args={recipe.pk}))}, status=status.HTTP_201_CREATED)
+                return Response({'link': request.build_absolute_uri(reverse('view_recipe', args={recipe.pk})), 'duplicate': duplicate}, status=status.HTTP_201_CREATED)
             else:
                 try:
                     if validate_import_url(url):
@@ -1511,6 +1519,7 @@ class RecipeUrlImportView(APIView):
                         return Response({
                             'recipe_json': helper.get_from_scraper(scrape, request),
                             'recipe_images': list(dict.fromkeys(get_images_from_soup(scrape.soup, url))),
+                            'duplicate': duplicate
                         },
                             status=status.HTTP_200_OK)
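With all three import branches now returning the flag, a client can surface a duplicate warning without blocking the import, which is the behaviour the commit title describes. A hedged client-side sketch of consuming the new field; the endpoint path, host, token, and payload shape below are illustrative assumptions, not taken from this diff:

# Hypothetical consumer of the new 'duplicate' flag: warn, don't block.
# Endpoint path, host, token, and payload shape are assumptions.
import requests

resp = requests.post(
    'https://tandoor.example.com/api/recipe-from-source/',
    headers={'Authorization': 'Bearer <token>'},
    json={'url': 'https://example.com/some-recipe', 'data': ''},
)
resp.raise_for_status()
body = resp.json()
if body.get('duplicate'):
    print('Warning: a recipe with this source URL already exists.')
# The scraped payload is still returned, so the user may proceed anyway.
recipe_json = body.get('recipe_json')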