diff --git a/backend/aiproject/settings.py b/backend/aiproject/settings.py index c4824a0c..305c4b60 100644 --- a/backend/aiproject/settings.py +++ b/backend/aiproject/settings.py @@ -33,7 +33,7 @@ HOSTNAME = env("HOSTNAME", default="127.0.0.1") EXPORT_TOOL_API_URL = env( "EXPORT_TOOL_API_URL", - default="https://galaxy-api.hotosm.org/v1/raw-data/current-snapshot/", + default=" https://api-prod.raw-data.hotosm.org/v1", ) ALLOWED_HOSTS = ["localhost", "127.0.0.1", HOSTNAME] diff --git a/backend/core/utils.py b/backend/core/utils.py index b423b627..0f4ac3f7 100644 --- a/backend/core/utils.py +++ b/backend/core/utils.py @@ -1,8 +1,11 @@ import concurrent.futures +import io import json import math import os import re +import time +import zipfile from datetime import datetime from uuid import uuid4 from xml.dom import ValidationErr @@ -55,35 +58,65 @@ def is_dir_empty(directory_path): return not any(os.scandir(directory_path)) +class RawDataAPI: + def __init__(self, BASE_API_URL): + self.BASE_API_URL = BASE_API_URL + + def request_snapshot(self, geometry): + headers = {"accept": "application/json", "Content-Type": "application/json"} + # Lets start with buildings for now + payload = { + "geometry": json.loads(geometry), + "filters": {"tags": {"all_geometry": {"join_or": {"building": []}}}}, + "geometryType": ["polygon"], + } + response = requests.post( + f"{self.BASE_API_URL}/snapshot/", data=json.dumps(payload), headers=headers + ) + response.raise_for_status() + return response.json() + + def poll_task_status(self, task_link): + stop_loop = False + while not stop_loop: + check_result = requests.get(url=f"{self.BASE_API_URL}{task_link}") + check_result.raise_for_status() + res = check_result.json() + if res["status"] == "SUCCESS" or res["status"] == "FAILED": + stop_loop = True + time.sleep(1) + return res + + +import logging -def request_rawdata(request_params): - """will make call to galaxy API & provides response as json + +def request_rawdata(geometry): + """will 
make a call to the Raw Data API and return the snapshot download URL      Args: -        request_params (dict): Galaxy API Request Body +        geometry (str): GeoJSON geometry string for the snapshot request      Raises: -        ImportError: If galaxy url is not exists +        RuntimeError: If the Raw Data API task does not succeed      Returns:         Response(json): API Response     """
+ ) + snapshot_url = task_result["result"]["download_url"] + return snapshot_url def process_rawdata(file_download_url, aoi_id, feedback=False): @@ -99,7 +132,6 @@ def process_rawdata(file_download_url, aoi_id, feedback=False): os.makedirs(path) file_temp_path = os.path.join(path, f"{str(uuid4())}.zip") # unique open(file_temp_path, "wb").write(r.content) - print("Zip File from API wrote to disk") with ZipFile(file_temp_path, "r") as zipObj: # Get a list of all archived file names from the zip listOfFileNames = zipObj.namelist() diff --git a/backend/core/views.py b/backend/core/views.py index 17ef09fc..5cae2fb2 100644 --- a/backend/core/views.py +++ b/backend/core/views.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import json +import logging import os import pathlib import shutil @@ -29,6 +30,7 @@ from login.permissions import IsOsmAuthenticated from orthogonalizer import othogonalize_poly from osmconflator import conflate_geojson +from predictor import predict from rest_framework import decorators, serializers, status, viewsets from rest_framework.decorators import api_view from rest_framework.exceptions import ValidationError @@ -36,8 +38,6 @@ from rest_framework.views import APIView from rest_framework_gis.filters import InBBoxFilter, TMSTileFilter -from predictor import predict - from .models import ( AOI, Dataset, @@ -61,12 +61,7 @@ PredictionParamSerializer, ) from .tasks import train_model -from .utils import ( - get_dir_size, - gpx_generator, - process_rawdata, - request_rawdata, -) +from .utils import get_dir_size, gpx_generator, process_rawdata, request_rawdata def home(request): @@ -280,13 +275,7 @@ def post(self, request, feedbackaoi_id, *args, **kwargs): try: obj.label_status = 0 obj.save() - raw_data_params = { - "geometry": json.loads(obj.geom.geojson), - "filters": {"tags": {"polygon": {"building": []}}}, - "geometryType": ["polygon"], - } - result = request_rawdata(raw_data_params) - file_download_url = result["download_url"] + 
file_download_url = request_rawdata(obj.geom.geojson) process_rawdata(file_download_url, feedbackaoi_id, feedback=True) obj.label_status = 1 obj.label_fetched = datetime.utcnow() @@ -296,6 +285,7 @@ def post(self, request, feedbackaoi_id, *args, **kwargs): obj.label_status = -1 obj.save() # raise ex + logging.error(ex) return Response("OSM Fetch Failed", status=500) @@ -317,13 +307,7 @@ def post(self, request, aoi_id, *args, **kwargs): try: obj.label_status = 0 obj.save() - raw_data_params = { - "geometry": json.loads(obj.geom.geojson), - "filters": {"tags": {"polygon": {"building": []}}}, - "geometryType": ["polygon"], - } - result = request_rawdata(raw_data_params) - file_download_url = result["download_url"] + file_download_url = request_rawdata(obj.geom.geojson) process_rawdata(file_download_url, aoi_id) obj.label_status = 1 obj.label_fetched = datetime.utcnow() @@ -560,7 +544,19 @@ def post(self, request, *args, **kwargs): f"training_{training_instance.id}", "checkpoint.tf", ) - geojson_data = predict(bbox=bbox,model_path=model_path,zoom_level=zoom_level,tms_url=source, tile_size=DEFAULT_TILE_SIZE,confidence=deserialized_data["confidence"] / 100 if "confidence" in deserialized_data else 0.5,tile_overlap_distance=deserialized_data["tile_overlap_distance"] if "tile_overlap_distance" in deserialized_data else 0.15) + geojson_data = predict( + bbox=bbox, + model_path=model_path, + zoom_level=zoom_level, + tms_url=source, + tile_size=DEFAULT_TILE_SIZE, + confidence=deserialized_data["confidence"] / 100 + if "confidence" in deserialized_data + else 0.5, + tile_overlap_distance=deserialized_data["tile_overlap_distance"] + if "tile_overlap_distance" in deserialized_data + else 0.15, + ) print( f"It took {round(time.time()-start_time)}sec for generating predictions" ) @@ -578,7 +574,9 @@ def post(self, request, *args, **kwargs): else 15, ) - print(f"Prediction API took ({round(time.time()-start_time)} sec) in total") + print( + f"Prediction API took 
({round(time.time()-start_time)} sec) in total" + ) ## TODO : can send osm xml format from here as well using geojson2osm return Response(geojson_data, status=status.HTTP_201_CREATED) diff --git a/backend/docker_sample_env b/backend/docker_sample_env index c57331a2..b07637fd 100644 --- a/backend/docker_sample_env +++ b/backend/docker_sample_env @@ -1,7 +1,7 @@ DEBUG=True SECRET_KEY=yl2w)c0boi_ma-1v5)935^2#&m*r!1s9z9^*9e5co^08_ixzo6 DATABASE_URL=postgis://postgres:admin@pgsql:5432/ai -EXPORT_TOOL_API_URL=https://galaxy-api.hotosm.org/v1/raw-data/current-snapshot/ +EXPORT_TOOL_API_URL=https://raw api url.hotosm.org/v1 CORS_ALLOWED_ORIGINS=http://127.0.0.1:3000 OSM_CLIENT_ID= OSM_CLIENT_SECRET= diff --git a/backend/sample_env b/backend/sample_env index 6104da2a..21e8bae9 100644 --- a/backend/sample_env +++ b/backend/sample_env @@ -1,6 +1,6 @@ SECRET_KEY=yl2w)c0boi_ma-1v5)935^2#&m*r!1s9z9^*9e5co^08_ixzo6 DATABASE_URL=postgis://admin:password@localhost:5432/ai -EXPORT_TOOL_API_URL=http://44.203.33.53:8000/raw-data/current-snapshot/ +EXPORT_TOOL_API_URL=MY_RAW_DATA_URL CORS_ALLOWED_ORIGINS=http://localhost:3000 HOSTNAME= GDAL_LIBRARY_PATH=''