Use CSRF only for curation requests

Daoud Clarke 2023-11-19 20:48:18 +00:00
parent 5874720801
commit a2fd3d95d8
5 changed files with 23 additions and 43 deletions

@@ -1,27 +0,0 @@
-from ninja import NinjaAPI
-from ninja.security import django_auth
-
-import mwmbl.crawler.app as crawler
-from mwmbl.platform import curate
-from mwmbl.search_setup import queued_batches, index_path, ranker, batch_cache
-from mwmbl.tinysearchengine import search
-
-
-def create_api(version):
-    # Set csrf to True to all cookie-based authentication
-    api = NinjaAPI(version=version, csrf=True)
-
-    search_router = search.create_router(ranker)
-    api.add_router("/search/", search_router)
-
-    crawler_router = crawler.create_router(batch_cache=batch_cache, queued_batches=queued_batches)
-    api.add_router("/crawler/", crawler_router)
-
-    curation_router = curate.create_router(index_path)
-    api.add_router("/curation/", curation_router, auth=django_auth)
-    return api
-
-
-# Work around because Django-Ninja doesn't allow using multiple URLs for the same thing
-api_original = create_api("0.1")
-api_v1 = create_api("1.0.0")
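
The deleted module above put csrf=True on one shared NinjaAPI, so Django's CSRF check applied to every sub-router, including the cookie-less crawler clients. A minimal sketch of that failure mode, assuming a local dev server (URL and payload are illustrative):

```python
# Minimal sketch, assuming a local dev server. Under the old shared
# NinjaAPI(csrf=True), a POST with no csrftoken cookie/header pair is
# refused with 403; after this commit the crawler API skips the CSRF
# check (an empty body would then fail Batch validation with 422 instead).
import requests

resp = requests.post(
    "http://localhost:8000/crawler/batches/",  # path as wired in urls.py below
    json={},  # a real crawler sends a Batch document here
)
print(resp.status_code)
```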

@@ -10,7 +10,7 @@ from uuid import uuid4
 import boto3
 import requests
 from fastapi import HTTPException
-from ninja import Router
+from ninja import NinjaAPI
 from redis import Redis
 
 from mwmbl.crawler.batch import Batch, NewBatchRequest, HashedBatch
@@ -50,12 +50,8 @@ def upload(data: bytes, name: str):
 last_batch = None
 
 
-def create_router(batch_cache: BatchCache, queued_batches: Queue) -> Router:
-    router = Router(tags=["crawler"])
-
-    # TODO: ensure tables are created before crawler code is used:
-    #
-    #     url_db.create_tables()
+def create_router(batch_cache: BatchCache, queued_batches: Queue, version: str) -> NinjaAPI:
+    router = NinjaAPI(urls_namespace=f"crawler-{version}")
 
     @router.post('/batches/')
     def post_batch(request, batch: Batch):
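
Each sub-API is now a standalone NinjaAPI instance, and the urls_namespace argument is what lets several of them, in two versions each, coexist in the same project. A sketch of the rule, with illustrative names:

```python
# Sketch: django-ninja refuses to register two NinjaAPI instances that
# resolve to the same URL namespace; giving each instance a versioned
# namespace, as this commit does, avoids that ConfigError.
from ninja import NinjaAPI

crawler_v01 = NinjaAPI(urls_namespace="crawler-0.1")
crawler_v1 = NinjaAPI(urls_namespace="crawler-1.0.0")  # distinct, so both can mount
```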

@@ -2,7 +2,7 @@ from logging import getLogger
 from typing import Any
 from urllib.parse import parse_qs
 
-from ninja import Router
+from ninja import Router, NinjaAPI
 
 from mwmbl.indexer.update_urls import get_datetime_from_timestamp
 from mwmbl.models import UserCuration
@@ -19,8 +19,8 @@ MAX_CURATED_SCORE = 1_111_111.0
 logger = getLogger(__name__)
 
 
-def create_router(index_path: str) -> Router:
-    router = Router(tags=["user"])
+def create_router(index_path: str, version: str) -> NinjaAPI:
+    router = NinjaAPI(urls_namespace=f"curate-{version}", csrf=True)
 
     @router.post("/begin")
     def user_begin_curate(request, curate_begin: make_curation_type(CurateBegin)):
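
With csrf=True kept only here, a cookie-authenticated curation client still has to echo Django's csrftoken cookie back in the X-CSRFToken header on unsafe methods. A hedged sketch (login flow and payload are illustrative, not the exact curation schema):

```python
# Sketch, assuming a session already logged in via the allauth views so
# that the sessionid and csrftoken cookies are set.
import requests

session = requests.Session()
# ... log in here so session.cookies holds sessionid and csrftoken ...
token = session.cookies.get("csrftoken", "")
resp = session.post(
    "http://localhost:8000/curation/begin",  # path per urls.py below
    json={"url": "https://example.com", "results": []},  # illustrative payload
    headers={"X-CSRFToken": token},  # required because csrf=True
)
```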

@@ -1,6 +1,6 @@
 from logging import getLogger
 
-from ninja import Router
+from ninja import NinjaAPI
 
 from mwmbl.tinysearchengine.rank import HeuristicRanker
 
@@ -10,8 +10,8 @@ logger = getLogger(__name__)
 SCORE_THRESHOLD = 0.25
 
 
-def create_router(ranker: HeuristicRanker) -> Router:
-    router = Router(tags=["search"])
+def create_router(ranker: HeuristicRanker, version: str) -> NinjaAPI:
+    router = NinjaAPI(urls_namespace=f"search-{version}")
 
     @router.get("")
     def search(request, s: str):
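
The search API stays anonymous and CSRF-free, so the `@router.get("")` handler answers a plain GET at the mount prefix. A small sketch assuming a local server:

```python
# Sketch: no cookies or tokens needed for search.
import requests

resp = requests.get("http://localhost:8000/api/v1/search/", params={"s": "mwmbl"})
print(resp.json())  # JSON results from the HeuristicRanker
```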

@@ -17,16 +17,27 @@ Including another URLconf
 from django.contrib import admin
 from django.urls import path, include
 
-from mwmbl.api import api_v1, api_original
+import mwmbl.crawler.app as crawler
+from mwmbl.platform import curate
+from mwmbl.search_setup import queued_batches, index_path, ranker, batch_cache
+from mwmbl.tinysearchengine import search
 from mwmbl.views import home_fragment, fetch_url, index
 
 urlpatterns = [
     path('admin/', admin.site.urls),
-    path('api/v1/', api_v1.urls),
     path('accounts/', include('allauth.urls')),
     path('', index, name="index"),
     path('app/home/', home_fragment, name="home"),
     path('app/fetch/', fetch_url, name="fetch_url"),
-    path('', api_original.urls),
+
+    # TODO: this is the old API, deprecated and to be removed once all clients have moved over
+    path("search/", search.create_router(ranker, "0.1").urls),
+    path("crawler/", crawler.create_router(batch_cache=batch_cache, queued_batches=queued_batches, version="0.1").urls),
+    path("curation/", curate.create_router(index_path, version="0.1").urls),
+
+    # New API
+    path("api/v1/search/", search.create_router(ranker, "1.0.0").urls),
+    path("api/v1/crawler/", crawler.create_router(batch_cache=batch_cache, queued_batches=queued_batches, version="1.0.0").urls),
+    path("api/v1/curation/", curate.create_router(index_path, version="1.0.0").urls),
 ]
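
Both generations of URLs stay live during the migration: the legacy root-level prefixes carry version 0.1 and the /api/v1/ prefixes carry 1.0.0, each backed by its own namespaced NinjaAPI instance. A sketch of the equivalence, assuming a local server:

```python
# Sketch: the same router factory answers on both the deprecated and the
# versioned prefix until old clients move over.
import requests

legacy = requests.get("http://localhost:8000/search/", params={"s": "test"})
v1 = requests.get("http://localhost:8000/api/v1/search/", params={"s": "test"})
assert legacy.status_code == v1.status_code  # same handler behind both paths
```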