Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
|
|
a379f21619 | |
|
|
6ed79621b0 | |
|
|
56f52a4edc | |
|
|
65cb6680eb |
|
|
@ -1,18 +0,0 @@
|
||||||
.git
|
|
||||||
.gitignore
|
|
||||||
.venv
|
|
||||||
**/__pycache__
|
|
||||||
**/*.pyc
|
|
||||||
**/*.pyo
|
|
||||||
**/*.pyd
|
|
||||||
.env
|
|
||||||
.env.*
|
|
||||||
*.log
|
|
||||||
.vscode
|
|
||||||
.idea
|
|
||||||
.DS_Store
|
|
||||||
Thumbs.db
|
|
||||||
**/downloaded_images
|
|
||||||
**/data
|
|
||||||
model_export/train.py
|
|
||||||
model_export/dataset.py
|
|
||||||
|
|
@ -7,4 +7,5 @@
|
||||||
**downloaded_images**
|
**downloaded_images**
|
||||||
**model_export**
|
**model_export**
|
||||||
**cpython**
|
**cpython**
|
||||||
.claude
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
27
Dockerfile
27
Dockerfile
|
|
@ -1,27 +0,0 @@
|
||||||
FROM python:3.13-slim
|
|
||||||
|
|
||||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
|
||||||
PYTHONUNBUFFERED=1 \
|
|
||||||
PIP_NO_CACHE_DIR=1 \
|
|
||||||
PYTHONPATH=/app
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
|
||||||
build-essential \
|
|
||||||
curl \
|
|
||||||
git \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
COPY req.txt ./
|
|
||||||
RUN pip install --upgrade pip && pip install -r req.txt
|
|
||||||
|
|
||||||
COPY dev_backend ./dev_backend
|
|
||||||
COPY custom_search_api ./custom_search_api
|
|
||||||
COPY model_export ./model_export
|
|
||||||
|
|
||||||
WORKDIR /app/dev_backend
|
|
||||||
|
|
||||||
EXPOSE 8000
|
|
||||||
|
|
||||||
CMD ["uvicorn", "main:api", "--host", "0.0.0.0", "--port", "8000"]
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
from typing import List, Optional, Any, Dict
|
|
||||||
|
|
||||||
|
|
||||||
class SearchResultItem(BaseModel):
    """One result entry from a Google Custom Search JSON response.

    Field names deliberately mirror the upstream JSON keys (camelCase
    included) so instances can be built directly from the raw payload.
    Every field is optional because the API omits keys freely.
    """

    title: Optional[str] = None
    link: Optional[str] = None
    # camelCase on purpose: matches the upstream "displayLink" key.
    displayLink: Optional[str] = None
    snippet: Optional[str] = None
    formattedUrl: Optional[str] = None
    # Arbitrary structured metadata (images, metatags, ...) — kept untyped.
    pagemap: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
class SearchResponse(BaseModel):
    """Normalized envelope returned by the /search endpoint.

    Wraps the parsed result items together with the original query and the
    untouched upstream payload (``raw``) for debugging/clients that need it.
    """

    query: str
    # Upstream reports totalResults as a string, so it is kept as str here.
    total_results: Optional[str] = None
    search_time: Optional[float] = None
    items: List[SearchResultItem] = Field(default_factory=list)
    # Full, unmodified Google CSE response body.
    raw: Optional[Dict[str, Any]] = None
|
||||||
|
|
@ -1,47 +0,0 @@
|
||||||
import os
|
|
||||||
import httpx
|
|
||||||
from typing import Optional, Dict, Any
|
|
||||||
from dotenv import load_dotenv
|
|
||||||
|
|
||||||
load_dotenv()
|
|
||||||
|
|
||||||
GOOGLE_CSE_ENDPOINT = "https://www.googleapis.com/customsearch/v1"


class CustomSearchService:
    """Thin async client for the Google Custom Search (CSE) JSON API.

    Credentials come from explicit constructor arguments or fall back to the
    GEMINI_API_KEY / SEARCH_ENGINE_ID environment variables.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        cx: Optional[str] = None,
        timeout: float = 30.0,
    ):
        """Resolve credentials, falling back to the environment.

        Args:
            api_key: Google API key; defaults to env GEMINI_API_KEY.
            cx: Custom Search Engine id; defaults to env SEARCH_ENGINE_ID.
            timeout: per-request HTTP timeout in seconds.

        Raises:
            ValueError: if either credential is still missing after fallback.
        """
        self.api_key = api_key or os.getenv("GEMINI_API_KEY")
        self.cx = cx or os.getenv("SEARCH_ENGINE_ID")
        self.timeout = timeout

        if not self.api_key:
            raise ValueError("GEMINI_API_KEY is not set in environment")
        if not self.cx:
            raise ValueError("SEARCH_ENGINE_ID is not set in environment")

    async def search(
        self,
        query: str,
        num: int = 10,
        start: int = 1,
        extra_params: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Run one CSE query and return the decoded JSON payload.

        Args:
            query: free-text search string.
            num: number of results to request (the API caps this at 10).
            start: 1-based index of the first result, for pagination.
            extra_params: raw query parameters merged over the defaults.

        Returns:
            The parsed JSON response body as a dict.

        Raises:
            httpx.HTTPStatusError: on non-2xx upstream responses.
            httpx.RequestError: on network-level failures.
        """
        params: Dict[str, Any] = {
            "key": self.api_key,
            "cx": self.cx,
            "q": query,
            "num": num,
            "start": start,
        }
        if extra_params:
            params.update(extra_params)
        # SECURITY FIX: the previous version printed `params` verbatim, which
        # leaked the API key to stdout/container logs. Redact it before echoing.
        safe_params = {**params, "key": "***"}
        print(f"Making request to Google CSE with params: {safe_params}")
        async with httpx.AsyncClient(timeout=self.timeout) as client:
            response = await client.get(GOOGLE_CSE_ENDPOINT, params=params)
            response.raise_for_status()
            return response.json()
|
|
||||||
|
|
@ -1,59 +0,0 @@
|
||||||
import logging
|
|
||||||
from typing import Optional
|
|
||||||
from fastapi import APIRouter, HTTPException, Query
|
|
||||||
from fastapi.responses import JSONResponse
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
from .service import CustomSearchService
|
|
||||||
from .serializers import SearchResponse, SearchResultItem
|
|
||||||
|
|
||||||
# Router mounted by the main app under the /custom_search prefix.
app_router = APIRouter()

# NOTE(review): basicConfig here reconfigures the root logger every time this
# module is imported; consider moving it to the application entry point so a
# library import has no global logging side effect.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@app_router.get("/search", response_model=SearchResponse)
async def search_endpoint(
    q: str = Query(..., description="Search query string"),
    num: int = Query(10, ge=1, le=10, description="Number of results to return (1-10)"),
    start: int = Query(1, ge=1, description="Start index for pagination"),
):
    """Proxy a query to Google CSE and return a normalized SearchResponse.

    Error mapping:
        ValueError (missing credentials)         -> 500
        httpx.HTTPStatusError (upstream non-2xx) -> upstream status code
        httpx.RequestError (network failure)     -> 502
        anything else                            -> 500
    """
    try:
        # Built per-request so a missing env credential surfaces as a 500
        # response instead of crashing the app at import time.
        service = CustomSearchService()
        data = await service.search(query=q, num=num, start=start)

        items = [
            SearchResultItem(
                title=item.get("title"),
                link=item.get("link"),
                displayLink=item.get("displayLink"),
                snippet=item.get("snippet"),
                formattedUrl=item.get("formattedUrl"),
                pagemap=item.get("pagemap"),
            )
            for item in data.get("items", [])
        ]

        # "or {}" guards against an explicit null searchInformation value.
        search_info = data.get("searchInformation", {}) or {}

        return SearchResponse(
            query=q,
            total_results=search_info.get("totalResults"),
            search_time=search_info.get("searchTime"),
            items=items,
            raw=data,
        )

    # HTTPExceptions raised below propagate; sibling handlers don't re-catch them.
    except ValueError as e:
        log.error("Configuration error: %s", e)
        raise HTTPException(status_code=500, detail=str(e))
    except httpx.HTTPStatusError as e:
        log.error("Google CSE returned %s: %s", e.response.status_code, e.response.text)
        raise HTTPException(status_code=e.response.status_code, detail=e.response.text)
    except httpx.RequestError as e:
        log.error("Network error calling Google CSE: %s", e)
        raise HTTPException(status_code=502, detail=f"Upstream request failed: {e}")
    except Exception as e:
        # FIX: log.exception (not log.error) so the traceback of genuinely
        # unexpected failures is preserved in the logs; lazy %-args avoid
        # eager f-string formatting on every log call.
        log.exception("Unexpected error: %s", e)
        raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
@ -25,8 +25,8 @@ DATABASE_URL = URL.create(
|
||||||
|
|
||||||
|
|
||||||
async def get_qdrant_client()->AsyncGenerator[AsyncQdrantClient,None]:
|
async def get_qdrant_client()->AsyncGenerator[AsyncQdrantClient,None]:
|
||||||
qdrant_url = os.getenv("QDRANT_URL", "http://localhost:6333")
|
# Replace with your Qdrant URL
|
||||||
client = AsyncQdrantClient(url=qdrant_url, timeout=60)
|
client = AsyncQdrantClient(url="http://localhost:6333", timeout=60)
|
||||||
try:
|
try:
|
||||||
yield client
|
yield client
|
||||||
finally:
|
finally:
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ from dotenv import load_dotenv
|
||||||
from db_setup import get_qdrant_client,get_session
|
from db_setup import get_qdrant_client,get_session
|
||||||
from mysql_process.views import app_router as mysql_router
|
from mysql_process.views import app_router as mysql_router
|
||||||
from vector_db_router.views import app_router as vector_db_router
|
from vector_db_router.views import app_router as vector_db_router
|
||||||
from custom_search_api.views import app_router as custom_search_router
|
|
||||||
load_dotenv()
|
load_dotenv()
|
||||||
|
|
||||||
api = FastAPI(
|
api = FastAPI(
|
||||||
|
|
@ -20,4 +19,3 @@ api = FastAPI(
|
||||||
|
|
||||||
api.include_router(mysql_router,prefix="/mysql",tags=["mysql_process"])
|
api.include_router(mysql_router,prefix="/mysql",tags=["mysql_process"])
|
||||||
api.include_router(vector_db_router,prefix="/collection",tags=["vector_db"])
|
api.include_router(vector_db_router,prefix="/collection",tags=["vector_db"])
|
||||||
api.include_router(custom_search_router,prefix="/custom_search",tags=["custom_search"])
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
# Local development stack: MySQL + Qdrant + the FastAPI backend image.
services:
  mysql:
    image: mysql:8.0
    container_name: listing_radar_mysql
    restart: unless-stopped
    environment:
      # ${VAR:-default} lets a .env file override; defaults are dev-only creds.
      MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD:-rootpass}
      MYSQL_DATABASE: ${MYSQL_DATABASE:-listing_radar}
      MYSQL_USER: ${MYSQL_USER:-app}
      MYSQL_PASSWORD: ${MYSQL_PASSWORD:-apppass}
    ports:
      - "${MYSQL_PORT:-3306}:3306"
    volumes:
      # Named volume so data survives container recreation.
      - mysql_data:/var/lib/mysql
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "root", "-p${MYSQL_ROOT_PASSWORD:-rootpass}"]
      interval: 10s
      timeout: 5s
      retries: 10

  qdrant:
    image: qdrant/qdrant:latest
    container_name: listing_radar_qdrant
    restart: unless-stopped
    ports:
      - "6333:6333"
      - "6334:6334"
    volumes:
      - qdrant_data:/qdrant/storage
    healthcheck:
      # bash /dev/tcp probe checks the port without needing curl/wget in the image.
      test: ["CMD-SHELL", "bash -c ':> /dev/tcp/127.0.0.1/6333' || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 10

  api:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: listing_radar_api
    restart: unless-stopped
    depends_on:
      mysql:
        condition: service_healthy   # wait for the mysqladmin ping to pass
      qdrant:
        condition: service_started
    environment:
      # Service names resolve on the compose network, hence "mysql"/"qdrant" hosts.
      MYSQL_HOST: mysql
      MYSQL_PORT: 3306
      MYSQL_USER: ${MYSQL_USER:-app}
      MYSQL_PASSWORD: ${MYSQL_PASSWORD:-apppass}
      MYSQL_DATABASE: ${MYSQL_DATABASE:-listing_radar}
      QDRANT_URL: http://qdrant:6333
      GEMINI_API_KEY: ${GEMINI_API_KEY}
      SEARCH_ENGINE_ID: ${SEARCH_ENGINE_ID}
    ports:
      - "8000:8000"
    env_file:
      - .env

volumes:
  mysql_data:
  qdrant_data:
|
|
||||||
Loading…
Reference in New Issue