refactor: migrando dados ficticios da FakerAPI para MySQL

main
parent cc02d04097
commit 53ff0d6049

@ -12,11 +12,16 @@ class Settings(BaseSettings):
# Core database credentials (PostgreSQL, tools store).
db_password: str
db_name: str
# FakerAPI integration (legacy fictitious-data provider).
fakerapi_base_url: str = "https://fakerapi.it/api/v2"
fakerapi_locale: str = "pt_BR"
# Fixed seed so FakerAPI returns reproducible data sets.
fakerapi_seed: int = 42
fakerapi_products_quantity: int = 50
fakerapi_persons_quantity: int = 120
# Mock database (MySQL) for fictitious business data
mock_db_host: str = "127.0.0.1"
mock_db_port: int = 3306
mock_db_user: str = "root"
mock_db_password: str = ""
mock_db_name: str = "orquestrador_mock"
# When set, connect via the Cloud SQL Unix socket instead of host/port.
mock_db_cloud_sql_connection_name: str | None = None
# Master switch for seeding the mock database with fictitious data.
mock_seed_enabled: bool = True
# Run the respective seeds automatically on application startup.
auto_seed_tools: bool = True
auto_seed_mock: bool = True
environment: str = "production"
debug: bool = False
@ -24,6 +29,10 @@ class Settings(BaseSettings):
# Cloud SQL (PostgreSQL) instance connection name; when set, the engine
# connects through the /cloudsql Unix socket.
cloud_sql_connection_name: str | None = None
# Cloud Run networking (for deploy script / documentation)
run_vpc_connector: str | None = None
run_vpc_egress: str = "private-ranges-only"
class Config:
    # Load values from a local .env file when present (pydantic BaseSettings).
    env_file = ".env"
    # Ignore environment variables that have no matching field instead of
    # raising a validation error.
    extra = "ignore"

@ -3,17 +3,16 @@ from sqlalchemy.orm import sessionmaker, declarative_base
from app.core.settings import settings
# Select the PostgreSQL connection URL for the current environment.
# NOTE(review): user/password are interpolated unescaped; a password with
# URL-special characters (@, /, :) would break the DSN — confirm and escape
# with urllib.parse.quote_plus if that can happen.
if settings.cloud_sql_connection_name:
    # Cloud Run - PostgreSQL via Unix socket: the socket path is passed in
    # the query string's "host" parameter (libpq convention).
    DATABASE_URL = (
        f"postgresql+psycopg://{settings.db_user}:{settings.db_password}@/{settings.db_name}"
        f"?host=/cloudsql/{settings.cloud_sql_connection_name}"
    )
else:
    # Local/VPN environment - plain TCP using the configured host and port.
    DATABASE_URL = (
        f"postgresql+psycopg://{settings.db_user}:{settings.db_password}@"
        f"{settings.db_host}:{settings.db_port}/{settings.db_name}"
    )
engine = create_engine(

@ -1,28 +1,40 @@
"""
Inicialização de banco de dados
Cria tabelas e faz seed dos dados iniciais
Inicializacao de banco de dados.
Cria tabelas e executa seed inicial em ambos os bancos.
"""
from app.db.database import Base, engine
from app.db.mock_database import MockBase, mock_engine
from app.db.models import Tool
from app.db.tool_seed import get_tools_definitions, seed_tools
from app.db.mock_models import Customer, Order, ReviewSchedule, Vehicle
from app.db.mock_seed import seed_mock_data
from app.db.tool_seed import seed_tools
def init_db():
    """Create all tables and run the initial seeds on both databases.

    PostgreSQL (tools) and MySQL (mock fictitious data) are initialized
    independently: a failure in one is logged as a warning and does not
    block the other.
    """
    print("Inicializando bancos...")
    try:
        print("Criando tabelas PostgreSQL (tools)...")
        Base.metadata.create_all(bind=engine)
        print("Populando tools iniciais...")
        seed_tools()
        print("PostgreSQL OK.")
    except Exception as exc:
        # Best-effort: log and continue so the mock database still initializes.
        print(f"Aviso: falha no PostgreSQL (tools): {exc}")
    try:
        print("Criando tabelas MySQL (dados ficticios)...")
        MockBase.metadata.create_all(bind=mock_engine)
        print("Populando dados ficticios iniciais...")
        seed_mock_data()
        print("MySQL mock OK.")
    except Exception as exc:
        print(f"Aviso: falha no MySQL mock: {exc}")
    print("Bancos inicializados com sucesso!")


if __name__ == "__main__":
    init_db()

@ -0,0 +1,31 @@
"""Engine/session setup for the mock MySQL database (fictitious data)."""
from urllib.parse import quote_plus

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base

from app.core.settings import settings

# Credentials may contain characters that are special inside a URL
# (@, /, :, %); escape them so the DSN always parses correctly.
_user = quote_plus(settings.mock_db_user)
_password = quote_plus(settings.mock_db_password)

if settings.mock_db_cloud_sql_connection_name:
    # Cloud SQL MySQL via Unix socket (Cloud Run deployment).
    MOCK_DATABASE_URL = (
        f"mysql+pymysql://{_user}:{_password}@/{settings.mock_db_name}"
        f"?unix_socket=/cloudsql/{settings.mock_db_cloud_sql_connection_name}"
    )
else:
    # Local/dev environment: plain TCP to the configured host and port.
    MOCK_DATABASE_URL = (
        f"mysql+pymysql://{_user}:{_password}@"
        f"{settings.mock_db_host}:{settings.mock_db_port}/{settings.mock_db_name}"
    )

# pool_pre_ping revalidates pooled connections, guarding against MySQL
# silently dropping idle ones; the short connect timeout keeps startup
# failures fast when the database is unreachable.
mock_engine = create_engine(
    MOCK_DATABASE_URL,
    pool_pre_ping=True,
    connect_args={"connect_timeout": 5},
)

SessionMockLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=mock_engine,
)

# Declarative base shared by all mock (fictitious business data) models.
MockBase = declarative_base()

@ -0,0 +1,54 @@
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String, Text
from sqlalchemy.sql import func
from app.db.mock_database import MockBase
class Vehicle(MockBase):
    """Fictitious vehicle available in stock (mock MySQL database)."""

    __tablename__ = "vehicles"

    id = Column(Integer, primary_key=True, index=True)
    # Model name plus year suffix, e.g. produced by the seed as "Toyota Corolla 2023".
    modelo = Column(String(120), nullable=False)
    # Category such as hatch/sedan/suv/pickup (values from the seed); indexed for filtering.
    categoria = Column(String(50), nullable=False, index=True)
    # Price; presumably in BRL — confirm with business rules. Indexed for range queries.
    preco = Column(Float, nullable=False, index=True)
    created_at = Column(DateTime, server_default=func.current_timestamp())
class Customer(MockBase):
    """Fictitious customer with a credit profile, uniquely keyed by CPF."""

    __tablename__ = "customers"

    id = Column(Integer, primary_key=True, index=True)
    # CPF stored as 11 digits with no punctuation; unique natural key.
    cpf = Column(String(11), unique=True, nullable=False, index=True)
    nome = Column(String(120), nullable=False)
    # Credit score used by the sale-validation flow.
    score = Column(Integer, nullable=False)
    # Credit limit compared against the vehicle price on validation.
    limite_credito = Column(Float, nullable=False)
    # True when the customer has a credit restriction (blocks approval).
    possui_restricao = Column(Boolean, nullable=False, default=False)
    created_at = Column(DateTime, server_default=func.current_timestamp())
class Order(MockBase):
    """Fictitious sales order; supports cancellation with reason/timestamp."""

    __tablename__ = "orders"

    id = Column(Integer, primary_key=True, index=True)
    # Business order number, e.g. "PED-202600001"; unique natural key.
    numero_pedido = Column(String(40), unique=True, nullable=False, index=True)
    # References customers.cpf (the natural key, not the surrogate id).
    cpf = Column(String(11), ForeignKey("customers.cpf"), nullable=False, index=True)
    # Lifecycle status; starts as "Ativo", set to "Cancelado" on cancellation.
    status = Column(String(20), nullable=False, default="Ativo")
    # Cancellation reason and timestamp; both NULL while the order is active.
    motivo_cancelamento = Column(Text, nullable=True)
    data_cancelamento = Column(DateTime, nullable=True)
    created_at = Column(DateTime, server_default=func.current_timestamp())
    # Touched automatically on every UPDATE via onupdate.
    updated_at = Column(
        DateTime,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
    )
class ReviewSchedule(MockBase):
    """Vehicle review/maintenance appointment identified by a protocol code."""

    __tablename__ = "review_schedules"

    id = Column(Integer, primary_key=True, index=True)
    # Deterministic protocol code (e.g. "REV-YYYYMMDD-XXXXXXXX"); unique natural key.
    protocolo = Column(String(50), unique=True, nullable=False, index=True)
    # Vehicle license plate.
    placa = Column(String(10), nullable=False, index=True)
    # Scheduled date/time of the appointment.
    data_hora = Column(DateTime, nullable=False)
    status = Column(String(20), nullable=False, default="agendado")
    created_at = Column(DateTime, server_default=func.current_timestamp())

@ -0,0 +1,96 @@
import random
from datetime import datetime
from app.core.settings import settings
from app.db.mock_database import SessionMockLocal
from app.db.mock_models import Customer, Order, Vehicle
# Base vehicle models; the seed cycles through them by index.
VEHICLE_MODELS = [
    "Toyota Corolla",
    "Honda Civic",
    "Chevrolet Onix",
    "Hyundai HB20",
    "Volkswagen T-Cross",
    "Jeep Compass",
    "Fiat Argo",
    "Nissan Kicks",
    "Renault Duster",
    "Ford Ranger",
]

# Vehicle categories; also cycled through by index during seeding.
CATEGORIES = ["hatch", "sedan", "suv", "pickup"]

# Base customer names; the seed appends an index suffix to keep them unique.
NAMES = [
    "Ana Souza",
    "Bruno Lima",
    "Carla Mendes",
    "Diego Santos",
    "Eduarda Alves",
    "Felipe Rocha",
    "Gabriela Costa",
    "Henrique Martins",
    "Isabela Ferreira",
    "Joao Ribeiro",
]
def _cpf_from_index(index: int) -> str:
return str(10_000_000_000 + index).zfill(11)
def seed_mock_data() -> None:
    """Seed the mock MySQL database with deterministic fictitious data.

    Each table is populated only when it is empty, so the function is safe
    to call on every startup. Disabled entirely when
    ``settings.mock_seed_enabled`` is false.
    """
    if not settings.mock_seed_enabled:
        return
    # Fixed seed keeps the generated prices reproducible across runs.
    rng = random.Random(42)
    db = SessionMockLocal()
    try:
        if db.query(Vehicle).count() == 0:
            vehicles = []
            for idx in range(60):
                model = VEHICLE_MODELS[idx % len(VEHICLE_MODELS)]
                category = CATEGORIES[idx % len(CATEGORIES)]
                base_price = 55_000 + (idx * 1_700)
                noise = rng.randint(-7_000, 9_000)
                vehicles.append(
                    Vehicle(
                        modelo=f"{model} {2020 + (idx % 6)}",
                        categoria=category,
                        # Floor the price so noise never produces an
                        # unrealistically cheap vehicle.
                        preco=float(max(35_000, base_price + noise)),
                    )
                )
            db.add_all(vehicles)
            db.commit()
        if db.query(Customer).count() == 0:
            customers = []
            for idx in range(120):
                # Deterministic pseudo-random spread (9973 is prime).
                entropy = (idx * 9973) % 10_000
                customers.append(
                    Customer(
                        cpf=_cpf_from_index(idx),
                        nome=f"{NAMES[idx % len(NAMES)]} {idx + 1}",
                        score=300 + (entropy % 550),
                        limite_credito=float(30_000 + (entropy * 12)),
                        # Roughly 1 in 11 customers carries a restriction.
                        possui_restricao=(idx % 11 == 0),
                    )
                )
            db.add_all(customers)
            db.commit()
        if db.query(Order).count() == 0:
            # Loop-invariant creation timestamp, hoisted out of the loop.
            created = datetime(2026, 1, 1, 8, 0, 0)
            orders = [
                Order(
                    numero_pedido=f"PED-{2026}{idx + 1:05d}",
                    cpf=_cpf_from_index(idx),
                    status="Ativo",
                    created_at=created,
                )
                for idx in range(40)
            ]
            db.add_all(orders)
            db.commit()
    except Exception:
        # Leave the session clean before propagating the failure.
        db.rollback()
        raise
    finally:
        db.close()

@ -1,9 +1,12 @@
from fastapi import FastAPI
from app.api.routes import router
from app.api.tool_routes import router as tool_router
from app.core.settings import settings
from app.db.database import Base, engine
# 👇 IMPORTANTE: registrar models no metadata
from app.db.mock_database import MockBase, mock_engine
from app.db.models import Tool
from app.db.mock_models import Customer, Order, ReviewSchedule, Vehicle
app = FastAPI(title="AI Orquestrador")
@ -16,21 +19,25 @@ async def startup_event():
"""
Inicializa o banco de dados e executa seeds automaticamente.
"""
print("[Auto-Seed] Iniciando configuracao do banco...")
# PostgreSQL (tools) e MySQL (mock) sobem de forma independente.
try:
print("🚀 [Auto-Seed] Iniciando configuração do banco...")
# 1. Cria as tabelas se não existirem
# O engine deve estar configurado para usar o Unix Socket no Cloud Run
Base.metadata.create_all(bind=engine)
if settings.auto_seed_tools:
from app.db.tool_seed import seed_tools
seed_tools()
print("[Auto-Seed] PostgreSQL de tools inicializado.")
except Exception as e:
print(f"[Auto-Seed] Aviso: falha ao inicializar PostgreSQL (tools): {e}")
# 2. Executa a seed das ferramentas
from app.db.tool_seed import seed_tools
seed_tools()
print("✅ [Auto-Seed] Tabelas e ferramentas configuradas com sucesso.")
try:
MockBase.metadata.create_all(bind=mock_engine)
if settings.auto_seed_mock and settings.mock_seed_enabled:
from app.db.mock_seed import seed_mock_data
seed_mock_data()
print("[Auto-Seed] MySQL de mock inicializado.")
except Exception as e:
# IMPORTANTE: Logamos o erro mas NÃO damos 'raise e'
# Isso permite que o Uvicorn abra a porta 8080 e o deploy complete
print(f"⚠️ [Auto-Seed] Aviso: Falha na inicialização automática: {e}")
print("A aplicação tentará operar, verifique a conexão com o Cloud SQL.")
print(f"[Auto-Seed] Aviso: falha ao inicializar MySQL (mock): {e}")
print("[Auto-Seed] Startup finalizado.")

@ -1,57 +0,0 @@
from typing import Any, Dict, List, Optional
import httpx
from app.core.settings import settings
class FakerApiClient:
    """Thin async client for the FakerAPI fictitious-data service."""

    def __init__(
        self,
        base_url: Optional[str] = None,
        locale: Optional[str] = None,
        seed: Optional[int] = None,
    ):
        # Fall back to application settings for any omitted argument.
        self.base_url = (base_url or settings.fakerapi_base_url).rstrip("/")
        self.locale = locale or settings.fakerapi_locale
        # Explicit "is None" check so a caller-supplied seed of 0 is honored.
        self.seed = settings.fakerapi_seed if seed is None else seed

    async def fetch_resource(
        self,
        resource: str,
        quantity: int,
        extra_params: Optional[Dict[str, Any]] = None,
    ) -> List[Dict[str, Any]]:
        """Fetch *quantity* records of *resource* (e.g. "products").

        Returns the payload's ``data`` list (or the payload itself when it
        is already a list); an empty list for any unexpected shape. On a
        read timeout, retries once with a reduced quantity. HTTP errors
        propagate as ``httpx.HTTPStatusError``.
        """
        url = f"{self.base_url}/{resource.lstrip('/')}"
        params: Dict[str, Any] = {
            "_quantity": quantity,
            "_locale": self.locale,
            "_seed": self.seed,
        }
        if extra_params:
            params.update(extra_params)
        timeout = httpx.Timeout(connect=5.0, read=15.0, write=10.0, pool=5.0)
        headers = {
            "Accept": "application/json",
            "User-Agent": "orquestrador-fakerapi-client/1.0",
        }
        async with httpx.AsyncClient(timeout=timeout, headers=headers) as client:
            try:
                response = await client.get(url, params=params)
                response.raise_for_status()
                payload = response.json()
            except httpx.ReadTimeout:
                # Retry once with smaller payload to reduce timeout risk in free/public APIs.
                reduced_quantity = min(quantity, 20)
                retry_params = dict(params)
                retry_params["_quantity"] = reduced_quantity
                response = await client.get(url, params=retry_params)
                response.raise_for_status()
                payload = response.json()
        # Normalize the two shapes FakerAPI may return.
        if isinstance(payload, dict) and isinstance(payload.get("data"), list):
            return payload["data"]
        if isinstance(payload, list):
            return payload
        return []

@ -1,13 +1,12 @@
from typing import Optional, List, Dict, Any
from datetime import datetime
import hashlib
import re
from typing import Any, Dict, List, Optional
import httpx
from fastapi import HTTPException
from app.core.settings import settings
from app.services.fakerapi_client import FakerApiClient
from app.db.mock_database import SessionMockLocal
from app.db.mock_models import Customer, Order, ReviewSchedule, Vehicle
def normalize_cpf(value: str) -> str:
@ -32,139 +31,75 @@ def _stable_int(seed_text: str) -> int:
return int(digest[:16], 16)
def _cpf_from_any(value: Any) -> str:
    """Map an arbitrary value to a deterministic 11-digit CPF-like string."""
    digits = _stable_int(str(value)) % 10**11
    return format(digits, "011d")
async def consultar_estoque(
    preco_max: Optional[float] = None,
    categoria: Optional[str] = None,
    ordenar_preco: Optional[str] = None,
    limite: Optional[int] = None,
) -> List[Dict[str, Any]]:
    """Query the fictitious vehicle stock in the mock MySQL database.

    All filters are optional: maximum price, category (matched
    case-insensitively against the lower-cased stored values), price
    ordering ("asc"/"desc") and a result limit (clamped to >= 1; invalid
    values are ignored).
    """
    db = SessionMockLocal()
    try:
        query = db.query(Vehicle)
        if preco_max is not None:
            query = query.filter(Vehicle.preco <= preco_max)
        if categoria:
            # Categories are stored lower-cased by the seed.
            query = query.filter(Vehicle.categoria == categoria.lower())
        if ordenar_preco in ("asc", "desc"):
            query = query.order_by(
                Vehicle.preco.asc() if ordenar_preco == "asc" else Vehicle.preco.desc()
            )
        if limite is not None:
            try:
                limite = max(1, int(limite))
                query = query.limit(limite)
            except (TypeError, ValueError):
                # Non-numeric limit: ignore it rather than fail the query.
                pass
        rows = query.all()
        return [
            {
                "id": row.id,
                "modelo": row.modelo,
                "categoria": row.categoria,
                "preco": _parse_float(row.preco),
            }
            for row in rows
        ]
    finally:
        db.close()
async def validar_cliente_venda(cpf: str, valor_veiculo: float) -> Dict[str, Any]:
    """Validate a sale against the customer's credit profile.

    Looks the customer up by normalized CPF in the mock database; when not
    found, derives a deterministic simulated profile from the CPF so the
    endpoint always answers. Approval requires no credit restriction and a
    vehicle price within the credit limit.
    """
    cpf_norm = normalize_cpf(cpf)
    db = SessionMockLocal()
    try:
        cliente = db.query(Customer).filter(Customer.cpf == cpf_norm).first()
        if cliente:
            score = int(cliente.score)
            limite = _parse_float(cliente.limite_credito, 0.0)
            restricao = bool(cliente.possui_restricao)
            nome = cliente.nome
        else:
            # Unknown CPF: derive a stable pseudo-profile from the CPF itself.
            entropy = _stable_int(cpf_norm)
            score = int(300 + (entropy % 550))
            limite = float(30000 + (entropy % 150000))
            restricao = entropy % 7 == 0
            nome = "Cliente Simulado"
        aprovado = (not restricao) and (valor_veiculo <= limite)
        return {
            "aprovado": aprovado,
            "cpf": cpf_norm,
            "nome": nome,
            "score": score,
            "limite_credito": limite,
            "possui_restricao": restricao,
            "valor_veiculo": valor_veiculo,
        }
    finally:
        db.close()
async def avaliar_veiculo_troca(modelo: str, ano: int, km: int) -> Dict[str, Any]:
@ -182,14 +117,74 @@ async def avaliar_veiculo_troca(modelo: str, ano: int, km: int) -> Dict[str, Any
async def agendar_revisao(placa: str, data_hora: str) -> Dict[str, Any]:
    """Schedule (or idempotently return) a vehicle review appointment.

    ``data_hora`` must be ISO 8601; a trailing "Z" is accepted. The protocol
    code is derived deterministically from (placa, data_hora), so repeating
    the same request returns the existing appointment instead of creating a
    duplicate. Raises HTTP 400 on an unparsable datetime.
    """
    try:
        # fromisoformat does not accept a literal "Z" suffix on all
        # supported Python versions; map it to an explicit UTC offset.
        dt = datetime.fromisoformat(data_hora.replace("Z", "+00:00"))
    except ValueError:
        raise HTTPException(
            status_code=400,
            detail="data_hora invalida. Use formato ISO 8601, por exemplo: 2026-03-10T09:00:00-03:00",
        )
    # Deterministic protocol: the same plate + datetime always maps to one code.
    entropy = hashlib.md5(f"{placa}:{data_hora}".encode("utf-8")).hexdigest()[:8].upper()
    protocolo = f"REV-{dt.strftime('%Y%m%d')}-{entropy}"
    db = SessionMockLocal()
    try:
        existente = db.query(ReviewSchedule).filter(ReviewSchedule.protocolo == protocolo).first()
        if existente:
            # Idempotent path: return the already-stored appointment.
            return {
                "protocolo": existente.protocolo,
                "placa": existente.placa,
                "data_hora": existente.data_hora.isoformat(),
                "status": existente.status,
            }
        agendamento = ReviewSchedule(
            protocolo=protocolo,
            placa=placa.upper(),
            data_hora=dt,
            status="agendado",
        )
        db.add(agendamento)
        db.commit()
        db.refresh(agendamento)
        return {
            "protocolo": agendamento.protocolo,
            "placa": agendamento.placa,
            "data_hora": agendamento.data_hora.isoformat(),
            "status": agendamento.status,
        }
    finally:
        db.close()
async def cancelar_pedido(numero_pedido: str, motivo: str) -> Dict[str, Any]:
    """Cancel an order in the mock database (idempotent).

    Raises HTTP 404 when the order does not exist. When the order is already
    cancelled, returns the stored cancellation data without overwriting the
    original reason or timestamp.
    """
    db = SessionMockLocal()
    try:
        pedido = db.query(Order).filter(Order.numero_pedido == numero_pedido).first()
        if not pedido:
            raise HTTPException(status_code=404, detail="Pedido nao encontrado na base ficticia.")
        if pedido.status.lower() == "cancelado":
            # Already cancelled: idempotent response, no mutation.
            return {
                "numero_pedido": pedido.numero_pedido,
                "status": pedido.status,
                "motivo": pedido.motivo_cancelamento,
                "data_cancelamento": pedido.data_cancelamento.isoformat() if pedido.data_cancelamento else None,
            }
        pedido.status = "Cancelado"
        pedido.motivo_cancelamento = motivo
        # NOTE(review): utcnow() stores naive UTC; the column is a naive
        # DateTime, so this is consistent, but confirm readers expect UTC.
        pedido.data_cancelamento = datetime.utcnow()
        db.commit()
        db.refresh(pedido)
        return {
            "numero_pedido": pedido.numero_pedido,
            "status": pedido.status,
            "motivo": pedido.motivo_cancelamento,
            "data_cancelamento": pedido.data_cancelamento.isoformat() if pedido.data_cancelamento else None,
        }
    finally:
        db.close()

Binary file not shown.
Loading…
Cancel
Save