Spaces:
Sleeping
Sleeping
File size: 2,587 Bytes
import torch
from transformers import pipeline, set_seed
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
import uvicorn

# ---------------------------
# Step 1: Model setup
# ---------------------------
MODEL_NAME = "amusktweewt/tiny-model-500M-chat-v2"

print("🔄 Downloading and loading model...")

# Prefer the first CUDA device when one is present; -1 selects the CPU.
_device = 0 if torch.cuda.is_available() else -1
chatbot = pipeline("text-generation", model=MODEL_NAME, device=_device)

# Fix the sampling seed so generations are reproducible across restarts.
set_seed(42)
print("✅ Model loaded and ready!")
# ---------------------------
# System instruction prepended to every conversation
# ---------------------------
SYSTEM_INSTRUCTION = (
    "You are DryfishBD's chat assistant. Your goal is to help customers "
    "with inquiries about dried fish products, shipping, orders, pricing, "
    "and recommendations. Always reply in a friendly, professional, and "
    "concise manner."
)
# ---------------------------
# Step 2: FastAPI app setup
# ---------------------------
# Single application instance; routes below register against it.
app = FastAPI(title="DryfishBD Chatbot API", version="1.0")
# ---------------------------
# Step 3: Predict endpoint
# ---------------------------
@app.post("/predict")
async def predict(request: Request):
    """Generate a chat reply for a JSON payload of the form {"message": "..."}.

    Returns:
        JSONResponse {"reply": "..."} on success,
        {"error": "..."} with status 400 when 'message' is missing or not a
        non-empty string, or status 500 if generation fails.
    """
    try:
        data = await request.json()
        message = data.get("message", "")
        # Validate the type explicitly: a non-string payload (number, list,
        # null) would otherwise crash on .strip() and surface as a 500
        # instead of a client error.
        if not isinstance(message, str) or not message.strip():
            return JSONResponse({"error": "Missing 'message' in request."}, status_code=400)
        user_input = message.strip()

        # Build the chat turns; the empty assistant turn cues the template
        # to leave the generation slot open for the model's reply.
        messages = [
            {"role": "system", "content": SYSTEM_INSTRUCTION},
            {"role": "user", "content": user_input},
            {"role": "assistant", "content": ""}
        ]
        prompt = chatbot.tokenizer.apply_chat_template(messages, tokenize=False)

        response = chatbot(
            prompt,
            do_sample=True,
            max_new_tokens=256,
            top_k=50,
            temperature=0.2,            # low temperature: mostly deterministic, on-topic replies
            num_return_sequences=1,
            repetition_penalty=1.1,
            pad_token_id=chatbot.tokenizer.eos_token_id,
            min_new_tokens=0
        )

        # The pipeline echoes the prompt; strip it to keep only new text.
        full_text = response[0]["generated_text"]
        bot_response = full_text[len(prompt):].strip()
        return JSONResponse({"reply": bot_response})
    except Exception as e:
        # Broad catch at the API boundary so the client always gets JSON.
        return JSONResponse({"error": str(e)}, status_code=500)
# ---------------------------
# Step 4: Root route
# ---------------------------
@app.get("/")
def home():
    """Health-check route confirming the API is up."""
    status = {"message": "DryfishBD Chatbot API is running!"}
    return status
# ---------------------------
# Step 5: Local entry point (ignored when hosted on Hugging Face)
# ---------------------------
if __name__ == "__main__":
    # Bind on all interfaces; 7860 is the conventional HF Spaces port.
    uvicorn.run(app, host="0.0.0.0", port=7860)
|