from fastapi import FastAPI, File, UploadFile, Request
import tensorflow as tf
import numpy as np
from PIL import Image
import cv2  # Used for resizing to the model's input resolution
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware

# Load the trained classifier once at import time so every request reuses it.
model = tf.keras.models.load_model('recyclebot.keras')

# Class names in the same index order the model was trained with.
CLASSES = ['Glass', 'Metal', 'Paperboard', 'Plastic-Polystyrene', 'Plastic-Regular']

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],   # Allow all origins (or specify specific origins)
    allow_credentials=True,
    allow_methods=["*"],   # Allow all HTTP methods
    allow_headers=["*"],   # Allow all headers
)


def preprocess_image(image_file):
    """Read an uploaded image file and shape it for model inference.

    Returns a numpy array of shape (1, 240, 240, 3), uint8 — no
    normalization is applied (the model presumably handles scaling
    internally; TODO confirm against training pipeline).
    """
    # Force 3 channels: without .convert("RGB"), grayscale (1-channel) or
    # PNG-with-alpha (4-channel) uploads make the reshape below raise
    # ValueError because the element count no longer matches (..., 3).
    image = Image.open(image_file).convert("RGB")
    image = np.array(image)
    # Resize to the 240x240 input resolution the model expects.
    image = cv2.resize(image, (240, 240))
    # Add the batch dimension for inference.
    return image.reshape(-1, 240, 240, 3)


@app.post("/predict")
async def predict(file: UploadFile = File(...)):
    """Classify an uploaded image and return the predicted class name.

    Responds with {"prediction": <class name>} on success, or
    {"error": <message>} with HTTP 400 on any failure.
    """
    try:
        img_array = preprocess_image(file.file)
        predictions = model.predict(img_array)
        # Index of the highest-probability class for the single batch item.
        predicted_class_idx = np.argmax(predictions, axis=1)[0]
        predicted_class = CLASSES[predicted_class_idx]
        return JSONResponse(content={"prediction": predicted_class})
    except Exception as e:
        # Catch-all boundary: surface the failure reason to the client
        # rather than letting the server 500.
        return JSONResponse(content={"error": str(e)}, status_code=400)


@app.get("/working")
async def working():
    """Health-check endpoint."""
    return JSONResponse(content={"Response": "Received"})


# To manually run FastAPI (though Hugging Face will typically do this)
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)