# model.py — uploaded by xuan-superfine to the Hugging Face Hub (revision 9692147)
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam
import torch
from fastapi import FastAPI
from pydantic import BaseModel
from flask import Flask, request, jsonify
# Artifact filenames for the persisted sklearn pipeline pieces.
# NOTE(review): despite the original "Hugging Face Hub" comment, these are
# loaded from the local working directory via joblib — nothing is downloaded
# and there is no tokenizer involved.
model_name = "model.joblib"  # unused below; model_filename is what gets loaded
import os
import joblib
# Paths (relative to the current working directory) of the fitted estimator
# and the feature scaler produced at training time.
model_filename = 'model.joblib'
scaler_filename = 'scaler.joblib'
# Deserialize the estimator and scaler once, at import time.
# NOTE(review): joblib.load unpickles arbitrary code — only load trusted files.
loaded_model = joblib.load(model_filename)
loaded_scaler = joblib.load(scaler_filename)
# Flask application that serves the /predict endpoint defined below.
app = Flask(__name__)
# Define the prediction route
@app.route('/predict', methods=['POST'])
def predict():
    """Return model predictions for the JSON records in the request body.

    Expects a JSON payload convertible to a DataFrame that contains (at
    least) the categorical columns 'Creative Set', 'Game Type', 'Partner'
    and 'Country'.  Responds with ``{"prediction": [...]}``.
    """
    # Parse the request body into a DataFrame with the training-time layout.
    data = request.get_json()
    new_df = pd.DataFrame(data)
    # One-hot encode the categorical columns, mirroring the training pipeline.
    new_df_encoded = pd.get_dummies(
        new_df, columns=['Creative Set', 'Game Type', 'Partner', 'Country']
    )
    # BUG FIX: get_dummies on a single request only creates dummy columns for
    # the category values present in that request, so the column set (and
    # order) rarely matches what the scaler/model were fitted on.  When the
    # fitted scaler records its training features (sklearn >= 1.0 exposes
    # feature_names_in_ when fit on a DataFrame), re-align to that exact
    # column order, filling absent dummies with 0.
    feature_names = getattr(loaded_scaler, 'feature_names_in_', None)
    if feature_names is not None:
        new_df_encoded = new_df_encoded.reindex(columns=feature_names, fill_value=0)
    # Scale with the training-time scaler, then predict.
    new_data_scaled = loaded_scaler.transform(new_df_encoded)
    predictions = loaded_model.predict(new_data_scaled)
    # Return the prediction as a JSON response.
    return jsonify({"prediction": predictions.tolist()})
if __name__ == '__main__':
    # Start the Flask development server on all interfaces.
    # NOTE(review): debug=True together with host='0.0.0.0' exposes the
    # Werkzeug interactive debugger to the whole network — never run this
    # configuration in production.
    app.run(host='0.0.0.0', port=5000, debug=True)