File size: 1,616 Bytes
ef213a8
7fb1f17
a3bd5a7
4692dc0
a3bd5a7
c173a25
 
 
ef213a8
150dd38
4437110
 
 
 
 
 
 
 
 
 
 
 
 
c173a25
 
150dd38
c173a25
 
150dd38
ef213a8
c173a25
 
ef213a8
c173a25
 
ef213a8
61c159a
c173a25
 
ef213a8
 
180cc65
 
4692dc0
61c159a
 
be1fc82
 
61c159a
4692dc0
3e22ed9
 
c173a25
a3bd5a7
 
 
cc5c1ac
 
ef213a8
61c159a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import torch
import torch.nn as nn
from huggingface_hub import PyTorchModelHubMixin
import pandas as pd

# Pick the compute device: CUDA when available, otherwise CPU.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
print('Device:', device)


# Seed the RNG so the freshly initialized network weights are reproducible.

torch.manual_seed(42)

# Small feed-forward binary classifier: 12 -> 12 -> 6 -> 1, with a
# sigmoid head so the single output is a probability in [0, 1].
_layers = [
    nn.Linear(12, 12),
    nn.ReLU(),
    nn.Linear(12, 6),
    nn.ReLU(),
    nn.Linear(6, 1),
    nn.Sigmoid(),
]
model = nn.Sequential(*_layers)

# Define model class
class MyModel(nn.Module, PyTorchModelHubMixin):
    """Feed-forward binary classifier (12 -> 12 -> 6 -> 1, sigmoid head).

    Mixes in ``PyTorchModelHubMixin`` so weights can be loaded from the
    Hugging Face Hub via ``from_pretrained``. The layers are built inside
    ``__init__`` rather than capturing the module-level ``model`` object,
    so each instance owns its own parameters; previously every instance
    shared the single module-level network's weights.
    """

    def __init__(self):
        super().__init__()  # Initialize nn.Module (and mixin machinery)
        # Same architecture as the module-level network defined in this file.
        self.model = nn.Sequential(
            nn.Linear(12, 12),
            nn.ReLU(),
            nn.Linear(12, 6),
            nn.ReLU(),
            nn.Linear(6, 1),
            nn.Sigmoid(),
        )

    def forward(self, x):
        # x: assumed shape (batch, 12) floats — TODO confirm against caller.
        return self.model(x)

# EndpointHandler class
class EndpointHandler:
    """Inference endpoint: loads the Hub checkpoint once at construction,
    then scores incoming JSON payloads on each ``__call__``."""

    def __init__(self, path=""):
        # NOTE(review): `path` is part of the handler contract but unused —
        # the checkpoint id is hard-coded. Load once, move to the selected
        # device, and freeze in eval mode.
        loaded = MyModel.from_pretrained("damiano216/pay-boo-2")
        loaded.to(device)
        loaded.eval()
        self.model = loaded

    def __call__(self, data):
        # Log the raw request for debugging.
        print(f"Payload: {data}")

        # Rows of 'chargeData' become one feature vector per sample.
        frame = pd.DataFrame(data['chargeData'])
        print(frame)

        # Convert to a float tensor on the same device as the model.
        features = torch.tensor(frame.values, dtype=torch.float).to(device)
        print(f"new_data_tensor: {features}")

        # Score without tracking gradients.
        with torch.no_grad():
            scores = self.model(features)

        # Only the first sample's probability is reported to the caller.
        print(f"Predictions: {scores[0].item()}")
        return scores[0].item()