File size: 2,908 Bytes
2f1dcf0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
import torch
import torch.nn as nn
import torch.nn.functional as F
import pickle
import joblib
import numpy as np

# SimpleClassifier class definition
class SimpleClassifier(nn.Module):
    """Three-layer fully connected classifier.

    Architecture: input_size -> 256 -> 128 -> num_classes, with ReLU
    activations and Dropout(0.3) after each hidden layer. Returns raw
    logits (callers apply softmax themselves).
    """

    def __init__(self, input_size, num_classes):
        super().__init__()
        # Attribute names (fc1/fc2/fc3/dropout) are part of the state_dict
        # contract used by checkpoint loading — do not rename.
        self.fc1 = nn.Linear(input_size, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, num_classes)
        self.dropout = nn.Dropout(0.3)

    def forward(self, x):
        """Map a (batch, input_size) tensor to (batch, num_classes) logits."""
        hidden = self.dropout(F.relu(self.fc1(x)))
        hidden = self.dropout(F.relu(self.fc2(hidden)))
        return self.fc3(hidden)

def test_current_model():
    """Smoke-test the current model.

    Loads the label config, trained weights, and TF-IDF vectorizer from the
    working directory, then prints the top-k predictions for a few sample
    words. Any failure is reported (with traceback) instead of raising.

    Returns:
        None. All results are printed to stdout.
    """
    import json  # local import: only this diagnostic routine needs it

    print("=== ํ˜„์žฌ ๋ชจ๋ธ ํ…Œ์ŠคํŠธ ===")

    try:
        # Load label configuration; id2label maps str(label_id) -> label name.
        with open('config.json', 'r', encoding='utf-8') as f:
            config = json.load(f)

        id2label = config.get('id2label', {})
        print(f"๋ผ๋ฒจ ์ˆ˜: {len(id2label)}")

        # Rebuild the model skeleton and restore the trained weights.
        input_size = 3000  # TF-IDF feature count — must match the fitted vectorizer
        num_classes = len(id2label)
        model = SimpleClassifier(input_size, num_classes)
        # NOTE(review): torch.load unpickles arbitrary objects; only load
        # trusted checkpoints (consider weights_only=True on recent torch).
        model.load_state_dict(torch.load('pytorch_model.bin', map_location='cpu'))

        # Load the fitted TF-IDF vectorizer.
        vectorizer = joblib.load('vectorizer.pkl')

        model.eval()

        # Sample test words (the last one is the case of interest).
        test_words = ["์ฒ ใ„น", "CaO", "ํ•ด๋ฉด์ฒ ", "๋“ฑ๋ฅ˜", "ํ™˜์›์ฒ "]

        # Fix: torch.topk(k) raises when k > num_classes; clamp to label count.
        top_k = min(5, num_classes)

        for word in test_words:
            print(f"\n{'='*50}")
            print(f"์ž…๋ ฅ: '{word}'")
            print(f"{'='*50}")

            # Vectorize with TF-IDF and convert the dense row to a tensor.
            word_vector = vectorizer.transform([word]).toarray()
            word_tensor = torch.FloatTensor(word_vector)

            with torch.no_grad():
                outputs = model(word_tensor)
                probabilities = F.softmax(outputs, dim=1)

                # Top-k predictions by probability.
                top_probs, top_indices = torch.topk(probabilities, top_k, dim=1)

                print(f"์ตœ๋Œ€ ํ™•๋ฅ : {probabilities.max().item():.4f} ({probabilities.max().item()*100:.1f}%)")
                print(f"์ƒ์œ„ 5๊ฐœ ์˜ˆ์ธก:")

                for i in range(top_k):
                    label_id = top_indices[0][i].item()
                    probability = top_probs[0][i].item()
                    # id2label came from JSON, so its keys are strings.
                    label = id2label.get(str(label_id), f"Unknown_{label_id}")
                    print(f"  {i+1}. {label}: {probability:.4f} ({probability*100:.1f}%)")

    except Exception as e:
        # Diagnostic script: report any failure rather than crash the caller.
        print(f"์—๋Ÿฌ ๋ฐœ์ƒ: {e}")
        import traceback
        traceback.print_exc()

def _main():
    """Script entry point: run the model smoke test."""
    test_current_model()


if __name__ == "__main__":
    _main()