rafacamargo committed on
Commit
738d2af
·
1 Parent(s): e663138

fix: relative file import paths

Browse files
constants/__init__.py DELETED
File without changes
prediction.py CHANGED
@@ -1,7 +1,7 @@
1
  import torch
2
  import tiktoken
3
- from constants.tokens import special_tokens
4
- from services.model import load_model, get_device
5
 
6
  # Initialize tokenizer
7
  _tokenizer = tiktoken.get_encoding("cl100k_base")
 
1
  import torch
2
  import tiktoken
3
+ from src.constants.tokens import special_tokens
4
+ from src.services.model import load_model, get_device
5
 
6
  # Initialize tokenizer
7
  _tokenizer = tiktoken.get_encoding("cl100k_base")
services/__init__.py DELETED
File without changes
src/constants/__pycache__/tokens.cpython-312.pyc ADDED
Binary file (343 Bytes). View file
 
src/services/__pycache__/model.cpython-312.pyc ADDED
Binary file (1.99 kB). View file
 
src/services/__pycache__/transformer.cpython-312.pyc ADDED
Binary file (4.68 kB). View file
 
src/services/model.py CHANGED
@@ -2,7 +2,7 @@ import os
2
  import json
3
  import torch
4
  # from pathlib import Path
5
- from services.transformer import TinyTransformer
6
 
7
  # Internal constants for file paths
8
  # _MODEL_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "model")
 
2
  import json
3
  import torch
4
  # from pathlib import Path
5
+ from src.services.transformer import TinyTransformer
6
 
7
  # Internal constants for file paths
8
  # _MODEL_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "model")
src/services/transformer.py CHANGED
@@ -1,6 +1,6 @@
1
  import torch
2
  import torch.nn as nn
3
- from constants.tokens import PAD_ID
4
 
5
  class TinyTransformer(nn.Module):
6
  def __init__(self, vocab_size, d_model=256, nhead=4, num_layers=2, dim_feedforward=512, dropout=0.1):
 
1
  import torch
2
  import torch.nn as nn
3
+ from src.constants.tokens import PAD_ID
4
 
5
  class TinyTransformer(nn.Module):
6
  def __init__(self, vocab_size, d_model=256, nhead=4, num_layers=2, dim_feedforward=512, dropout=0.1):