# trygithubactions/utils/data_loader.py
# (Hugging Face viewer header preserved from scrape: "added human refinements
#  block", commit 9acd478, 302 bytes — not part of the module's code.)
# from datasets import load_dataset
# print("Loading dataset and indexing FAISS...") # Optional: for debugging
# dataset = load_dataset("subashdvorak/tiktok-formatted-story-v2", revision="embedded")
# data = dataset['train'].add_faiss_index('embeddings')
def load_influencer_data():
    """Return influencer data for the app.

    Currently a stub: the real loading logic (presumably the FAISS-indexed
    dataset sketched in the commented-out block above — confirm with the
    repo history) is disabled, so this always yields an empty string.
    """
    placeholder = ""
    return placeholder