"""Gradio demo that fine-tunes a Gradient AI base model on a prompts/results
CSV and serves text completions from the resulting adapter.

The adapter is fine-tuned lazily on first request and cached for reuse, so
the (expensive) training pass runs at most once per process instead of once
per web request.
"""

import os

import gradio as gr
import pandas as pd
from gradientai import Gradient

# SECURITY(review): these credentials were hard-coded in source control and
# must be considered leaked — rotate them. setdefault keeps the script
# runnable for backward compatibility while letting the real environment
# override the values.
os.environ.setdefault('GRADIENT_WORKSPACE_ID', '9d0447f2-fcd4-4177-9145-9f019fd59f1e_workspace')
os.environ.setdefault('GRADIENT_ACCESS_TOKEN', 'cPErsUMgadGMbzeq8z8W36eJn7UA0Uob')

# Training data: expected to contain 'prompts' and 'results' columns.
df = pd.read_csv("https://raw.githubusercontent.com/CS-5302/CS-5302-Project-Group-15/main/Datasets/testing/combined_df.csv")

BATCH_SIZE = 100   # rows per fine-tuning call
NUM_EPOCHS = 1     # passes over each batch


def create_model_adapter(gradient):
    """Create and return a fresh model adapter on the nous-hermes2 base model.

    Args:
        gradient: an open ``Gradient`` client.

    Returns:
        The newly created model adapter (untrained).
    """
    base_model = gradient.get_base_model(base_model_slug="nous-hermes2")
    new_model_adapter = base_model.create_model_adapter(
        name="meta/llama-2-7b:73001d654114dad81ec65da3b834e2f691af1e1526453189b7bf36fb3f32d0f9"
    )
    print(f"Created model adapter with id {new_model_adapter.id}")
    return new_model_adapter


def fine_tune_in_batches(df, gradient, batch_size, num_epochs):
    """Fine-tune a new adapter over *df* in row batches.

    Each row is turned into an instruction/response sample pair; every batch
    is trained for ``num_epochs`` epochs.

    Args:
        df: DataFrame with 'prompts' and 'results' columns.
        gradient: an open ``Gradient`` client.
        batch_size: number of rows per batch.
        num_epochs: training passes per batch.

    Returns:
        The fine-tuned model adapter.
    """
    new_model_adapter = create_model_adapter(gradient)

    # Split the DataFrame into contiguous row batches.
    batches = [df[i:i + batch_size] for i in range(0, len(df), batch_size)]

    for batch_index, batch in enumerate(batches):
        fine_tuning_samples = [
            {
                "inputs": f"### Instruction: {row['prompts']}",
                "targets": f"### Response: {row['results']}",
            }
            for _, row in batch.iterrows()
        ]

        for epoch in range(num_epochs):
            print(f"Fine-tuning batch {batch_index + 1} (epoch {epoch + 1})")
            new_model_adapter.fine_tune(samples=fine_tuning_samples)

    return new_model_adapter


# Lazily-initialized, process-wide fine-tuned adapter. Previously the model
# was re-trained from scratch and then deleted on EVERY request, which made
# each prediction pay the full training cost and discarded the result.
_model_adapter = None
_gradient_client = None


def _get_model_adapter():
    """Return the cached fine-tuned adapter, training it on first use."""
    global _model_adapter, _gradient_client
    if _model_adapter is None:
        _gradient_client = Gradient()
        _model_adapter = fine_tune_in_batches(df, _gradient_client, BATCH_SIZE, NUM_EPOCHS)
    return _model_adapter


def predict(prompt):
    """Generate a completion for *prompt* using the fine-tuned adapter.

    Args:
        prompt: user instruction text from the Gradio UI.

    Returns:
        The model's generated response string.
    """
    model_adapter = _get_model_adapter()
    sample_query = f"### Instruction: {prompt} \n\n### Response:"
    completion = model_adapter.complete(query=sample_query, max_generated_token_count=100).generated_output
    return completion


interface = gr.Interface(fn=predict, inputs="text", outputs="text")
interface.launch()