bhaviktheslider committed on
Commit
b695dde
·
verified ·
1 Parent(s): f024203

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +3 -3
README.md CHANGED
@@ -60,7 +60,7 @@ The model first *reasons* silently (chain-of-thought is kept internal) and then
60
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
61
  import torch, json, textwrap
62
 
63
- MODEL = "bhaviktheslider/unsloth-qwen2.5-3b-grpo-json-structurer"
64
 
65
  tok = AutoTokenizer.from_pretrained(MODEL, use_fast=True)
66
  model = AutoModelForCausalLM.from_pretrained(
@@ -72,7 +72,7 @@ model = AutoModelForCausalLM.from_pretrained(
72
  # --- Prompt (identical structure to previous model) ---
73
  system_prompt = (
74
  "You are an intelligent JSON conversion engine. "
75
- "Think step-by-step, but **ONLY** output the final valid JSON."
76
  )
77
 
78
  task_prompt = textwrap.dedent("""\
@@ -110,7 +110,7 @@ print(data)
110
 
111
  ```bash
112
  # start server (8-bit, BF16, etc. as needed)
113
- text-generation-launcher --model-id bhaviktheslider/unsloth-qwen2.5-3b-grpo-json-structurer
114
 
115
  # curl call
116
  curl http://localhost:8080/generate \
 
60
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
61
  import torch, json, textwrap
62
 
63
+ MODEL = "MasterControlAIML/DeepSeek-R1-Qwen2.5-3b-LLM-Judge-Reward-JSON-Unstructured-To-Structured-Merged-Lora-16bit"
64
 
65
  tok = AutoTokenizer.from_pretrained(MODEL, use_fast=True)
66
  model = AutoModelForCausalLM.from_pretrained(
 
72
  # --- Prompt (identical structure to previous model) ---
73
  system_prompt = (
74
  "You are an intelligent JSON conversion engine. "
75
+ "Think step-by-step, and then output the final valid JSON."
76
  )
77
 
78
  task_prompt = textwrap.dedent("""\
 
110
 
111
  ```bash
112
  # start server (8-bit, BF16, etc. as needed)
113
+ text-generation-launcher --model-id MasterControlAIML/DeepSeek-R1-Qwen2.5-3b-LLM-Judge-Reward-JSON-Unstructured-To-Structured-Merged-Lora-16bit
114
 
115
  # curl call
116
  curl http://localhost:8080/generate \