avinashm commited on
Commit
6852591
·
verified ·
1 Parent(s): 04ccbd6

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +61 -0
README.md CHANGED
@@ -21,6 +21,67 @@ Dataset: neo4j/text2cypher-2024v1
21
 
22
 An overview of the finetuned models and benchmarking results is shared at https://medium.com/p/d77be96ab65a and https://medium.com/p/b2203d1173b0
23
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
24
 
25
  ## Bias, Risks, and Limitations
26
 
 
21
 
22
 An overview of the finetuned models and benchmarking results is shared at https://medium.com/p/d77be96ab65a and https://medium.com/p/b2203d1173b0
23
 
24
+ ## Example Cypher generation
25
+ ```python
26
+ import openai
27
+
28
+ # Define the instruction and helper functions
29
+ instruction = (
30
+ "Generate Cypher statement to query a graph database. "
31
+ "Use only the provided relationship types and properties in the schema. \n"
32
+ "Schema: {schema} \n Question: {question} \n Cypher output: "
33
+ )
34
+
35
+ def prepare_chat_prompt(question, schema):
36
+ # Build the messages list for the OpenAI API
37
+ return [
38
+ {
39
+ "role": "user",
40
+ "content": instruction.format(schema=schema, question=question),
41
+ }
42
+ ]
43
+
44
+ def _postprocess_output_cypher(output_cypher: str) -> str:
45
+ # Remove any explanation text and code block markers
46
+ partition_by = "**Explanation:**"
47
+ output_cypher, _, _ = output_cypher.partition(partition_by)
48
+ output_cypher = output_cypher.strip("`\n")
49
+ output_cypher = output_cypher.removeprefix("cypher").lstrip("\n")
50
+ output_cypher = output_cypher.strip("`\n ")
51
+ return output_cypher
52
+
53
+ # Configure the OpenAI API endpoint to your Ollama server.
54
+ # (Adjust the API base URL if your Ollama server is hosted at a different address/port.)
55
+ openai.api_base = "http://localhost:11434/v1"
56
+ openai.api_key = "YOUR_API_KEY" # Include if your setup requires an API key
57
+
58
+ # Set the model name as used by Ollama (this should match the name configured on your Ollama server)
59
+ model_name = "avinashm/text2cypher"
60
+
61
+ # Define the question and schema
62
+ question = "What are the movies of Tom Hanks?"
63
+ schema = "(:Actor)-[:ActedIn]->(:Movie)"
64
+
65
+ # Prepare the conversation messages
66
+ messages = prepare_chat_prompt(question=question, schema=schema)
67
+
68
+ # Call the API using similar generation parameters to your original script.
69
+ response = openai.ChatCompletion.create(
70
+ model=model_name,
71
+ messages=messages,
72
+ temperature=0.2,
73
+ max_tokens=512, # equivalent to max_new_tokens in your original script
74
+ top_p=0.9,
75
+ )
76
+
77
+ # Extract and post-process the output
78
+ raw_output = response["choices"][0]["message"]["content"]
79
+ output = _postprocess_output_cypher(raw_output)
80
+
81
+ print(output)
82
+ ```
83
+
84
+
85
 
86
  ## Bias, Risks, and Limitations
87