File size: 4,998 Bytes
d50fc97
 
 
 
 
06825b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9047f8a
 
 
 
 
06825b1
 
 
 
 
 
 
 
 
 
 
 
9047f8a
06825b1
 
 
 
 
9047f8a
06825b1
 
 
 
 
d50fc97
 
06825b1
 
 
 
 
 
 
 
 
 
 
 
d5d1317
06825b1
 
9047f8a
06825b1
9047f8a
 
1c4804c
06825b1
1c4804c
 
 
 
 
 
06825b1
 
1c4804c
 
 
 
 
06825b1
9047f8a
 
 
1c4804c
06825b1
1c4804c
 
 
 
 
 
06825b1
1c4804c
 
 
 
 
06825b1
9047f8a
 
06825b1
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import os
import openai


def plan_course(messages, sources):
    """
    Generate a structured course outline as a JSON object using the conversation and collected sources.

    This function reads a JSON schema from the repository (``course_outline_schema.json``) and instructs
    the language model to produce an output that strictly follows the schema. The conversation history
    (``messages``) and list of resources (``sources``) are provided to the model as context.

    Args:
        messages (list[dict]): Conversation history with roles and content.
        sources (list[dict]): List of source dictionaries with "title" and "url" keys.

    Returns:
        str: A JSON string representing the course outline that matches the schema.

    Raises:
        RuntimeError: If the OpenAI API call fails.
        ValueError: If an API key is not provided via environment variables.
    """
    # Ensure API key is available (support COURSECREATOR_API_KEY as fallback)
    api_key = os.getenv("OPENAI_API_KEY") or os.getenv("COURSECREATOR_API_KEY")
    if not api_key:
        raise ValueError(
            "An OpenAI API key is required to plan the course (set OPENAI_API_KEY or COURSECREATOR_API_KEY)"
        )
    # Load the JSON schema from the local file to guide the model
    schema_path = os.path.join(os.path.dirname(__file__) or ".", "course_outline_schema.json")
    try:
        # Explicit encoding: the schema is JSON and must be decoded as UTF-8
        # regardless of the platform's locale default.
        with open(schema_path, "r", encoding="utf-8") as f:
            schema_content = f.read().strip()
    except OSError:
        # Best-effort: if the schema file is missing/unreadable, fall back to a
        # minimal structure so planning can still proceed.
        schema_content = (
            '{"title":"","description":"","course_plan":[]}'
        )
    # Compose system prompt: instruct the model to output JSON matching the schema and to use
    # information from the conversation and the provided sources.
    system_prompt = (
        "You are an expert course planner. Use the conversation and sources provided to produce a "
        "detailed course outline. Your response MUST be a valid JSON object that strictly follows "
        "this schema:\n\n"
        f"{schema_content}\n\n"
        "Do not wrap your answer in markdown or include any additional commentary. Only output the JSON."
    )
    # Build messages array for the model: system prompt first, then the full conversation history.
    formatted_messages = [{"role": "system", "content": system_prompt}] + list(messages)
    # Append sources description if present
    if sources:
        # Format sources as a numbered list for the model to reference;
        # non-dict entries are skipped (matches the original behavior).
        source_lines = [
            f"[{i}] {src.get('title', '')} - {src.get('url', '')}"
            for i, src in enumerate(sources, start=1)
            if isinstance(src, dict)
        ]
        source_text = "\n".join(source_lines)
        formatted_messages.append({"role": "system", "content": f"Sources:\n{source_text}"})
    # Model configuration (overridable via environment variables)
    model = os.getenv("OPENAI_MODEL", "gpt-3.5-turbo")
    temperature = float(os.getenv("TEMPERATURE", "0.3"))  # Lower temperature for more deterministic JSON
    max_tokens = int(os.getenv("MAX_OUTPUT_TOKENS", "4096"))
    try:
        # Use new OpenAI client if available (SDK >= 1.0 exposes openai.OpenAI)
        if hasattr(openai, "OpenAI"):
            client = openai.OpenAI(api_key=api_key)
            try:
                resp = client.chat.completions.create(
                    model=model,
                    messages=formatted_messages,
                    temperature=temperature,
                    max_tokens=max_tokens,
                )
            except Exception:
                # Some newer models reject max_tokens and require
                # max_completion_tokens instead — retry with that parameter.
                resp = client.chat.completions.create(
                    model=model,
                    messages=formatted_messages,
                    temperature=temperature,
                    max_completion_tokens=max_tokens,
                )
            content = resp.choices[0].message.content
        else:
            # Legacy OpenAI SDK (<1.0)
            openai.api_key = api_key
            try:
                resp = openai.ChatCompletion.create(
                    model=model,
                    messages=formatted_messages,
                    temperature=temperature,
                    max_tokens=max_tokens,
                )
            except Exception:
                # Same max_tokens/max_completion_tokens fallback as above.
                resp = openai.ChatCompletion.create(
                    model=model,
                    messages=formatted_messages,
                    temperature=temperature,
                    max_completion_tokens=max_tokens,
                )
            content = resp["choices"][0]["message"]["content"]
    except Exception as e:
        # Chain the original exception so the underlying API failure is
        # preserved in the traceback for debugging.
        raise RuntimeError(f"OpenAI API error: {e}") from e
    # The content should be valid JSON. Return as string so the caller can write to file or parse.
    return content