cryogenic22 committed on
Commit
bf05d0e
·
verified ·
1 Parent(s): 2dc3347

Update selfapi_writer.py

Browse files
Files changed (1) hide show
  1. selfapi_writer.py +273 -165
selfapi_writer.py CHANGED
@@ -5,13 +5,22 @@ import os
5
  import tiktoken
6
  from typing import Dict, Any, Optional, List, Tuple, Generator
7
 
 
 
 
 
 
 
 
 
 
 
8
  class SelfApiWriter:
9
  def __init__(self):
10
- """Initialize the Self.api writer"""
11
  # Try to get API key from environment variables first, then from secrets
12
  ANTHROPIC_API_KEY = os.getenv('api_key')
13
 
14
- """Initialize Claude service with API key"""
15
  if not ANTHROPIC_API_KEY:
16
  raise ValueError("Anthropic API key not found. Please ensure ANTHROPIC_API_KEY is set.")
17
 
@@ -23,105 +32,52 @@ class SelfApiWriter:
23
  self.initialized = False
24
 
25
  # Configuration for generation
26
- self.pages_per_chapter = 25 # Approximately 35,000 words
27
  self.words_per_page = 250
28
- self.max_iterations = 20
29
  self.max_tokens = 15000
30
 
31
  # Token encoding
32
  self.tokenizer = tiktoken.encoding_for_model("gpt-4")
33
-
34
- def _generate_with_progress(self,
35
- generate_func: callable,
36
- title: str,
37
- total_steps: int = 20) -> str:
38
- """
39
- Generate content with progress tracking
40
-
41
- :param generate_func: Function to generate content
42
- :param title: Title for progress display
43
- :param total_steps: Total number of generation steps
44
- :return: Generated content
45
- """
46
- # Create Streamlit progress bar
47
- progress_bar = st.progress(0, text=f"Generating {title}...")
48
-
49
- # Tracking variables
50
- full_content = ""
51
 
52
- try:
53
- for iteration in range(1, total_steps + 1):
54
- # Update progress
55
- progress = iteration / total_steps
56
- progress_bar.progress(
57
- min(int(progress * 100), 100),
58
- text=f"Generating {title}... (Iteration {iteration}/{total_steps})"
59
- )
60
-
61
- # Generate content
62
- new_content = generate_func(iteration)
63
- full_content += new_content
64
-
65
- # Stopping criteria
66
- if (len(full_content.split()) > self.pages_per_chapter * self.words_per_page
67
- or iteration == total_steps):
68
- break
69
-
70
- # Complete progress
71
- progress_bar.progress(100, text=f"Finished generating {title}")
72
-
73
- return full_content
74
-
75
- except Exception as e:
76
- st.error(f"Error generating {title}: {e}")
77
- progress_bar.empty()
78
- return f"Error generating {title}: {e}"
79
- finally:
80
- # Ensure progress bar is cleared
81
- progress_bar.empty()
82
 
83
  def _truncate_blueprint(self, blueprint: str, max_tokens: int = 15000) -> Tuple[str, str]:
84
- """
85
- Intelligently truncate the blueprint to fit within token limits
86
-
87
- :param blueprint: Full blueprint text
88
- :param max_tokens: Maximum tokens to keep
89
- :return: Tuple of (truncated_blueprint, overview_summary)
90
- """
91
- # Tokenize the blueprint
92
  tokens = self.tokenizer.encode(blueprint)
93
 
94
- # If within token limit, return full blueprint
95
  if len(tokens) <= max_tokens:
96
  return blueprint, ""
97
 
98
- # Try to preserve key sections intelligently
99
  truncated_tokens = tokens[:max_tokens]
100
  truncated_blueprint = self.tokenizer.decode(truncated_tokens)
101
 
102
- # Generate a summary of the truncated sections
103
  try:
104
  overview_response = self.client.messages.create(
105
  model=self.model,
106
  max_tokens=1000,
107
  system="You are an expert at creating concise summaries of book blueprints.",
108
- messages=[
109
- {
110
- "role": "user",
111
- "content": f"""The following blueprint was truncated due to length constraints.
112
- Please create a comprehensive overview that captures the essence of the
113
- truncated sections:
114
-
115
- Truncated Blueprint Ending:
116
- {blueprint[len(truncated_blueprint):]}
117
-
118
- Provide a summary that:
119
- 1. Captures key themes and intentions
120
- 2. Highlights main sections that were cut off
121
- 3. Ensures no critical information is lost
122
- 4. Is concise but comprehensive"""
123
- }
124
- ]
125
  )
126
  overview_summary = overview_response.content[0].text
127
  except Exception as e:
@@ -129,15 +85,39 @@ class SelfApiWriter:
129
 
130
  return truncated_blueprint, overview_summary
131
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
132
  def process_blueprint(self, blueprint: str) -> Dict[str, Any]:
133
  """Process blueprint to extract complete writing guidelines and structure"""
134
  try:
135
- # Add a spinner during blueprint processing
136
  with st.spinner("Processing blueprint..."):
137
- # Truncate blueprint if too long
138
  truncated_blueprint, overview_summary = self._truncate_blueprint(blueprint)
139
 
140
- # Prepare system prompt for blueprint processing
141
  system_prompt = """You are an expert book planner analyzing a blueprint.
142
  Extract ALL relevant information and return it in a structured format.
143
  Include:
@@ -172,38 +152,33 @@ class SelfApiWriter:
172
  "chapter_structure": ["Required chapter components"],
173
  "content_requirements": ["Specific content requirements"],
174
  "practical_elements": ["Required practical elements"]
175
- },
176
- "overview_summary": "Summary of truncated sections"
177
  }"""
178
 
179
- prompt = f"""Analyze this book blueprint and extract ALL information:
180
-
181
- {truncated_blueprint}
182
-
183
- {overview_summary}
184
-
185
- Return only the JSON structure without any additional text."""
186
-
187
  response = self.client.messages.create(
188
  model=self.model,
189
  max_tokens=4000,
190
  temperature=0,
191
  system=system_prompt,
192
- messages=[{"role": "user", "content": prompt}]
 
 
 
 
 
 
 
 
 
193
  )
194
 
195
  extracted_info = json.loads(response.content[0].text)
196
-
197
- # Store full original blueprint for reference
198
  extracted_info['full_original_blueprint'] = blueprint
199
 
200
- # Store extracted information
201
  self.book_info = extracted_info["book_info"]
202
  self.book_structure = extracted_info["structure"]
203
  self.writing_guidelines = extracted_info["guidelines"]
204
  self.initialized = True
205
-
206
- # Store full blueprint in context
207
  self.context['full_original_blueprint'] = blueprint
208
 
209
  return extracted_info
@@ -212,127 +187,222 @@ class SelfApiWriter:
212
  st.error(f"Error processing blueprint: {str(e)}")
213
  return None
214
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
215
  def write_introduction(self) -> str:
216
- """Generate the book's introduction"""
217
  if not self.initialized:
218
  raise ValueError("Writer not initialized. Process blueprint first.")
219
 
220
- def generate_intro_iteration(iteration: int) -> str:
 
 
 
 
 
 
221
  """Generate a single iteration of the introduction"""
222
- # Retrieve full original blueprint
223
  full_blueprint = self.context.get('full_original_blueprint', '')
224
 
225
- # Prepare system prompt with full context
226
  system_prompt = f"""You are writing the introduction for '{self.book_info.get('title', 'Untitled Book')}'
227
- Full Blueprint Context:
228
- {full_blueprint}
 
 
229
 
230
  Core Vision: {self.book_info.get('vision', '')}
231
  Target Audience: {self.book_info.get('target_audience', '')}
232
-
233
  Writing Style: {self.writing_guidelines.get('style', 'Academic and clear')}
234
- Tone: {self.writing_guidelines.get('tone', 'Professional')}
235
- """
236
 
237
  response = self.client.messages.create(
238
  model=self.model,
239
  max_tokens=2000,
240
  temperature=0.7,
241
  system=system_prompt,
242
- messages=[
243
- {
244
- "role": "user",
245
- "content": f"""Write the introduction: "{self.book_structure.get('introduction', 'Book Introduction')}"
246
-
247
- Iteration {iteration} of {self.max_iterations}
248
-
249
- Create an engaging opening that:
250
- 1. Introduces the book's core concept
251
- 2. Speaks directly to the target audience
252
- 3. Outlines the book's approach and structure
253
- 4. Sets the tone for the entire book
254
-
255
- Follow ALL provided guidelines for style, tone, and content."""
256
- }
257
- ]
258
  )
259
 
260
  return response.content[0].text
261
 
262
- # Generate with progress tracking
263
- full_intro_content = self._generate_with_progress(
264
- generate_intro_iteration,
265
  "Introduction"
266
  )
267
 
268
- # Store and return
269
  self.context['introduction'] = full_intro_content
270
  return full_intro_content
271
 
272
  def write_chapter(self, part_idx: int, chapter_idx: int) -> str:
273
- """Generate a chapter using extracted guidelines"""
274
  if not self.initialized:
275
  raise ValueError("Writer not initialized. Process blueprint first.")
276
 
277
- def generate_chapter_iteration(iteration: int) -> str:
278
- """Generate a single iteration of the chapter"""
279
- # Retrieve full original blueprint
280
- full_blueprint = self.context.get('full_original_blueprint', '')
281
-
 
 
 
282
  part = self.book_structure["parts"][part_idx]
283
  chapter_title = part["chapters"][chapter_idx]
284
  part_title = part["title"]
285
 
286
- # Prepare system prompt with full context
287
  system_prompt = f"""You are writing '{self.book_info.get('title', 'Untitled Book')}'
288
- Full Blueprint Context:
289
- {full_blueprint}
290
-
291
  Chapter: {chapter_title}
292
  Part: {part_title}
293
 
294
- Target Audience: {self.book_info.get('target_audience', '')}
295
- Writing Style: {self.writing_guidelines.get('style', 'Academic and clear')}
296
- Tone: {self.writing_guidelines.get('tone', 'Professional')}
297
 
298
- Chapter Structure Requirements:
299
- {', '.join(self.writing_guidelines.get('chapter_structure', []))}
300
 
301
- Content Requirements:
302
- {', '.join(self.writing_guidelines.get('content_requirements', []))}"""
 
 
 
 
303
 
304
  response = self.client.messages.create(
305
  model=self.model,
306
  max_tokens=2000,
307
  temperature=0.7,
308
  system=system_prompt,
309
- messages=[
310
- {
311
- "role": "user",
312
- "content": f"""Write Chapter: "{chapter_title}" in Part {part_idx + 1}: "{part_title}"
313
-
314
- Iteration {iteration} of {self.max_iterations}
315
-
316
- Follow ALL provided guidelines for:
317
- 1. Structure and organization
318
- 2. Style and tone
319
- 3. Practical elements and exercises
320
- 4. Content depth and requirements
321
-
322
- Begin writing the complete chapter now."""
323
- }
324
- ]
325
  )
326
 
327
  return response.content[0].text
328
 
329
- # Generate with progress tracking
330
- full_chapter_content = self._generate_with_progress(
331
- generate_chapter_iteration,
332
  f"Chapter: {self.book_structure['parts'][part_idx]['chapters'][chapter_idx]}"
333
  )
334
 
335
- # Store in context
336
  if 'parts' not in self.context:
337
  self.context['parts'] = []
338
 
@@ -349,6 +419,44 @@ class SelfApiWriter:
349
 
350
  return full_chapter_content
351
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
352
  def get_current_structure(self) -> Optional[Dict[str, Any]]:
353
  """Get current book structure and guidelines"""
354
  if not self.initialized:
 
5
  import tiktoken
6
  from typing import Dict, Any, Optional, List, Tuple, Generator
7
 
8
+ class ContentState:
9
+ """Tracks the state and progression of content generation"""
10
+ def __init__(self):
11
+ self.current_summary = ""
12
+ self.section_outlines = []
13
+ self.generated_sections = []
14
+ self.narrative_threads = []
15
+ self.key_points_covered = set()
16
+ self.transition_points = []
17
+
18
  class SelfApiWriter:
19
  def __init__(self):
20
+ """Initialize the Self.api writer with enhanced content tracking"""
21
  # Try to get API key from environment variables first, then from secrets
22
  ANTHROPIC_API_KEY = os.getenv('api_key')
23
 
 
24
  if not ANTHROPIC_API_KEY:
25
  raise ValueError("Anthropic API key not found. Please ensure ANTHROPIC_API_KEY is set.")
26
 
 
32
  self.initialized = False
33
 
34
  # Configuration for generation
35
+ self.pages_per_chapter = 25
36
  self.words_per_page = 250
37
+ self.max_iterations = 10 # Reduced from 20 to 10
38
  self.max_tokens = 15000
39
 
40
  # Token encoding
41
  self.tokenizer = tiktoken.encoding_for_model("gpt-4")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
 
43
+ # Add content state tracking
44
+ self.content_states = {}
45
+
46
+ def _initialize_content_state(self, content_id: str) -> None:
47
+ """Initialize a new content state tracker"""
48
+ if content_id not in self.content_states:
49
+ self.content_states[content_id] = ContentState()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
  def _truncate_blueprint(self, blueprint: str, max_tokens: int = 15000) -> Tuple[str, str]:
52
+ """Intelligently truncate the blueprint to fit within token limits"""
 
 
 
 
 
 
 
53
  tokens = self.tokenizer.encode(blueprint)
54
 
 
55
  if len(tokens) <= max_tokens:
56
  return blueprint, ""
57
 
 
58
  truncated_tokens = tokens[:max_tokens]
59
  truncated_blueprint = self.tokenizer.decode(truncated_tokens)
60
 
 
61
  try:
62
  overview_response = self.client.messages.create(
63
  model=self.model,
64
  max_tokens=1000,
65
  system="You are an expert at creating concise summaries of book blueprints.",
66
+ messages=[{
67
+ "role": "user",
68
+ "content": f"""The following blueprint was truncated due to length constraints.
69
+ Please create a comprehensive overview that captures the essence of the
70
+ truncated sections:
71
+
72
+ Truncated Blueprint Ending:
73
+ {blueprint[len(truncated_blueprint):]}
74
+
75
+ Provide a summary that:
76
+ 1. Captures key themes and intentions
77
+ 2. Highlights main sections that were cut off
78
+ 3. Ensures no critical information is lost
79
+ 4. Is concise but comprehensive"""
80
+ }]
 
 
81
  )
82
  overview_summary = overview_response.content[0].text
83
  except Exception as e:
 
85
 
86
  return truncated_blueprint, overview_summary
87
 
88
+ def _generate_section_outline(self, content_id: str, section_type: str, title: str) -> List[str]:
89
+ """Generate detailed outline for a section before writing"""
90
+ state = self.content_states[content_id]
91
+
92
+ outline_prompt = f"""Based on the current progress:
93
+ Previous Summary: {state.current_summary}
94
+ Key Points Covered: {', '.join(state.key_points_covered)}
95
+
96
+ Create a detailed outline for {section_type}: "{title}" that:
97
+ 1. Builds on previously covered material
98
+ 2. Introduces new concepts progressively
99
+ 3. Maintains narrative continuity
100
+ 4. Plans clear transitions between subsections
101
+
102
+ Return the outline as a list of specific points to cover."""
103
+
104
+ response = self.client.messages.create(
105
+ model=self.model,
106
+ max_tokens=1000,
107
+ temperature=0.5,
108
+ messages=[{"role": "user", "content": outline_prompt}]
109
+ )
110
+
111
+ outline = [point.strip() for point in response.content[0].text.split('\n') if point.strip()]
112
+ state.section_outlines = outline
113
+ return outline
114
+
115
  def process_blueprint(self, blueprint: str) -> Dict[str, Any]:
116
  """Process blueprint to extract complete writing guidelines and structure"""
117
  try:
 
118
  with st.spinner("Processing blueprint..."):
 
119
  truncated_blueprint, overview_summary = self._truncate_blueprint(blueprint)
120
 
 
121
  system_prompt = """You are an expert book planner analyzing a blueprint.
122
  Extract ALL relevant information and return it in a structured format.
123
  Include:
 
152
  "chapter_structure": ["Required chapter components"],
153
  "content_requirements": ["Specific content requirements"],
154
  "practical_elements": ["Required practical elements"]
155
+ }
 
156
  }"""
157
 
 
 
 
 
 
 
 
 
158
  response = self.client.messages.create(
159
  model=self.model,
160
  max_tokens=4000,
161
  temperature=0,
162
  system=system_prompt,
163
+ messages=[{
164
+ "role": "user",
165
+ "content": f"""Analyze this book blueprint and extract ALL information:
166
+
167
+ {truncated_blueprint}
168
+
169
+ {overview_summary}
170
+
171
+ Return only the JSON structure without any additional text."""
172
+ }]
173
  )
174
 
175
  extracted_info = json.loads(response.content[0].text)
 
 
176
  extracted_info['full_original_blueprint'] = blueprint
177
 
 
178
  self.book_info = extracted_info["book_info"]
179
  self.book_structure = extracted_info["structure"]
180
  self.writing_guidelines = extracted_info["guidelines"]
181
  self.initialized = True
 
 
182
  self.context['full_original_blueprint'] = blueprint
183
 
184
  return extracted_info
 
187
  st.error(f"Error processing blueprint: {str(e)}")
188
  return None
189
 
190
+ def _generate_transition(self, content_id: str, prev_content: str, next_content: str) -> str:
191
+ """Generate smooth transition between content sections"""
192
+ state = self.content_states[content_id]
193
+
194
+ transition_prompt = f"""Create a smooth transition between these sections:
195
+
196
+ Previous Section Summary: {self._summarize_text(prev_content)}
197
+ Next Section Key Points: {self._summarize_text(next_content)}
198
+
199
+ Create a natural bridge that:
200
+ 1. References relevant previous points
201
+ 2. Introduces upcoming concepts
202
+ 3. Maintains narrative flow
203
+ 4. Feels organic and not forced"""
204
+
205
+ response = self.client.messages.create(
206
+ model=self.model,
207
+ max_tokens=300,
208
+ temperature=0.7,
209
+ messages=[{"role": "user", "content": transition_prompt}]
210
+ )
211
+
212
+ transition = response.content[0].text
213
+ state.transition_points.append(transition)
214
+ return transition
215
+
216
+ def _generate_progressive_summary(self, content_id: str, content: str) -> str:
217
+ """Generate a running summary of content progress"""
218
+ summary_prompt = f"""Summarize the key points and narrative progression of:
219
+
220
+ {content}
221
+
222
+ Focus on:
223
+ 1. Main concepts introduced
224
+ 2. Key arguments developed
225
+ 3. Narrative threads established
226
+ 4. Important conclusions reached
227
+
228
+ Keep the summary concise but comprehensive."""
229
+
230
+ response = self.client.messages.create(
231
+ model=self.model,
232
+ max_tokens=500,
233
+ temperature=0.3,
234
+ messages=[{"role": "user", "content": summary_prompt}]
235
+ )
236
+
237
+ return response.content[0].text
238
+
239
+ def _generate_with_continuity(self,
240
+ generate_func: callable,
241
+ content_id: str,
242
+ title: str,
243
+ total_steps: int = 10) -> str: # Default steps reduced to 10
244
+ """Enhanced generation with content continuity tracking"""
245
+ progress_bar = st.progress(0, text=f"Generating {title}...")
246
+ full_content = ""
247
+ state = self.content_states[content_id]
248
+
249
+ try:
250
+ # Generate initial outline
251
+ outline = self._generate_section_outline(content_id, "section", title)
252
+ points_per_iteration = max(1, len(outline) // total_steps)
253
+
254
+ for iteration in range(1, total_steps + 1):
255
+ progress = iteration / total_steps
256
+ progress_bar.progress(
257
+ min(int(progress * 100), 100),
258
+ text=f"Generating {title}... (Iteration {iteration}/{total_steps})"
259
+ )
260
+
261
+ start_idx = (iteration - 1) * points_per_iteration
262
+ end_idx = min(start_idx + points_per_iteration, len(outline))
263
+ current_points = outline[start_idx:end_idx]
264
+
265
+ new_content = generate_func(
266
+ iteration=iteration,
267
+ previous_summary=state.current_summary,
268
+ points_to_cover=current_points,
269
+ narrative_threads=state.narrative_threads
270
+ )
271
+
272
+ state.generated_sections.append(new_content)
273
+
274
+ if iteration > 1:
275
+ transition = self._generate_transition(
276
+ content_id,
277
+ state.generated_sections[-2],
278
+ new_content
279
+ )
280
+ full_content += transition
281
+
282
+ full_content += new_content
283
+
284
+ state.current_summary = self._generate_progressive_summary(
285
+ content_id,
286
+ full_content
287
+ )
288
+ state.key_points_covered.update(current_points)
289
+
290
+ if len(full_content.split()) > self.pages_per_chapter * self.words_per_page:
291
+ break
292
+
293
+ conclusion = self._generate_conclusion(content_id, full_content)
294
+ full_content += conclusion
295
+
296
+ progress_bar.progress(100, text=f"Finished generating {title}")
297
+ return full_content
298
+
299
+ except Exception as e:
300
+ st.error(f"Error generating {title}: {e}")
301
+ progress_bar.empty()
302
+ return f"Error generating {title}: {e}"
303
+ finally:
304
+ progress_bar.empty()
305
+
306
  def write_introduction(self) -> str:
307
+ """Generate the book's introduction with enhanced continuity"""
308
  if not self.initialized:
309
  raise ValueError("Writer not initialized. Process blueprint first.")
310
 
311
+ content_id = "introduction"
312
+ self._initialize_content_state(content_id)
313
+
314
+ def generate_intro_iteration(iteration: int,
315
+ previous_summary: str,
316
+ points_to_cover: List[str],
317
+ narrative_threads: List[str]) -> str:
318
  """Generate a single iteration of the introduction"""
 
319
  full_blueprint = self.context.get('full_original_blueprint', '')
320
 
 
321
  system_prompt = f"""You are writing the introduction for '{self.book_info.get('title', 'Untitled Book')}'
322
+
323
+ Previous Content Summary: {previous_summary}
324
+ Points to Cover in This Section: {', '.join(points_to_cover)}
325
+ Active Narrative Threads: {', '.join(narrative_threads)}
326
 
327
  Core Vision: {self.book_info.get('vision', '')}
328
  Target Audience: {self.book_info.get('target_audience', '')}
 
329
  Writing Style: {self.writing_guidelines.get('style', 'Academic and clear')}
330
+ Tone: {self.writing_guidelines.get('tone', 'Professional')}"""
 
331
 
332
  response = self.client.messages.create(
333
  model=self.model,
334
  max_tokens=2000,
335
  temperature=0.7,
336
  system=system_prompt,
337
+ messages=[{
338
+ "role": "user",
339
+ "content": f"""Write the next section of the introduction, building on:
340
+ Previous Summary: {previous_summary}
341
+ Points to Cover: {', '.join(points_to_cover)}"""
342
+ }]
 
 
 
 
 
 
 
 
 
 
343
  )
344
 
345
  return response.content[0].text
346
 
347
+ full_intro_content = self._generate_with_continuity(
348
+ generate_intro_iteration,
349
+ content_id,
350
  "Introduction"
351
  )
352
 
 
353
  self.context['introduction'] = full_intro_content
354
  return full_intro_content
355
 
356
  def write_chapter(self, part_idx: int, chapter_idx: int) -> str:
357
+ """Generate a chapter using enhanced content continuity"""
358
  if not self.initialized:
359
  raise ValueError("Writer not initialized. Process blueprint first.")
360
 
361
+ content_id = f"part_{part_idx}_chapter_{chapter_idx}"
362
+ self._initialize_content_state(content_id)
363
+
364
+ def generate_chapter_iteration(iteration: int,
365
+ previous_summary: str,
366
+ points_to_cover: List[str],
367
+ narrative_threads: List[str]) -> str:
368
+ """Generate a single chapter iteration with continuity context"""
369
  part = self.book_structure["parts"][part_idx]
370
  chapter_title = part["chapters"][chapter_idx]
371
  part_title = part["title"]
372
 
 
373
  system_prompt = f"""You are writing '{self.book_info.get('title', 'Untitled Book')}'
 
 
 
374
  Chapter: {chapter_title}
375
  Part: {part_title}
376
 
377
+ Previous Content Summary: {previous_summary}
378
+ Points to Cover in This Section: {', '.join(points_to_cover)}
379
+ Active Narrative Threads: {', '.join(narrative_threads)}
380
 
381
+ Writing Guidelines: {json.dumps(self.writing_guidelines, indent=2)}
 
382
 
383
+ Create content that:
384
+ 1. Builds naturally on previous sections
385
+ 2. Develops the specified points
386
+ 3. Maintains consistent narrative threads
387
+ 4. Creates smooth transitions
388
+ 5. Follows all style and structure guidelines"""
389
 
390
  response = self.client.messages.create(
391
  model=self.model,
392
  max_tokens=2000,
393
  temperature=0.7,
394
  system=system_prompt,
395
+ messages=[{"role": "user", "content": f"Write the next section of Chapter: {chapter_title}"}]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
396
  )
397
 
398
  return response.content[0].text
399
 
400
+ full_chapter_content = self._generate_with_continuity(
401
+ generate_chapter_iteration,
402
+ content_id,
403
  f"Chapter: {self.book_structure['parts'][part_idx]['chapters'][chapter_idx]}"
404
  )
405
 
 
406
  if 'parts' not in self.context:
407
  self.context['parts'] = []
408
 
 
419
 
420
  return full_chapter_content
421
 
422
+ def _summarize_text(self, text: str) -> str:
423
+ """Generate a concise summary of text"""
424
+ response = self.client.messages.create(
425
+ model=self.model,
426
+ max_tokens=300,
427
+ temperature=0.3,
428
+ messages=[{
429
+ "role": "user",
430
+ "content": f"Summarize the key points from this text:\n\n{text}"
431
+ }]
432
+ )
433
+ return response.content[0].text
434
+
435
+ def _generate_conclusion(self, content_id: str, full_content: str) -> str:
436
+ """Generate a conclusion that ties everything together"""
437
+ state = self.content_states[content_id]
438
+
439
+ conclusion_prompt = f"""Create a conclusion that ties together:
440
+
441
+ Content Summary: {state.current_summary}
442
+ Key Points Covered: {', '.join(state.key_points_covered)}
443
+ Narrative Threads: {', '.join(state.narrative_threads)}
444
+
445
+ The conclusion should:
446
+ 1. Summarize main arguments
447
+ 2. Connect key themes
448
+ 3. Reinforce core messages
449
+ 4. Provide closure while maintaining interest"""
450
+
451
+ response = self.client.messages.create(
452
+ model=self.model,
453
+ max_tokens=500,
454
+ temperature=0.7,
455
+ messages=[{"role": "user", "content": conclusion_prompt}]
456
+ )
457
+
458
+ return response.content[0].text
459
+
460
  def get_current_structure(self) -> Optional[Dict[str, Any]]:
461
  """Get current book structure and guidelines"""
462
  if not self.initialized: