webxos committed on
Commit
0309a5f
·
verified ·
1 Parent(s): f2a086b

Upload 3 files

Browse files
Files changed (3) hide show
  1. Makefile +31 -0
  2. pencil_utils.hpp +73 -0
  3. pencilclaw.cpp +346 -0
Makefile ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Makefile for PENCILCLAW v1.0
CXX = g++
CC = gcc
CXXFLAGS = -std=c++17 -Wall -Wextra
CFLAGS = -Wall -Wextra
LDFLAGS = -lcurl

TARGET = pencilclaw
SOURCES_CPP = pencilclaw.cpp
SOURCES_C = cJSON.c
OBJECTS = pencilclaw.o cJSON.o

all: $(TARGET)

$(TARGET): $(OBJECTS)
	$(CXX) -o $@ $^ $(LDFLAGS)

pencilclaw.o: pencilclaw.cpp pencil_utils.hpp cJSON.h
	$(CXX) $(CXXFLAGS) -c $< -o $@

cJSON.o: cJSON.c cJSON.h
	$(CC) $(CFLAGS) -c $< -o $@

clean:
	rm -f $(OBJECTS) $(TARGET)
	# The program writes its workspace to ./pencil_data/ (PENCIL_DIR in
	# pencil_utils.hpp); the old rule removed ./pencilclaw/, a directory
	# nothing ever creates.
	rm -rf ./pencil_data/

run: $(TARGET)
	./$(TARGET)

.PHONY: all clean run
pencil_utils.hpp ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
// pencil_utils.hpp – Notepad logic: directories, templates, session log
#ifndef PENCIL_UTILS_HPP
#define PENCIL_UTILS_HPP

#include <iostream>
#include <fstream>
#include <string>
#include <filesystem>
#include <map>

namespace pencil {
// Workspace layout: everything the agent writes lives under PENCIL_DIR.
const std::string PENCIL_DIR = "./pencil_data/";
const std::string SESSION_LOG = "session.log";
const std::string BOOK_FILE = "book.txt";

// Ensure the working directory exists.
// Returns true when the directory exists afterwards (freshly created or
// already present); false only on a real filesystem error.
inline bool init_workspace() {
    std::error_code ec;
    // create_directories also creates any missing parent components and
    // reports no error when the directory already exists. The previous
    // create_directory call stored its bool result in an unused variable,
    // which warned under the project's -Wall -Wextra flags.
    std::filesystem::create_directories(PENCIL_DIR, ec);
    if (ec) {
        std::cerr << "Error creating directory " << PENCIL_DIR << ": " << ec.message() << std::endl;
        return false;
    }
    return true;
}

// Append a line (with trailing newline) to the session log. Returns true on success.
inline bool append_to_session(const std::string& text) {
    std::ofstream log(PENCIL_DIR + SESSION_LOG, std::ios::app);
    if (!log) return false;
    log << text << std::endl;
    return !log.fail();
}

// Read an entire file into a string; returns "" when the file cannot be opened.
inline std::string read_file(const std::string& path) {
    std::ifstream f(path);
    if (!f) return "";
    // Extra parentheses around the first iterator avoid the most-vexing-parse.
    std::string content((std::istreambuf_iterator<char>(f)),
                        std::istreambuf_iterator<char>());
    return content;
}

// Save text to a file (overwrite). Returns true on success.
inline bool save_text(const std::string& path, const std::string& text) {
    std::ofstream f(path);
    if (!f) return false;
    f << text;
    return !f.fail();
}

// Append a line (with trailing newline) to the book file. Returns true on success.
inline bool append_to_book(const std::string& text) {
    std::ofstream book(PENCIL_DIR + BOOK_FILE, std::ios::app);
    if (!book) return false;
    book << text << std::endl;
    return !book.fail();
}

// Return the prompt template for an ADA command ("/STORY", "/POEM", "/BOOK"),
// or "" for an unknown command.
inline std::string get_template(const std::string& cmd) {
    // static const: built once, never mutated after initialization.
    static const std::map<std::string, std::string> templates = {
        {"/STORY", "Write a creative story with the following title. Use vivid descriptions and a clear narrative."},
        {"/POEM", "Compose a poem about the given subject. Use rhythm and imagery."},
        {"/BOOK", "You are a novelist. Continue the existing book by writing a new chapter. Maintain style and characters."}
    };
    auto it = templates.find(cmd);
    if (it != templates.end()) return it->second;
    return "";
}
} // namespace pencil

#endif // PENCIL_UTILS_HPP
pencilclaw.cpp ADDED
@@ -0,0 +1,346 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
// pencilclaw.cpp – Command & Control loop (fixed version)
#include <cctype>      // std::isalnum
#include <cstdio>      // FILE, fgets (popen/pclose are POSIX, declared here)
#include <cstdlib>     // system
#include <cstring>
#include <filesystem>  // for temporary file cleanup
#include <fstream>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include <sys/wait.h>  // WIFEXITED / WEXITSTATUS for decoding system() status
#include <curl/curl.h>

#include "pencil_utils.hpp"
#include "cJSON.h"
16
+
// Global debug flag (can be toggled at runtime via /DEBUG)
static bool debug_enabled = false;

// ----------------------------------------------------------------------
// Helper: libcurl write callback. Appends each received chunk to the
// std::string supplied through CURLOPT_WRITEDATA and returns the byte
// count so libcurl knows the whole chunk was consumed.
static size_t WriteCallback(void *contents, size_t size, size_t nmemb, std::string *output) {
    size_t total = size * nmemb;
    // static_cast instead of the old C-style cast: intent-revealing and greppable.
    output->append(static_cast<char*>(contents), total);
    return total;
}
27
+
28
+ // ----------------------------------------------------------------------
29
+ // Send prompt to Ollama, return the generated text
30
+ std::string ask_ollama(const std::string &prompt) {
31
+ CURL *curl = curl_easy_init();
32
+ if (!curl) {
33
+ return "[Error] Failed to initialize curl.";
34
+ }
35
+
36
+ // Model name – change this to match your installed model (e.g., "llama3", "qwen2.5", "mistral")
37
+ const std::string MODEL_NAME = "qwen2.5:0.5b";
38
+
39
+ // Build JSON request
40
+ cJSON *root = cJSON_CreateObject();
41
+ if (!root) {
42
+ curl_easy_cleanup(curl);
43
+ return "[Error] Failed to create JSON object (out of memory).";
44
+ }
45
+ cJSON_AddStringToObject(root, "model", MODEL_NAME.c_str());
46
+ cJSON_AddStringToObject(root, "prompt", prompt.c_str());
47
+ cJSON_AddBoolToObject(root, "stream", false);
48
+ char *json_str = cJSON_PrintUnformatted(root);
49
+ cJSON_Delete(root);
50
+
51
+ if (!json_str) {
52
+ curl_easy_cleanup(curl);
53
+ return "[Error] Failed to format JSON request.";
54
+ }
55
+
56
+ if (debug_enabled) {
57
+ std::cerr << "\n[DEBUG] Request JSON: " << json_str << std::endl;
58
+ }
59
+
60
+ std::string response_string;
61
+ struct curl_slist *headers = nullptr;
62
+ headers = curl_slist_append(headers, "Content-Type: application/json");
63
+
64
+ curl_easy_setopt(curl, CURLOPT_URL, "http://localhost:11434/api/generate");
65
+ curl_easy_setopt(curl, CURLOPT_POSTFIELDS, json_str);
66
+ curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
67
+ curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
68
+ curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response_string);
69
+ curl_easy_setopt(curl, CURLOPT_TIMEOUT, 60L); // increased timeout to 60 seconds
70
+
71
+ // Perform request
72
+ CURLcode res = curl_easy_perform(curl);
73
+ if (res != CURLE_OK) {
74
+ std::string err = "[Error] curl failed: ";
75
+ err += curl_easy_strerror(res);
76
+ free(json_str);
77
+ curl_slist_free_all(headers);
78
+ curl_easy_cleanup(curl);
79
+ return err;
80
+ }
81
+
82
+ // Check HTTP response code
83
+ long http_code = 0;
84
+ curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_code);
85
+ if (http_code != 200) {
86
+ std::string err = "[Error] HTTP " + std::to_string(http_code) + " response from Ollama";
87
+ if (http_code == 404) {
88
+ err += ".\n Make sure Ollama is running and the model '" + MODEL_NAME + "' is installed (try 'ollama pull " + MODEL_NAME + "').";
89
+ }
90
+ free(json_str);
91
+ curl_slist_free_all(headers);
92
+ curl_easy_cleanup(curl);
93
+ return err;
94
+ }
95
+
96
+ curl_slist_free_all(headers);
97
+ curl_easy_cleanup(curl);
98
+ free(json_str);
99
+
100
+ if (debug_enabled) {
101
+ std::cerr << "[DEBUG] Raw response: " << response_string << std::endl;
102
+ }
103
+
104
+ // Parse JSON response
105
+ cJSON *json = cJSON_Parse(response_string.c_str());
106
+ if (!json) {
107
+ return "[Error] Failed to parse Ollama JSON (invalid JSON).";
108
+ }
109
+ cJSON *resp = cJSON_GetObjectItem(json, "response");
110
+ std::string result;
111
+ if (resp && resp->valuestring) {
112
+ result = resp->valuestring;
113
+ } else {
114
+ // Check for error field
115
+ cJSON *error = cJSON_GetObjectItem(json, "error");
116
+ if (error && error->valuestring) {
117
+ result = "[Error from Ollama] " + std::string(error->valuestring);
118
+ } else {
119
+ result = "[Error] No 'response' field in Ollama output.";
120
+ }
121
+ }
122
+ cJSON_Delete(json);
123
+ return result;
124
+ }
125
+
// ----------------------------------------------------------------------
// Extract fenced code blocks delimited by ``` ... ``` from `text`.
// The language-specifier line (e.g. "```cpp") is skipped; each returned
// string contains only the code between the fences.
std::vector<std::string> extract_code_blocks(const std::string &text) {
    std::vector<std::string> blocks;
    size_t pos = 0;
    while (true) {
        size_t start = text.find("```", pos);
        if (start == std::string::npos) break;
        size_t end = text.find("```", start + 3);
        if (end == std::string::npos) break; // unterminated fence: stop

        // Content starts after the first newline following the opening
        // fence. Check for npos BEFORE adding 1: the old code computed
        // find('\n', start) + 1, which wraps npos to 0, so its npos check
        // never fired and a fence with no newline silently extracted from
        // the very start of the string.
        size_t newline = text.find('\n', start);
        size_t content_start;
        if (newline == std::string::npos || newline > end) {
            // Single-line fence such as ```code``` – no specifier line.
            content_start = start + 3;
        } else {
            content_start = newline + 1;
        }
        blocks.push_back(text.substr(content_start, end - content_start));
        pos = end + 3;
    }
    return blocks;
}
152
+
153
+ // ----------------------------------------------------------------------
154
+ // Execute a code block: save to temp file, compile, run
155
+ bool execute_code(const std::string &code) {
156
+ // Save to a temporary file
157
+ std::string tmp_cpp = pencil::PENCIL_DIR + "temp_code.cpp";
158
+ std::string tmp_exe = pencil::PENCIL_DIR + "temp_code";
159
+ std::ofstream out(tmp_cpp);
160
+ if (!out) {
161
+ std::cerr << "Failed to create temporary file: " << tmp_cpp << std::endl;
162
+ return false;
163
+ }
164
+ out << code;
165
+ out.close();
166
+ if (out.fail()) {
167
+ std::cerr << "Failed to write code to temporary file." << std::endl;
168
+ return false;
169
+ }
170
+
171
+ // Compile with g++
172
+ std::string compile_cmd = "g++ " + tmp_cpp + " -o " + tmp_exe + " 2>&1";
173
+ FILE *pipe = popen(compile_cmd.c_str(), "r");
174
+ if (!pipe) {
175
+ std::cerr << "Failed to run compiler." << std::endl;
176
+ return false;
177
+ }
178
+ char buffer[128];
179
+ std::string compile_out;
180
+ while (fgets(buffer, sizeof buffer, pipe) != nullptr) {
181
+ compile_out += buffer;
182
+ }
183
+ int status = pclose(pipe);
184
+ if (status != 0) {
185
+ std::cerr << "Compilation failed:\n" << compile_out << std::endl;
186
+ // Clean up source file even on failure
187
+ std::filesystem::remove(tmp_cpp);
188
+ return false;
189
+ }
190
+
191
+ // Run the compiled program
192
+ std::string run_cmd = tmp_exe;
193
+ int ret = system(run_cmd.c_str());
194
+ std::cout << "\n[Program exited with code " << ret << "]" << std::endl;
195
+
196
+ // Clean up temporary files
197
+ std::filesystem::remove(tmp_cpp);
198
+ std::filesystem::remove(tmp_exe);
199
+ return true;
200
+ }
201
+
// ----------------------------------------------------------------------
// Sanitize user input for use as a filename: keep [A-Za-z0-9._-], replace
// everything else (including path separators) with '_', and never return
// an empty or dot-only name.
std::string sanitize_filename(const std::string &input) {
    std::string safe;
    safe.reserve(input.size());
    for (char c : input) {
        // std::isalnum has undefined behavior for negative char values (any
        // non-ASCII byte where char is signed); widen through unsigned char
        // first — the old code passed the raw char.
        if (std::isalnum(static_cast<unsigned char>(c)) || c == '.' || c == '-' || c == '_')
            safe += c;
        else
            safe += '_';
    }
    // Prevent empty or dot-only names ("." / ".." would escape or hide files).
    if (safe.empty() || safe == "." || safe == "..")
        safe = "unnamed";
    return safe;
}
218
+
219
+ // ----------------------------------------------------------------------
220
+ int main() {
221
+ // Prepare workspace
222
+ if (!pencil::init_workspace()) {
223
+ std::cerr << "Fatal error: cannot create workspace directory." << std::endl;
224
+ return 1;
225
+ }
226
+
227
+ std::cout << "PENCILCLAW v1.1 – ADA‑style writing agent (local Ollama)\n";
228
+ std::cout << "Type /HELP for commands.\n";
229
+
230
+ std::string last_response; // store last LLM output for /EXECUTE
231
+
232
+ while (true) {
233
+ std::cout << "\n> ";
234
+ std::string line;
235
+ std::getline(std::cin, line);
236
+ if (line.empty()) continue;
237
+
238
+ if (line[0] == '/') {
239
+ // ADA command
240
+ std::string cmd;
241
+ std::string arg;
242
+ size_t sp = line.find(' ');
243
+ if (sp == std::string::npos) {
244
+ cmd = line;
245
+ } else {
246
+ cmd = line.substr(0, sp);
247
+ arg = line.substr(sp + 1);
248
+ }
249
+
250
+ if (cmd == "/EXIT") {
251
+ break;
252
+ }
253
+ else if (cmd == "/HELP") {
254
+ std::cout << "Available commands:\n";
255
+ std::cout << " /HELP – this help\n";
256
+ std::cout << " /STORY <title> – write a story\n";
257
+ std::cout << " /POEM <subject> – compose a poem\n";
258
+ std::cout << " /BOOK <chapter> – write a chapter (appends to book.txt)\n";
259
+ std::cout << " /EXECUTE – compile & run code from last response\n";
260
+ std::cout << " /DEBUG – toggle debug output\n";
261
+ std::cout << " /EXIT – quit\n";
262
+ }
263
+ else if (cmd == "/DEBUG") {
264
+ debug_enabled = !debug_enabled;
265
+ std::cout << "Debug mode " << (debug_enabled ? "enabled" : "disabled") << ".\n";
266
+ }
267
+ else if (cmd == "/STORY" || cmd == "/POEM") {
268
+ if (arg.empty()) {
269
+ std::cout << "Please provide a " << (cmd == "/STORY" ? "title" : "subject") << ".\n";
270
+ continue;
271
+ }
272
+ // Build prompt with template
273
+ std::string prompt = pencil::get_template(cmd) + "\n\n" + arg;
274
+ std::cout << "Asking Ollama...\n";
275
+ last_response = ask_ollama(prompt);
276
+ std::cout << last_response << "\n";
277
+
278
+ // Save to a file using sanitized filename
279
+ std::string safe_arg = sanitize_filename(arg);
280
+ std::string filename = pencil::PENCIL_DIR + safe_arg + ".txt";
281
+ if (!pencil::save_text(filename, last_response))
282
+ std::cerr << "Warning: could not save file " << filename << std::endl;
283
+ pencil::append_to_session("User: " + line);
284
+ pencil::append_to_session("Assistant: " + last_response);
285
+ }
286
+ else if (cmd == "/BOOK") {
287
+ if (arg.empty()) {
288
+ std::cout << "Please provide a chapter name.\n";
289
+ continue;
290
+ }
291
+ // Build context: previous book content
292
+ std::string book_content = pencil::read_file(pencil::PENCIL_DIR + pencil::BOOK_FILE);
293
+ std::string prompt = pencil::get_template("/BOOK") + "\n\nExisting book content:\n" +
294
+ book_content + "\n\nWrite the next chapter: " + arg;
295
+ std::cout << "Asking Ollama...\n";
296
+ last_response = ask_ollama(prompt);
297
+ std::cout << last_response << "\n";
298
+
299
+ // Append chapter to book.txt
300
+ if (!pencil::append_to_book("\n--- " + arg + " ---\n" + last_response))
301
+ std::cerr << "Warning: could not append to book file." << std::endl;
302
+ pencil::append_to_session("User: " + line);
303
+ pencil::append_to_session("Assistant: " + last_response);
304
+ }
305
+ else if (cmd == "/EXECUTE") {
306
+ if (last_response.empty()) {
307
+ std::cout << "No previous response to execute from.\n";
308
+ continue;
309
+ }
310
+ auto blocks = extract_code_blocks(last_response);
311
+ if (blocks.empty()) {
312
+ std::cout << "No code blocks found in last response.\n";
313
+ continue;
314
+ }
315
+ // Optional security confirmation
316
+ std::cout << "WARNING: You are about to execute code generated by an AI.\n";
317
+ std::cout << "Only proceed if you trust the source. Continue? (y/n): ";
318
+ std::string confirm;
319
+ std::getline(std::cin, confirm);
320
+ if (confirm != "y" && confirm != "Y") {
321
+ std::cout << "Execution cancelled.\n";
322
+ continue;
323
+ }
324
+ // Execute the first block
325
+ std::cout << "Executing code block...\n";
326
+ if (execute_code(blocks[0])) {
327
+ std::cout << "Execution finished.\n";
328
+ } else {
329
+ std::cout << "Execution failed.\n";
330
+ }
331
+ }
332
+ else {
333
+ std::cout << "Unknown command. Type /HELP for list.\n";
334
+ }
335
+ } else {
336
+ // Non‑command: treat as free prompt
337
+ std::cout << "Sending to Ollama...\n";
338
+ last_response = ask_ollama(line);
339
+ std::cout << last_response << "\n";
340
+ pencil::append_to_session("User: " + line);
341
+ pencil::append_to_session("Assistant: " + last_response);
342
+ }
343
+ }
344
+
345
+ return 0;
346
+ }