import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it } from "vitest";
import { appendCronRunLog, readCronRunLogEntries, resolveCronRunLogPath } from "./run-log.js";

describe("cron run log", () => {
  // Pure path computation: no filesystem access, so no temp dir / cleanup needed.
  it("resolves store path to per-job runs/<jobId>.jsonl", () => {
    const storePath = path.join(os.tmpdir(), "cron", "jobs.json");
    const p = resolveCronRunLogPath({ storePath, jobId: "job-1" });
    expect(p.endsWith(path.join(os.tmpdir(), "cron", "runs", "job-1.jsonl"))).toBe(true);
  });

  it("appends JSONL and prunes by line count", async () => {
    const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cron-log-"));
    try {
      const logPath = path.join(dir, "runs", "job-1.jsonl");

      // maxBytes: 1 forces the size threshold to trip on every append, so the
      // log should be pruned down to the newest keepLines (3) entries.
      for (let i = 0; i < 10; i++) {
        await appendCronRunLog(
          logPath,
          {
            ts: 1000 + i,
            jobId: "job-1",
            action: "finished",
            status: "ok",
            durationMs: i,
          },
          { maxBytes: 1, keepLines: 3 },
        );
      }

      const raw = await fs.readFile(logPath, "utf-8");
      const lines = raw
        .split("\n")
        .map((l) => l.trim())
        .filter(Boolean);
      expect(lines.length).toBe(3);
      // Newest entry (ts 1009) must survive pruning as the last retained line.
      const last = JSON.parse(lines[2] ?? "{}") as { ts?: number };
      expect(last.ts).toBe(1009);
    } finally {
      // Run cleanup even when an expect() above throws, so failing test runs
      // do not leak temp directories.
      await fs.rm(dir, { recursive: true, force: true });
    }
  });

  it("reads newest entries and filters by jobId", async () => {
    const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-cron-log-read-"));
    try {
      const logPathA = path.join(dir, "runs", "a.jsonl");
      const logPathB = path.join(dir, "runs", "b.jsonl");

      await appendCronRunLog(logPathA, {
        ts: 1,
        jobId: "a",
        action: "finished",
        status: "ok",
      });
      await appendCronRunLog(logPathB, {
        ts: 2,
        jobId: "b",
        action: "finished",
        status: "error",
        error: "nope",
        summary: "oops",
      });
      await appendCronRunLog(logPathA, {
        ts: 3,
        jobId: "a",
        action: "finished",
        status: "skipped",
      });

      // No filter: everything in file A, oldest-to-newest.
      const allA = await readCronRunLogEntries(logPathA, { limit: 10 });
      expect(allA.map((e) => e.jobId)).toEqual(["a", "a"]);

      // Matching jobId filter keeps both entries.
      const onlyA = await readCronRunLogEntries(logPathA, {
        limit: 10,
        jobId: "a",
      });
      expect(onlyA.map((e) => e.ts)).toEqual([1, 3]);

      // limit: 1 returns only the newest entry.
      const lastOne = await readCronRunLogEntries(logPathA, { limit: 1 });
      expect(lastOne.map((e) => e.ts)).toEqual([3]);

      // Optional fields (summary) round-trip through the JSONL encoding.
      const onlyB = await readCronRunLogEntries(logPathB, {
        limit: 10,
        jobId: "b",
      });
      expect(onlyB[0]?.summary).toBe("oops");

      // A jobId filter that matches nothing yields an empty result.
      const wrongFilter = await readCronRunLogEntries(logPathA, {
        limit: 10,
        jobId: "b",
      });
      expect(wrongFilter).toEqual([]);
    } finally {
      // Run cleanup even when an expect() above throws, so failing test runs
      // do not leak temp directories.
      await fs.rm(dir, { recursive: true, force: true });
    }
  });
});