Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
5d45365
token refresh fix
arturren Oct 12, 2025
7acc2c9
Revert "token refresh fix"
arturren Oct 12, 2025
dfc4650
change to rup theme
arturren Nov 6, 2025
678de16
fix
arturren Nov 6, 2025
addd16f
added shadow
arturren Nov 6, 2025
a8ada2c
removed last blue
arturren Nov 6, 2025
921ad8d
remove rest of the blue
arturren Nov 6, 2025
8f267ff
deutsche greeting
arturren Nov 6, 2025
75685bb
changed language
arturren Nov 6, 2025
c41d840
übersetzung
arturren Nov 6, 2025
4b81981
changes for widget
jardinast Nov 11, 2025
201f39d
fixing errors
jardinast Nov 11, 2025
faf8895
removing any
jardinast Nov 11, 2025
8b5ecb5
revert changes
jardinast Nov 11, 2025
66e11d8
added prompt sidebar
jardinast Nov 11, 2025
0b8733a
fixed deployment errors
jardinast Nov 11, 2025
3c3b1a6
2nd fix
jardinast Nov 11, 2025
29e3a03
3rd change
jardinast Nov 11, 2025
790d948
button blue as test
jardinast Nov 11, 2025
d9a79ca
codex saves
jardinast Nov 11, 2025
35c0dbe
rendering sidebar hopefully
jardinast Nov 11, 2025
2bec18d
renewing prompt on every click
jardinast Nov 11, 2025
8cc9db9
red button
jardinast Nov 11, 2025
cf2dc4c
Merge branch 'openai:main' into main
lkssmdt Nov 12, 2025
350d057
right panel for tool calls
jardinast Nov 12, 2025
663a298
reworked trace wiring
jardinast Nov 12, 2025
fe713a0
revert changes
jardinast Nov 12, 2025
3121e9f
Add token usage tracking with sidebar, Prisma database integration, a…
lkssmdt Nov 12, 2025
b96751a
Merge remote develop branch with token usage tracking features
lkssmdt Nov 12, 2025
9428428
Add minimal sidebar toggle system with icon buttons
lkssmdt Nov 12, 2025
34ba69b
Redesign sidebar UI: align buttons to top, use red accent, minimal wh…
lkssmdt Nov 12, 2025
c5bc641
Position sidebar panel and buttons to align with chat history button
lkssmdt Nov 12, 2025
a8db325
Implement real-time token tracking with auto-refresh
lkssmdt Nov 12, 2025
32898e5
Add token tracking test script
lkssmdt Nov 12, 2025
ae1ce55
feat: add token usage sidebar
lkssmdt Nov 12, 2025
f4ffcea
feat: persist per-thread token usage
lkssmdt Nov 13, 2025
42d5209
feat: persist per-thread tokens and model costs
lkssmdt Nov 13, 2025
79b6949
fix: ensure token usage updates per response
lkssmdt Nov 13, 2025
730c112
removed token usage
jardinast Nov 14, 2025
f52d802
Gender und rs fix
jardinast Nov 14, 2025
b81f69c
Merge branch 'main' into testing
Jardi-nohup Nov 14, 2025
53e8228
Merge branch 'main' into testing
Jardi-nohup Nov 14, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -40,3 +40,5 @@ yarn-error.log*
# typescript
*.tsbuildinfo
next-env.d.ts

/lib/generated/prisma
65 changes: 53 additions & 12 deletions app/App.tsx
Original file line number Diff line number Diff line change
@@ -1,32 +1,73 @@
"use client";

import { useCallback } from "react";
import { ChatKitPanel, type FactAction } from "@/components/ChatKitPanel";
import { useColorScheme } from "@/hooks/useColorScheme";
import { useCallback, useEffect } from "react";
import {
ChatKitPanel,
type FactAction,
type ResponseUsage,
} from "@/components/ChatKitPanel";
import type { ColorScheme } from "@/hooks/useColorScheme";

const FORCED_SCHEME: ColorScheme = "light";

export default function App() {
const { scheme, setScheme } = useColorScheme();

useEffect(() => {
if (typeof document === "undefined") {
return;
}
const root = document.documentElement;
root.dataset.colorScheme = FORCED_SCHEME;
root.classList.remove("dark");
root.style.colorScheme = FORCED_SCHEME;
}, []);

const handleWidgetAction = useCallback(async (action: FactAction) => {
if (process.env.NODE_ENV !== "production") {
console.info("[ChatKitPanel] widget action", action);
}
}, []);

const handleResponseEnd = useCallback(() => {
if (process.env.NODE_ENV !== "production") {
console.debug("[ChatKitPanel] response end");
}
const handleResponseEnd = useCallback(
async (sessionId?: string, usage?: ResponseUsage, threadId?: string | null) => {
if (process.env.NODE_ENV !== "production") {
console.debug("[ChatKitPanel] response end", sessionId, usage, threadId);
}
if (!sessionId || !usage) return;
try {
await fetch("/api/usage/report", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
sessionId,
threadId,
model: usage.model,
promptTokens: usage.promptTokens,
completionTokens: usage.completionTokens,
totalTokens: usage.totalTokens,
}),
});
} catch (error) {
console.error("Failed to report usage:", error);
}
},
[]
);

const handleInsertPrompt = useCallback(async (text: string) => {
// This will be passed to ChatKitPanel to handle prompt insertion
console.log("Insert prompt:", text);
}, []);

return (
<main className="flex min-h-screen flex-col items-center justify-end bg-slate-100 dark:bg-slate-950">
<div className="mx-auto w-full max-w-5xl">
<main className="flex min-h-screen bg-white">
<div className="flex flex-1 flex-col px-4 py-6 lg:px-8">
<ChatKitPanel
theme={scheme}
theme={FORCED_SCHEME}
onWidgetAction={handleWidgetAction}
onResponseEnd={handleResponseEnd}
onThemeRequest={setScheme}
onThemeRequest={() => {}}
onInsertPrompt={handleInsertPrompt}
/>
</div>
</main>
Expand Down
1 change: 1 addition & 0 deletions app/api/create-session/pm/prompt_content/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
// Alias route: re-exports the shared prompt-content handler so it is also
// reachable under /api/create-session/pm/prompt_content.
// NOTE(review): presumably kept for a client that resolves the pm endpoints
// relative to /api/create-session — confirm against the widget code.
export { runtime, GET } from "@/app/api/pm/prompt_content/route";
1 change: 1 addition & 0 deletions app/api/create-session/pm/tree/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
// Alias route: re-exports the shared category-tree handler so it is also
// reachable under /api/create-session/pm/tree.
// NOTE(review): presumably kept for a client that resolves the pm endpoints
// relative to /api/create-session — confirm against the widget code.
export { runtime, GET } from "@/app/api/pm/tree/route";
5 changes: 5 additions & 0 deletions app/api/create-session/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -113,9 +113,14 @@ export async function POST(request: Request): Promise<Response> {

const clientSecret = upstreamJson?.client_secret ?? null;
const expiresAfter = upstreamJson?.expires_after ?? null;

// Session ID for tracking (use userId)
const sessionId = userId;

const responsePayload = {
client_secret: clientSecret,
expires_after: expiresAfter,
session_id: sessionId,
};

return buildJsonResponse(
Expand Down
32 changes: 32 additions & 0 deletions app/api/pm/prompt_content/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
// Force the Node.js runtime — this route uses the `pg` pool, which needs
// Node socket APIs not available on the Edge runtime.
export const runtime = "nodejs";

import { NextRequest, NextResponse } from "next/server";
import { pool } from "@/lib/pg";

/**
 * GET /api/pm/prompt_content?id=<promptId>
 *
 * Returns `{ id, content }` where `content` is the newest live version of
 * the requested prompt, or "" when the id is missing/invalid, no live
 * version exists, or the query fails (the failure case also carries a 500).
 */
export async function GET(req: NextRequest) {
  // Missing or non-numeric ids coerce to 0, which is treated as "no prompt".
  const promptId = Number(new URL(req.url).searchParams.get("id") || 0);
  if (!promptId) return NextResponse.json({ id: promptId, content: "" });

  const client = await pool.connect();
  try {
    // Newest live version wins.
    const result = await client.query<{ content: string }>(
      `
      SELECT content
      FROM prompt_versions
      WHERE prompt_id = $1 AND is_live = TRUE
      ORDER BY version_number DESC
      LIMIT 1
      `,
      [promptId]
    );
    const content = result.rows[0]?.content ?? "";
    // no-store: prompt content is editable and must never be cached.
    return NextResponse.json(
      { id: promptId, content },
      { headers: { "Cache-Control": "no-store" } }
    );
  } catch (e) {
    console.error("/api/pm/prompt_content error", e);
    // Degrade to an empty prompt body, but signal the failure via 500.
    return NextResponse.json({ id: promptId, content: "" }, { status: 500 });
  } finally {
    // Always return the connection to the pool.
    client.release();
  }
}
80 changes: 80 additions & 0 deletions app/api/pm/tree/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
// Force the Node.js runtime — this route uses the `pg` pool, which needs
// Node socket APIs not available on the Edge runtime.
export const runtime = "nodejs";

import { NextResponse } from "next/server";
import { pool } from "@/lib/pg";

// Raw rows as returned by the two SQL queries in GET below.
type CatRow = { id: number; name: string; parent_id: number | null };
type PromptRow = { id: number; title: string; category_id: number };

// Shape of the JSON tree served to the client: nested categories, each
// holding the prompts that belong directly to it.
type PromptSummary = { id: number; title: string };
type CategoryNode = {
  id: number;
  name: string;
  children: CategoryNode[];
  prompts: PromptSummary[];
};

/**
 * GET /api/pm/tree
 *
 * Builds the category tree: every category becomes a node, nodes are nested
 * under their parent (orphans fall back to root level), and each node lists
 * the prompts in that category that have at least one live version. The
 * whole tree is alphabetized at every depth. Returns `{ roots }`, or
 * `{ error: "failed_to_load_tree" }` with a 500 on failure.
 */
export async function GET() {
  const client = await pool.connect();
  try {
    // Load all categories, and only those prompts with a live version.
    const catResult = await client.query<CatRow>(
      "SELECT id, name, parent_id FROM categories"
    );
    const promptResult = await client.query<PromptRow>(`
      SELECT p.id, p.title, p.category_id
      FROM prompts p
      WHERE EXISTS (
        SELECT 1 FROM prompt_versions v
        WHERE v.prompt_id = p.id AND v.is_live = TRUE
      )
      ORDER BY p.title ASC
    `);

    // Index an empty tree node for every category.
    const nodeById = new Map<number, CategoryNode>();
    for (const cat of catResult.rows) {
      nodeById.set(cat.id, {
        id: cat.id,
        name: cat.name,
        children: [],
        prompts: [],
      });
    }

    // Link each node under its parent; nodes without a (resolvable) parent
    // become roots, so a dangling parent_id degrades gracefully.
    const roots: CategoryNode[] = [];
    for (const cat of catResult.rows) {
      const node = nodeById.get(cat.id);
      if (!node) continue;
      const parent =
        cat.parent_id !== null ? nodeById.get(cat.parent_id) : undefined;
      if (parent) {
        parent.children.push(node);
      } else {
        roots.push(node);
      }
    }

    // Drop each prompt into its category; unknown category ids are ignored.
    for (const prompt of promptResult.rows) {
      nodeById
        .get(prompt.category_id)
        ?.prompts.push({ id: prompt.id, title: prompt.title });
    }

    // Alphabetize children and prompts recursively.
    const sortTree = (node: CategoryNode): void => {
      node.children.sort((a, b) => a.name.localeCompare(b.name));
      node.prompts.sort((a, b) => a.title.localeCompare(b.title));
      node.children.forEach(sortTree);
    };
    roots.forEach(sortTree);

    // no-store: the tree reflects live editing state and must not be cached.
    return NextResponse.json(
      { roots },
      { headers: { "Cache-Control": "no-store" } }
    );
  } catch (e) {
    console.error("/api/pm/tree error", e);
    return NextResponse.json(
      { error: "failed_to_load_tree" },
      { status: 500 }
    );
  } finally {
    // Always return the connection to the pool.
    client.release();
  }
}
128 changes: 128 additions & 0 deletions app/api/usage/fetch-openai/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
import { NextRequest, NextResponse } from "next/server";
import { prisma } from "@/lib/prisma";
import { calculateCost } from "@/lib/pricing";

// Force the Node.js runtime — Prisma's query engine requires Node APIs that
// are not available on the Edge runtime.
export const runtime = "nodejs";

// Response shape of the (legacy) OpenAI /v1/usage endpoint, as consumed by
// GET below. Only `data` is processed; the per-modality arrays and paging
// fields are declared for completeness but currently unused.
interface OpenAIUsageResponse {
  object: string;
  data: Array<{
    // Unix seconds for the aggregation bucket this row belongs to.
    aggregation_timestamp: number;
    n_requests: number;
    // Operation/endpoint name; NOTE(review): the model snapshot appears to
    // live in `snapshot_id`, not here — confirm against the Usage API docs.
    operation: string;
    snapshot_id: string;
    // Prompt-side and completion-side token totals for the bucket.
    n_context_tokens_total: number;
    n_generated_tokens_total: number;
  }>;
  ft_data: Array<Record<string, unknown>>;
  dalle_api_data: Array<Record<string, unknown>>;
  whisper_api_data: Array<Record<string, unknown>>;
  tts_api_data: Array<Record<string, unknown>>;
  has_more: boolean;
  next_page?: string;
}

/**
 * GET /api/usage/fetch-openai
 *
 * Pulls aggregated usage rows from the OpenAI Usage API, folds each row into
 * the local per-day `dailyAggregate` table (upsert: increment existing
 * counters or create the day), and returns the processed rows.
 *
 * Query params: `start_date` / `end_date` (YYYY-MM-DD), forwarded upstream.
 * Responses: 200 `{ success, recordsProcessed, data }`, 500 on missing API
 * key or internal failure, upstream status on an OpenAI error.
 */
export async function GET(request: NextRequest) {
  try {
    const apiKey = process.env.OPENAI_API_KEY;
    if (!apiKey) {
      return NextResponse.json(
        { error: "OpenAI API key not configured" },
        { status: 500 }
      );
    }

    const { searchParams } = new URL(request.url);
    const startDate = searchParams.get("start_date"); // Format: YYYY-MM-DD
    const endDate = searchParams.get("end_date"); // Format: YYYY-MM-DD

    // Build URL for OpenAI Usage API.
    // NOTE(review): the legacy /v1/usage endpoint documents a single `date`
    // parameter; confirm upstream actually honors `end_date` as a range end.
    let url = "https://api.openai.com/v1/usage";
    const params = new URLSearchParams();

    if (startDate) params.append("date", startDate);
    if (endDate) params.append("end_date", endDate);

    if (params.toString()) {
      url += `?${params.toString()}`;
    }

    // Fetch usage data from OpenAI.
    const response = await fetch(url, {
      method: "GET",
      headers: {
        Authorization: `Bearer ${apiKey}`,
        "Content-Type": "application/json",
      },
    });

    if (!response.ok) {
      const errorText = await response.text();
      console.error("OpenAI API error:", errorText);
      return NextResponse.json(
        { error: "Failed to fetch usage from OpenAI", details: errorText },
        { status: response.status }
      );
    }

    const usageData: OpenAIUsageResponse = await response.json();

    // Process and store usage data.
    const processedRecords = [];

    for (const record of usageData.data) {
      // Collapse the bucket timestamp to its UTC day for daily aggregation.
      const date = new Date(record.aggregation_timestamp * 1000);
      const dateOnly = new Date(date.toISOString().split("T")[0]);

      const inputTokens = record.n_context_tokens_total || 0;
      const outputTokens = record.n_generated_tokens_total || 0;
      const totalTokens = inputTokens + outputTokens;

      // FIX: the model snapshot (e.g. "gpt-4o-2024-11-20") is reported in
      // `snapshot_id`; `operation` is the endpoint name (e.g. "completions").
      // The previous code passed `operation` to calculateCost, which priced
      // every row as an unknown model. Keep `operation` only as a fallback.
      const model = record.snapshot_id || record.operation || "unknown";
      const cost = calculateCost(model, inputTokens, outputTokens);

      // Upsert daily aggregate. Kept sequential on purpose: multiple rows
      // can target the same day, and concurrent upserts on one key can race.
      await prisma.dailyAggregate.upsert({
        where: { date: dateOnly },
        update: {
          totalRequests: { increment: record.n_requests },
          tokensInput: { increment: inputTokens },
          tokensOutput: { increment: outputTokens },
          tokensTotal: { increment: totalTokens },
          totalCost: { increment: cost },
        },
        create: {
          date: dateOnly,
          totalRequests: record.n_requests,
          tokensInput: inputTokens,
          tokensOutput: outputTokens,
          tokensTotal: totalTokens,
          totalCost: cost,
        },
      });

      processedRecords.push({
        date: dateOnly,
        model,
        requests: record.n_requests,
        inputTokens,
        outputTokens,
        totalTokens,
        cost,
      });
    }

    return NextResponse.json({
      success: true,
      recordsProcessed: processedRecords.length,
      data: processedRecords,
    });
  } catch (error) {
    console.error("Error fetching OpenAI usage:", error);
    return NextResponse.json(
      { error: "Internal server error", details: String(error) },
      { status: 500 }
    );
  }
}
Loading