Test PR #1905 (Draft)

Changes from all commits
24 commits
c94efcc
updated to use cosmos db instead of mongo db
robgruen Feb 3, 2026
b8b8160
lint
robgruen Feb 3, 2026
30432c6
can swap between mongo db and cosmos db
robgruen Feb 3, 2026
9aeb281
Merge branch 'main' into dev/robgruen/mongodb_migration
robgruen Feb 3, 2026
9351a01
Merge branch 'main' into dev/robgruen/mongodb_migration
robgruen Feb 3, 2026
07664b7
pnpm package upgrade
robgruen Feb 4, 2026
aa9597b
upgraded electron builder version #
robgruen Feb 4, 2026
0c51412
Merge branch 'main' into dev/robgruen/mongodb_migration
robgruen Feb 4, 2026
76359ed
refactored promptlogger into telemetry to reduce dependency chain len…
robgruen Feb 4, 2026
8f174be
lint
robgruen Feb 4, 2026
5afc2e4
Merge branch 'main' into dev/robgruen/mongodb_migration
robgruen Feb 4, 2026
f73eb8d
small refactor
robgruen Feb 5, 2026
af99e40
merged
robgruen Feb 5, 2026
8ebc055
updated lock file
robgruen Feb 5, 2026
4ef6287
Merge branch 'main' into dev/robgruen/mongodb_migration
robgruen Feb 6, 2026
fa0fc80
fixed lock file.
robgruen Feb 6, 2026
a75d633
Merge remote-tracking branch 'origin' into dev/robgruen/mongodb_migra…
robgruen Feb 6, 2026
3a84bc2
abstracted cosmos dependencies into an interface and that gets called…
robgruen Feb 6, 2026
17179be
prompt logger now externalized. Prompt logger now part of the system…
robgruen Feb 7, 2026
c276d91
merged
robgruen Feb 7, 2026
2d7277e
updated lock file
robgruen Feb 7, 2026
f8602ea
Merge branch 'main' into dev/robgruen/mongodb_migration2
robgruen Feb 7, 2026
3c52c5c
lint
robgruen Feb 7, 2026
c639596
Merge branch 'dev/robgruen/mongodb_migration2' of https://github.com/…
robgruen Feb 7, 2026
1 change: 1 addition & 0 deletions ts/examples/chat/package.json
@@ -41,6 +41,7 @@
     "knowpro-test": "workspace:*",
     "memory-providers": "workspace:*",
     "memory-storage": "workspace:*",
+    "telemetry": "workspace:*",
     "textpro": "workspace:*",
     "typeagent": "workspace:*",
     "typechat": "^0.1.1",
10 changes: 9 additions & 1 deletion ts/examples/chat/src/codeChat/codeChat.ts
@@ -38,6 +38,9 @@ import {
     sampleFiles,
 } from "./common.js";
 import { createCommandTransformer } from "./commandTransformer.js";
+import { createPromptLogger } from "telemetry";
+
+const promptLogger = createPromptLogger();
 
 export async function runCodeChat(): Promise<void> {
     const model = openai.createChatModelDefault("codeChat");
@@ -381,7 +384,12 @@ export async function runCodeChat(): Promise<void> {
     async function regex(args: string[], io: InteractiveIo): Promise<void> {
         if (args.length > 0) {
            const prompt = `Return a Typescript regular expression for the following:\n ${args.join(" ")}`;
-            const result = await codeReviewer.model.complete(prompt);
+            const result = await codeReviewer.model.complete(
+                prompt,
+                undefined,
+                undefined,
+                promptLogger.logModelRequest,
+            );
             if (result.success) {
                 io.writer.writeLine(result.data);
             } else {
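
Each example app and agent in this PR repeats the wiring shown above: a module-level createPromptLogger() whose logModelRequest method is passed as the new fourth argument of complete(). For reference, a sketch of the payload logModelRequest receives — the field names are taken from the logFn({ ... }) call sites in openai.ts further down this diff, while the exact types are assumptions, since the telemetry source itself is not shown:

    // Assumed payload shape, reconstructed from the openai.ts call sites below
    // (the telemetry package's actual types are not part of this diff).
    import { createPromptLogger } from "telemetry";

    const promptLogger = createPromptLogger();

    promptLogger.logModelRequest({
        prompt: [{ role: "user", content: "hello" }], // PromptSection[]
        response: "hi there", // completed model response text
        tokenUsage: { prompt_tokens: 3, completion_tokens: 2, total_tokens: 5 },
    });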
1 change: 1 addition & 0 deletions ts/examples/playground/package.json
@@ -25,6 +25,7 @@
     "copyfiles": "^2.4.1",
     "dotenv": "^16.3.1",
     "interactive-app": "workspace:*",
+    "telemetry": "workspace:*",
     "typeagent": "workspace:*",
     "typechat": "^0.1.1",
     "typescript": "~5.4.5"
10 changes: 9 additions & 1 deletion ts/examples/playground/src/main.ts
@@ -25,6 +25,9 @@ import {
 } from "typeagent";
 import { PromptSection } from "typechat";
 import * as fs from "fs";
+import { createPromptLogger } from "telemetry";
+
+const promptLogger = createPromptLogger();
 
 const envPath = new URL("../../../.env", import.meta.url);
 dotenv.config({ path: envPath });
@@ -188,7 +191,12 @@ async function runPlayground(): Promise<void> {
         ),
         userMessage,
     ];
-    const chatResponse = await chatModel.complete(context);
+    const chatResponse = await chatModel.complete(
+        context,
+        undefined,
+        undefined,
+        promptLogger.logModelRequest,
+    );
     if (chatResponse.success) {
         const responseText = chatResponse.data;
         io.writer.writeLine(responseText);
1 change: 1 addition & 0 deletions ts/examples/vscodeSchemaGen/package.json
@@ -28,6 +28,7 @@
     "debug": "^4.4.0",
     "dotenv": "^16.3.1",
     "fastest-levenshtein": "^1.0.16",
+    "telemetry": "workspace:*",
     "typeagent": "workspace:*",
     "typechat": "^0.1.1"
 },
10 changes: 9 additions & 1 deletion ts/examples/vscodeSchemaGen/src/schemaGen.ts
@@ -5,6 +5,9 @@ import * as path from "path";
 import dotenv from "dotenv";
 import * as fs from "fs";
 import { finished } from "stream/promises";
+import { createPromptLogger } from "telemetry";
+
+const promptLogger = createPromptLogger();
 
 import {
     ChatModel,
@@ -23,7 +26,12 @@ async function getModelCompletionResponse(
     prompt: string,
     jsonNode: any,
 ): Promise<string | undefined> {
-    const chatResponse = await chatModel.complete(prompt);
+    const chatResponse = await chatModel.complete(
+        prompt,
+        undefined,
+        undefined,
+        promptLogger.logModelRequest,
+    );
     if (chatResponse.success) {
         const responseText = chatResponse.data;
         return responseText;
2 changes: 1 addition & 1 deletion ts/package.json
@@ -75,7 +75,7 @@
     "exifreader",
     "keytar",
     "koffi",
-    "node-pty",
+    "protobufjs",
     "puppeteer",
     "sharp"
 ],
1 change: 1 addition & 0 deletions ts/packages/agents/code/package.json
@@ -28,6 +28,7 @@
     "better-sqlite3": "12.2.0",
     "chalk": "^5.4.1",
     "debug": "^4.4.0",
+    "telemetry": "workspace:*",
     "websocket-utils": "workspace:*",
     "ws": "^8.17.1"
 },
1 change: 1 addition & 0 deletions ts/packages/agents/markdown/package.json
@@ -50,6 +50,7 @@
     "prosemirror-model": "^1.19.0",
     "prosemirror-state": "^1.4.2",
     "prosemirror-view": "^1.31.0",
+    "telemetry": "workspace:*",
     "typechat": "^0.1.1",
     "unist-util-visit": "^4.1.2",
     "ws": "^8.14.2",
10 changes: 9 additions & 1 deletion ts/packages/agents/markdown/src/agent/translator.ts
@@ -12,6 +12,9 @@ import fs from "node:fs";
 import { fileURLToPath } from "node:url";
 import path from "node:path";
 import registerDebug from "debug";
+import { createPromptLogger } from "telemetry";
+
+const promptLogger = createPromptLogger();
 
 import { MarkdownUpdateResult } from "./markdownOperationSchema.js";
 
@@ -174,7 +177,12 @@ export class MarkdownAgent<T extends object> {
         let accumulatedContent = "";
 
         // Use the ChatModel's complete method with proper parameters
-        const response = await this.model.complete(streamingPrompt);
+        const response = await this.model.complete(
+            streamingPrompt,
+            undefined,
+            undefined,
+            promptLogger.logModelRequest,
+        );
 
         // Extract content from response
         let content = "";
@@ -9,7 +9,7 @@ import {
     LoggerSink,
     MultiSinkLogger,
     createDebugLoggerSink,
-    createMongoDBLoggerSink,
+    createDatabaseLoggerSink,
     Profiler,
 } from "telemetry";
 import { AgentCache } from "agent-cache";
@@ -189,19 +189,19 @@ function getLoggerSink(isDbEnabled: () => boolean, clientIO: ClientIO) {
     let dbLoggerSink: LoggerSink | undefined;
 
     try {
-        dbLoggerSink = createMongoDBLoggerSink(
-            "telemetrydb",
-            "dispatcherlogs",
-            isDbEnabled,
-            (e: string) => {
+        dbLoggerSink = createDatabaseLoggerSink({
+            dbName: "telemetrydb",
+            collectionName: "dispatcherlogs",
+            isEnabled: isDbEnabled,
+            onErrorDisable: (e: string) => {
                 clientIO.notify(
                     AppAgentEvent.Warning,
                     undefined,
                     e,
                     DispatcherName,
                 );
             },
-        );
+        });
     } catch (e) {
         clientIO.notify(
             AppAgentEvent.Warning,
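
The sink factory moves from positional arguments to an options object and takes the storage-agnostic name createDatabaseLoggerSink, matching the commits that make the backend swappable between MongoDB and Cosmos DB. The options shape below is a sketch inferred from this single call site; the real declaration lives in the telemetry package and is not part of this diff:

    // Inferred options shape (assumption — reconstructed from the call site above).
    interface DatabaseLoggerSinkOptions {
        dbName: string;                      // e.g. "telemetrydb"
        collectionName: string;              // e.g. "dispatcherlogs"
        isEnabled: () => boolean;            // polled to gate logging at runtime
        onErrorDisable: (e: string) => void; // invoked if the sink disables itself on error
    }

    declare function createDatabaseLoggerSink(
        options: DatabaseLoggerSinkOptions,
    ): LoggerSink;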
1 change: 0 additions & 1 deletion ts/packages/aiclient/package.json
@@ -35,7 +35,6 @@
     "@azure/identity": "^4.10.0",
     "async": "^3.2.5",
     "debug": "^4.4.0",
-    "telemetry": "workspace:*",
     "typechat": "^0.1.1"
 },
 "devDependencies": {
2 changes: 2 additions & 0 deletions ts/packages/aiclient/src/models.ts
@@ -67,6 +67,7 @@ export interface ChatModel extends TypeChatLanguageModel {
         prompt: string | PromptSection[],
         usageCallback?: CompleteUsageStatsCallback,
         jsonSchema?: CompletionJsonSchema,
+        promptLogFn?: (msg: any) => void,
     ): Promise<Result<string>>;
 }
 
@@ -80,6 +81,7 @@ export interface ChatModelWithStreaming extends ChatModel {
         prompt: string | PromptSection[],
         usageCallback?: CompleteUsageStatsCallback,
         jsonSchema?: CompletionJsonSchema,
+        promptLogFn?: (msg: any) => void,
     ): Promise<Result<AsyncIterableIterator<string>>>;
 }
 
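
With the interfaces extended, a caller opts in by passing the callback as the trailing argument; existing call sites that pass only a prompt are unaffected, since promptLogFn is optional. A self-contained caller sketch, assuming any default-configured chat model works here (the "codeChat" model name is borrowed from codeChat.ts above):

    import { openai } from "aiclient";
    import { createPromptLogger } from "telemetry";

    const promptLogger = createPromptLogger();
    const model = openai.createChatModelDefault("codeChat");

    const result = await model.complete(
        "Say hello",
        undefined,                    // usageCallback: not needed here
        undefined,                    // jsonSchema: plain-text completion
        promptLogger.logModelRequest, // promptLogFn: opt in to prompt logging
    );
    if (result.success) {
        console.log(result.data);
    }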
11 changes: 6 additions & 5 deletions ts/packages/aiclient/src/openai.ts
@@ -29,7 +29,6 @@ import { readServerEventStream } from "./serverEvents.js";
 import { priorityQueue } from "async";
 import registerDebug from "debug";
 import { TokenCounter } from "./tokenCounter.js";
-import { PromptLogger } from "./promptLogger.js";
 import {
     createOllamaChatModel,
     OllamaApiSettings,
@@ -485,6 +484,7 @@ function createAzureOpenAIChatModel(
         prompt: string | PromptSection[],
         usageCallback?: CompleteUsageStatsCallback,
         jsonSchema?: CompletionJsonSchema,
+        logFn?: (msg: any) => void,
     ): Promise<Result<string>> {
         verifyPromptLength(settings, prompt);
 
@@ -521,9 +521,9 @@
         }
 
         try {
-            if (settings.enableModelRequestLogging) {
+            if (settings.enableModelRequestLogging && logFn) {
                 // Log request
-                PromptLogger.getInstance().logModelRequest({
+                logFn({
                     prompt: messages as PromptSection[],
                     response: data.choices[0].message?.content ?? "",
                     tokenUsage: data.usage,
@@ -561,6 +561,7 @@
         prompt: string | PromptSection[],
         usageCallback?: CompleteUsageStatsCallback,
         jsonSchema?: CompletionJsonSchema,
+        logFn?: (msg: any) => void,
     ): Promise<Result<AsyncIterableIterator<string>>> {
         verifyPromptLength(settings, prompt);
 
@@ -612,9 +613,9 @@
             for await (const evt of readServerEventStream(result.data)) {
                 if (evt.data === "[DONE]") {
                     try {
-                        if (settings.enableModelRequestLogging) {
+                        if (settings.enableModelRequestLogging && logFn) {
                             // Log request.
-                            PromptLogger.getInstance().logModelRequest({
+                            logFn({
                                 prompt: messages as PromptSection[],
                                 response: fullResponseText,
                                 tokenUsageData: tokenUsage,
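
Two consequences of the guards above: logging now requires both the enableModelRequestLogging setting and an injected callback (aiclient no longer reaches into a PromptLogger singleton, hence the file deletion below), and the streaming path defers the log until the [DONE] event, once fullResponseText and the token counts are complete. Continuing the caller sketch from models.ts above, a hedged streaming example — the method name completeStream is an assumption based on the ChatModelWithStreaming signature, which returns Result<AsyncIterableIterator<string>>:

    // Sketch: streaming completion with prompt logging; assumes the default
    // model implements ChatModelWithStreaming and the method is completeStream.
    const streamed = await model.completeStream(
        "Stream a short greeting",
        undefined,
        undefined,
        promptLogger.logModelRequest, // invoked once, after the [DONE] event
    );
    if (streamed.success) {
        for await (const chunk of streamed.data) {
            process.stdout.write(chunk);
        }
    }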
52 changes: 0 additions & 52 deletions ts/packages/aiclient/src/promptLogger.ts

This file was deleted.

10 changes: 9 additions & 1 deletion ts/packages/cache/src/explanation/v5/explanationV5.ts
@@ -81,6 +81,9 @@ import {
     PolitenessGeneralizer,
 } from "./politenessGeneralizationV5.js";
 import { PolitenessGeneralization } from "./politenessGeneralizationSchemaV5.js";
+import { createPromptLogger } from "telemetry";
+
+const promptLogger = createPromptLogger();
 
 type Explanation = PropertyExplanation &
     SubPhraseExplanation &
@@ -334,7 +337,12 @@ variations: ParameterVariation[];
 For every value V that is within the range of ${paramRange.min} to ${paramRange.max} by step ${paramRange.step}, generate a phrase P that only changes the phrase '${subPhrase}' enough to change the value to V.
 Emit the generated phrases and values as a JSON object of type ParameterVariationResult with 2 spaces of indentation and no properties with the value undefined:
 `;
-    const result = await model.complete(prompt);
+    const result = await model.complete(
+        prompt,
+        undefined,
+        undefined,
+        promptLogger.logModelRequest,
+    );
     if (result.success) {
         const generatedAlternatives = JSON.parse(
             result.data,
1 change: 1 addition & 0 deletions ts/packages/cli/package.json
@@ -64,6 +64,7 @@
     "marked": "^15.0.0",
     "marked-terminal": "^7.3.0",
     "open": "^10.1.0",
+    "telemetry": "workspace:*",
     "ts-node": "^10.9.1",
     "typechat": "^0.1.1",
     "typechat-utils": "workspace:*"
10 changes: 9 additions & 1 deletion ts/packages/cli/src/commands/prompt.ts
@@ -5,6 +5,9 @@ import { Args, Command, Flags } from "@oclif/core";
 import { openai, getChatModelNames } from "aiclient";
 import fs from "node:fs";
 import chalk from "chalk";
+import { createPromptLogger } from "telemetry";
+
+const promptLogger = createPromptLogger();
 
 const modelNames = await getChatModelNames();
 export default class Prompt extends Command {
@@ -83,7 +86,12 @@ export default class Prompt extends Command {
             time = end - start;
         } else {
             const start = performance.now();
-            const complete = await model.complete(request);
+            const complete = await model.complete(
+                request,
+                undefined,
+                undefined,
+                promptLogger.logModelRequest,
+            );
             const end = performance.now();
             if (complete.success) {
                 responseText = complete.data;
1 change: 1 addition & 0 deletions ts/packages/dispatcher/dispatcher/package.json
@@ -41,6 +41,7 @@
     "@anthropic-ai/claude-agent-sdk": "^0.2.12",
     "@azure/ai-agents": "^1.0.0-beta.3",
     "@azure/ai-projects": "^1.0.0-beta.8",
+    "@azure/cosmos": "^4.2.1",
     "@azure/identity": "^4.10.0",
     "@typeagent/action-schema": "workspace:*",
     "@typeagent/agent-rpc": "workspace:*",