Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
98 commits
Select commit Hold shift + click to select a range
37218d9
refactor: update styles and prompt selection logic (#103)
Junyi-99 Jan 26, 2026
d9dbc20
feat: initialize projectInstructions variable in conversation prepara…
Junyi-99 Jan 26, 2026
894532a
refactor: update MessageId handling to remove prefix (#105)
Junyi-99 Jan 26, 2026
be7b8c8
add byok modal, update user proto and model
kah-seng Jan 27, 2026
50a5f0b
feat: gpt-5.2, refactored streaming, improved md rendering, basic too…
Junyi-99 Jan 28, 2026
783f85e
Merge branch 'main' of https://github.com/PaperDebugger/paperdebugger…
kah-seng Jan 28, 2026
9603404
update CustomModel proto, model and mapper
kah-seng Jan 28, 2026
49ac78d
add button functions
kah-seng Jan 29, 2026
3ffbb5e
Update xtramcp tag
4ndrelim Jan 29, 2026
5ea58f1
fix: update openaiApiKey handling to refresh model list and cache (#107)
Junyi-99 Jan 29, 2026
d83983b
Merge branch 'staging' of https://github.com/PaperDebugger/paperdebug…
Junyi-99 Jan 29, 2026
74f44d5
refactor: remove gradient handling logic from GeneralToolCard component
Junyi-99 Jan 29, 2026
262f6b7
feat: implement dark mode support and theme synchronization across co…
Junyi-99 Jan 29, 2026
38d7d56
change CustomModel in user.proto to match SupportedModel in chat.proto
kah-seng Jan 30, 2026
3cb6692
allow custom model to be selected in chat
kah-seng Jan 30, 2026
83efbac
Merge branch 'main' into feat/byok
kah-seng Jan 30, 2026
49dd47f
fix: text patch on multi-message
Junyi-99 Jan 31, 2026
07321c9
fix: css
Junyi-99 Jan 31, 2026
3318812
fix: style
Junyi-99 Jan 31, 2026
c2d221e
fix: style
Junyi-99 Jan 31, 2026
b055a17
add: link to report bugs
Junyi-99 Jan 31, 2026
f4e4634
chore: format & lint
Junyi-99 Jan 31, 2026
e0d5808
docs: update qr code
Junyi-99 Jan 31, 2026
9259e76
Merge branch 'main' into feat/byok
kah-seng Feb 3, 2026
6333acc
add select input, update model selection
kah-seng Feb 3, 2026
f693cf4
Merge branch 'main' into feat/byok
kah-seng Feb 14, 2026
adf8216
todo: fix gemini bad request
kah-seng Feb 15, 2026
6ef12a3
feat: tab completion for citation keys (#110)
wjiayis Feb 17, 2026
dab4e6f
chore: update helm for xtramcp (#119)
wjiayis Feb 17, 2026
ea701a5
chore: merge branch 'main' into staging (#122)
wjiayis Feb 17, 2026
6fcdbee
refactor: improve state management and accessibility in components
Junyi-99 Feb 18, 2026
ee2cb93
refactor: improve state management and accessibility in components (#…
Junyi-99 Feb 18, 2026
f19e901
Merge branch 'staging' of https://github.com/PaperDebugger/paperdebug…
Junyi-99 Feb 18, 2026
19269cc
refactor: enhance accessibility and code consistency across components
Junyi-99 Feb 18, 2026
375c342
refactor: update styles and prompt selection logic (#103) (#124)
Junyi-99 Feb 18, 2026
44dd174
Merge branch 'staging' of https://github.com/PaperDebugger/paperdebug…
Junyi-99 Feb 18, 2026
2e593f7
Merge remote-tracking branch 'origin/main' into staging
Junyi-99 Feb 18, 2026
ebbfb44
fix gemini chat params
kah-seng Feb 19, 2026
a7b7ff7
Update llmProvider
kah-seng Feb 21, 2026
110bcaf
Polish UI
kah-seng Feb 21, 2026
58e9aac
Fix defaults
kah-seng Feb 21, 2026
bee8db5
feat: user cost (#126)
wjiayis Mar 11, 2026
f807eaf
Change slug to text input, hide disabled models
kah-seng Mar 18, 2026
035e6da
Allow multiple models with same slugs
kah-seng Mar 18, 2026
c9b078a
Trim inputs, polish UI
kah-seng Mar 19, 2026
6981907
UI polish, do not send disabled models
kah-seng Mar 19, 2026
b6975ef
Merge branch 'staging' into feat/byok
kah-seng Mar 19, 2026
6031234
Make gen
kah-seng Mar 19, 2026
8bf9095
Update UI
kah-seng Mar 19, 2026
167416b
Add input validation error indicator
kah-seng Mar 20, 2026
d107f62
Remove sorting of models by name
kah-seng Mar 20, 2026
6242602
Add optional fields
kah-seng Mar 20, 2026
18a5557
Revert package-lock.json
kah-seng Mar 20, 2026
dc587af
Add baseUrl https validation
kah-seng Mar 20, 2026
7b39a25
feat: track OpenRouter usage
wjiayis Mar 22, 2026
adb9032
fix: revert last change staging (#135)
wjiayis Mar 22, 2026
972c686
Merge branch 'staging' into feat/proj-cost-track
wjiayis Mar 22, 2026
bfeac21
Merge branch 'staging' into feat/byok
kah-seng Mar 26, 2026
8c11e21
make gen
kah-seng Mar 26, 2026
0022664
Resolve Copilot comments
kah-seng Mar 26, 2026
54514a8
Merge pull request #129 from PaperDebugger/feat/byok
kah-seng Mar 26, 2026
d9a41f8
feat: add index and TTL
wjiayis Mar 27, 2026
3d2ce5e
fix: frontend handle missing error response in API client
wjiayis Mar 27, 2026
cad2eb3
fix: remove model_slug from usages collection
wjiayis Mar 27, 2026
788c80e
feat: track usage by hourly and weekly
wjiayis Mar 27, 2026
bca53b7
fix: track partial usage if error occurs
wjiayis Mar 27, 2026
79678ca
fix: track usage for title and citation key generation
wjiayis Mar 27, 2026
a4c645b
feat: track lifetime usage
wjiayis Mar 28, 2026
2057e72
chore: merge main into staging: add nodeSelector support (#137)
Junyi-99 Mar 28, 2026
041d463
refactor: deduplicate CI workflows and fix double triggers (#138)
Junyi-99 Mar 28, 2026
95163e8
chore: enable in-cluster mongodb for dev and stg (#139)
Junyi-99 Mar 28, 2026
572b57d
docs: add contributing instructions (#127) (#142)
Junyi-99 Mar 28, 2026
89a066b
fix: set PD_MONGO_URI for in-cluster mongo and enable in_cluster (#145)
Junyi-99 Mar 28, 2026
56cbe48
fix: add permissions to backend caller workflows (#146)
Junyi-99 Mar 28, 2026
9c92a9a
Merge branch 'staging' into feat/proj-cost-track
wjiayis Mar 30, 2026
4c14874
fix: IsCustom() -> IsCustomModel
wjiayis Mar 30, 2026
3dd9bd9
Merge branch 'staging' into feat/proj-cost-track
wjiayis Mar 30, 2026
862da76
feat: separate success cost from failed cost
wjiayis Apr 1, 2026
acf9653
Add customModels to setting-store.ts
kah-seng Apr 6, 2026
3f6871c
Select models by ID instead of slug
kah-seng Apr 6, 2026
0f9dd22
Add loading spinner
kah-seng Apr 6, 2026
2ff12f9
Fix Copilot comments
kah-seng Apr 6, 2026
56d815e
fix: BYOK (#150)
kah-seng Apr 6, 2026
6508989
Show error when same slug and name
kah-seng Apr 7, 2026
7eca4d7
Add tooltips
kah-seng Apr 8, 2026
23ea667
Add temp, parallel tools, store fields
kah-seng Apr 8, 2026
4cad7c9
Add tooltips
kah-seng Apr 9, 2026
04eb9fe
Allow user to configure temp, parallel tools, store params
kah-seng Apr 10, 2026
2a0bd5f
Add tooltips
kah-seng Apr 10, 2026
4774fbe
Merge branch 'staging' into feat/byok
kah-seng Apr 10, 2026
549b5fb
Fix Copilot comments
kah-seng Apr 10, 2026
dcae304
Merge pull request #153 from PaperDebugger/feat/byok
kah-seng Apr 12, 2026
4f8312c
Merge branch 'staging' into feat/proj-cost-track
wjiayis Apr 14, 2026
60922e8
refactor: merge staging
wjiayis Apr 14, 2026
49935a2
chore: add test cases for failed usage
wjiayis Apr 18, 2026
e92cc7b
Merge branch 'main' into feat/proj-cost-track
wjiayis May 2, 2026
bd80a26
chore: undo changes in hack
wjiayis May 2, 2026
cb228f3
chore: revert accidental package-lock.json churn
wjiayis May 2, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions internal/api/chat/create_conversation_message_stream_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,7 @@ func (s *ChatServerV2) CreateConversationMessageStream(
}
}

openaiChatHistory, inappChatHistory, err := s.aiClientV2.ChatCompletionStreamV2(ctx, stream, conversation.ID.Hex(), modelSlug, conversation.OpenaiChatHistoryCompletion, llmProvider, customModel)
openaiChatHistory, inappChatHistory, _, err := s.aiClientV2.ChatCompletionStreamV2(ctx, stream, conversation.UserID, conversation.ProjectID, conversation.ID.Hex(), modelSlug, conversation.OpenaiChatHistoryCompletion, llmProvider, customModel)
if err != nil {
return s.sendStreamError(stream, err)
}
Expand All @@ -347,7 +347,7 @@ func (s *ChatServerV2) CreateConversationMessageStream(
for i, bsonMsg := range conversation.InappChatHistory {
protoMessages[i] = mapper.BSONToChatMessageV2(bsonMsg)
}
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider, modelSlug, customModel)
title, err := s.aiClientV2.GetConversationTitleV2(ctx, conversation.UserID, conversation.ProjectID, protoMessages, llmProvider, modelSlug, customModel)
if err != nil {
s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex())
return
Expand Down
71 changes: 71 additions & 0 deletions internal/models/usage.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
package models

import (
"time"

"go.mongodb.org/mongo-driver/v2/bson"
)

// HourlyUsage tracks cost per user, per project, per hour.
// Each document represents one hour bucket of usage.
type HourlyUsage struct {
	ID          bson.ObjectID `bson:"_id"`
	UserID      bson.ObjectID `bson:"user_id"`
	ProjectID   string        `bson:"project_id"`
	HourBucket  bson.DateTime `bson:"hour_bucket"`  // Timestamp truncated to the hour
	SuccessCost float64       `bson:"success_cost"` // Cost in USD for successful requests
	FailedCost  float64       `bson:"failed_cost"`  // Cost in USD for failed requests
	UpdatedAt   bson.DateTime `bson:"updated_at"`   // Last time this bucket was written
}

// CollectionName returns the MongoDB collection backing HourlyUsage documents.
func (u HourlyUsage) CollectionName() string {
	return "hourly_usages"
}

// WeeklyUsage tracks cost per user, per project, per week.
// Each document represents one week bucket of usage.
type WeeklyUsage struct {
	ID          bson.ObjectID `bson:"_id"`
	UserID      bson.ObjectID `bson:"user_id"`
	ProjectID   string        `bson:"project_id"`
	WeekBucket  bson.DateTime `bson:"week_bucket"`  // Timestamp truncated to the week (Monday)
	SuccessCost float64       `bson:"success_cost"` // Cost in USD for successful requests
	FailedCost  float64       `bson:"failed_cost"`  // Cost in USD for failed requests
	UpdatedAt   bson.DateTime `bson:"updated_at"`   // Last time this bucket was written
}

// CollectionName returns the MongoDB collection backing WeeklyUsage documents.
func (u WeeklyUsage) CollectionName() string {
	return "weekly_usages"
}

// LifetimeUsage tracks total cost per user, per project, across all time.
// Each document represents the cumulative usage for a user-project pair.
type LifetimeUsage struct {
	ID          bson.ObjectID `bson:"_id"`
	UserID      bson.ObjectID `bson:"user_id"`
	ProjectID   string        `bson:"project_id"`
	SuccessCost float64       `bson:"success_cost"` // Total cost in USD for successful requests
	FailedCost  float64       `bson:"failed_cost"`  // Total cost in USD for failed requests
	UpdatedAt   bson.DateTime `bson:"updated_at"`   // Last time this document was written
}

// CollectionName returns the MongoDB collection backing LifetimeUsage documents.
func (u LifetimeUsage) CollectionName() string {
	return "lifetime_usages"
}

// TruncateToHour truncates a time to the start of its hour.
func TruncateToHour(t time.Time) time.Time {
return t.Truncate(time.Hour)
}

// TruncateToWeek truncates a time to the start of its week (Monday 00:00:00 UTC).
func TruncateToWeek(t time.Time) time.Time {
t = t.UTC()
weekday := int(t.Weekday())
if weekday == 0 {
weekday = 7 // Sunday becomes 7
}
// Subtract days to get to Monday
monday := t.AddDate(0, 0, -(weekday - 1))
return time.Date(monday.Year(), monday.Month(), monday.Day(), 0, 0, 0, 0, time.UTC)
}
3 changes: 3 additions & 0 deletions internal/services/toolkit/client/client_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ type AIClientV2 struct {

reverseCommentService *services.ReverseCommentService
projectService *services.ProjectService
usageService *services.UsageService
cfg *cfg.Cfg
logger *logger.Logger
}
Expand Down Expand Up @@ -62,6 +63,7 @@ func NewAIClientV2(

reverseCommentService *services.ReverseCommentService,
projectService *services.ProjectService,
usageService *services.UsageService,
cfg *cfg.Cfg,
logger *logger.Logger,
) *AIClientV2 {
Expand Down Expand Up @@ -109,6 +111,7 @@ func NewAIClientV2(

reverseCommentService: reverseCommentService,
projectService: projectService,
usageService: usageService,
cfg: cfg,
logger: logger,
}
Expand Down
69 changes: 50 additions & 19 deletions internal/services/toolkit/client/completion_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,19 @@ import (
"paperdebugger/internal/models"
"paperdebugger/internal/services/toolkit/handler"
chatv2 "paperdebugger/pkg/gen/api/chat/v2"
"strconv"
"strings"
"time"

"github.com/openai/openai-go/v3"
"go.mongodb.org/mongo-driver/v2/bson"
)

// UsageCost holds cost information from a completion.
type UsageCost struct {
	Cost float64 // Accumulated cost in USD, as reported by the provider's usage chunks
}

// define []openai.ChatCompletionMessageParamUnion as OpenAIChatHistory

// ChatCompletion orchestrates a chat completion process with a language model (e.g., GPT), handling tool calls and message history management.
Expand All @@ -24,13 +32,14 @@ import (
// Returns:
// 1. The full chat history sent to the language model (including any tool call results).
// 2. The incremental chat history visible to the user (including tool call results and assistant responses).
// 3. An error, if any occurred during the process.
func (a *AIClientV2) ChatCompletionV2(ctx context.Context, modelSlug string, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig, customModel *models.CustomModel) (OpenAIChatHistory, AppChatHistory, error) {
openaiChatHistory, inappChatHistory, err := a.ChatCompletionStreamV2(ctx, nil, "", modelSlug, messages, llmProvider, customModel)
// 3. Cost information (in USD).
// 4. An error, if any occurred during the process.
// ChatCompletionV2 runs a non-streaming chat completion by delegating to
// ChatCompletionStreamV2 with no callback stream and an empty conversation ID.
//
// Returns:
//  1. The full chat history sent to the language model (including any tool call results).
//  2. The incremental chat history visible to the user.
//  3. Cost information (in USD) accumulated during the completion.
//  4. An error, if any occurred during the process.
func (a *AIClientV2) ChatCompletionV2(ctx context.Context, userID bson.ObjectID, projectID string, modelSlug string, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig, customModel *models.CustomModel) (OpenAIChatHistory, AppChatHistory, UsageCost, error) {
	modelHistory, appHistory, usage, err := a.ChatCompletionStreamV2(ctx, nil, userID, projectID, "", modelSlug, messages, llmProvider, customModel)
	if err != nil {
		// Histories are withheld on failure; usage is still returned so the
		// caller can account for any cost accrued before the error.
		return nil, nil, usage, err
	}
	return modelHistory, appHistory, usage, nil
}

// ChatCompletionStream orchestrates a streaming chat completion process with a language model (e.g., GPT), handling tool calls, message history management, and real-time streaming of responses to the client.
Expand All @@ -46,17 +55,20 @@ func (a *AIClientV2) ChatCompletionV2(ctx context.Context, modelSlug string, mes
// Returns: (same as ChatCompletion)
// 1. The full chat history sent to the language model (including any tool call results).
// 2. The incremental chat history visible to the user (including tool call results and assistant responses).
// 3. An error, if any occurred during the process. (However, in the streaming mode, the error is not returned, but sending by callbackStream)
// 3. Cost information (in USD, accumulated across all calls).
// 4. An error, if any occurred during the process. (However, in the streaming mode, the error is not returned, but sending by callbackStream)
//
// This function works as follows: (same as ChatCompletion)
// - It initializes the chat history for the language model and the user, and sets up a stream handler for real-time updates.
// - It repeatedly sends the current chat history to the language model, receives streaming responses, and forwards them to the client as they arrive.
// - If tool calls are required, it handles them and appends the results to the chat history, then continues the loop.
// - If no tool calls are needed, it appends the assistant's response and exits the loop.
// - Finally, it returns the updated chat histories and any error encountered.
func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream chatv2.ChatService_CreateConversationMessageStreamServer, conversationId string, modelSlug string, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig, customModel *models.CustomModel) (OpenAIChatHistory, AppChatHistory, error) {
// - Finally, it returns the updated chat histories, accumulated cost, and any error encountered.
func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream chatv2.ChatService_CreateConversationMessageStreamServer, userID bson.ObjectID, projectID string, conversationId string, modelSlug string, messages OpenAIChatHistory, llmProvider *models.LLMProviderConfig, customModel *models.CustomModel) (OpenAIChatHistory, AppChatHistory, UsageCost, error) {
openaiChatHistory := messages
inappChatHistory := AppChatHistory{}
usage := UsageCost{}
success := false // Track whether the request completed successfully

streamHandler := handler.NewStreamHandlerV2(callbackStream, conversationId, modelSlug)

Expand All @@ -65,6 +77,19 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
streamHandler.SendFinalization()
}()

// Track usage on all exit paths (success or error) to prevent abuse
// Only track if userID is provided and user is not using their own API key (BYOK)
defer func() {
if !userID.IsZero() && !llmProvider.IsCustomModel && usage.Cost > 0 {
// Use a detached context since the request context may be canceled
trackCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
if err := a.usageService.TrackUsage(trackCtx, userID, projectID, usage.Cost, success); err != nil {
a.logger.Error("Error while tracking usage", "error", err)
}
}
}()

oaiClient := a.GetOpenAIClient(llmProvider)
params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry, customModel)

Expand All @@ -77,6 +102,7 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
answer_content := ""
answer_content_id := ""
has_sent_part_begin := false
has_finished := false
tool_info := map[int]map[string]string{}
toolCalls := []openai.FinishedChatCompletionToolCall{}
handleReasoning := func(raw string) (string, bool) {
Expand All @@ -92,12 +118,18 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
}

for stream.Next() {
// time.Sleep(5000 * time.Millisecond) // DEBUG POINT: change this to test in a slow mode
chunk := stream.Current()

// Capture cost from any chunk that has usage data (OpenRouter sends usage in a separate chunk after FinishReason)
if chunk.Usage.PromptTokens > 0 || chunk.Usage.CompletionTokens > 0 {
if costField, ok := chunk.Usage.JSON.ExtraFields["cost"]; ok {
if cost, err := strconv.ParseFloat(costField.Raw(), 64); err == nil {
usage.Cost += cost
}
}
}

if len(chunk.Choices) == 0 {
// Handle usage information
// fmt.Printf("Usage: %+v\n", chunk.Usage)
continue
}

Expand Down Expand Up @@ -180,17 +212,15 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
}
}

if chunk.Choices[0].FinishReason != "" {
// fmt.Printf("FinishReason: %s\n", chunk.Choices[0].FinishReason)
// answer_content += chunk.Choices[0].Delta.Content
// fmt.Printf("answer_content: %s\n", answer_content)
if chunk.Choices[0].FinishReason != "" && !has_finished {
streamHandler.HandleTextDoneItem(chunk, answer_content, reasoning_content)
break
has_finished = true
// Don't break - continue reading to capture the usage chunk that comes after
}
}

if err := stream.Err(); err != nil {
return nil, nil, err
return nil, nil, usage, err
}

if answer_content != "" {
Expand All @@ -200,7 +230,7 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
// Execute the calls (if any), return incremental data
openaiToolHistory, inappToolHistory, err := a.toolCallHandler.HandleToolCallsV2(ctx, toolCalls, streamHandler)
if err != nil {
return nil, nil, err
return nil, nil, usage, err
}

// // Record the tool call results
Expand All @@ -213,5 +243,6 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
}
}

return openaiChatHistory, inappChatHistory, nil
success = true
return openaiChatHistory, inappChatHistory, usage, nil
}
2 changes: 1 addition & 1 deletion internal/services/toolkit/client/get_citation_keys.go
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ func (a *AIClientV2) GetCitationKeys(ctx context.Context, sentence string, userI
// Bibliography is placed at the start of the prompt to leverage prompt caching
message := fmt.Sprintf("Bibliography: %s\nSentence: %s\nBased on the sentence and bibliography, suggest only the most relevant citation keys separated by commas with no spaces (e.g. key1,key2). Be selective and only include citations that are directly relevant. Avoid suggesting more than 3 citations. If no relevant citations are found, return '%s'.", bibliography, sentence, emptyCitation)

_, resp, err := a.ChatCompletionV2(ctx, "gpt-5.2", OpenAIChatHistory{
_, resp, _, err := a.ChatCompletionV2(ctx, userId, projectId, "gpt-5.2", OpenAIChatHistory{
openai.SystemMessage("You are a helpful assistant that suggests relevant citation keys."),
openai.UserMessage(message),
}, llmProvider, nil)
Expand Down
2 changes: 2 additions & 0 deletions internal/services/toolkit/client/get_citation_keys_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,12 @@ func setupTestClient(t *testing.T) (*client.AIClientV2, *services.ProjectService
}

projectService := services.NewProjectService(dbInstance, cfg.GetCfg(), logger.GetLogger())
usageService := services.NewUsageService(dbInstance, cfg.GetCfg(), logger.GetLogger())
aiClient := client.NewAIClientV2(
dbInstance,
&services.ReverseCommentService{},
projectService,
usageService,
cfg.GetCfg(),
logger.GetLogger(),
)
Expand Down
5 changes: 3 additions & 2 deletions internal/services/toolkit/client/get_conversation_title_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,10 @@ import (

"github.com/openai/openai-go/v3"
"github.com/samber/lo"
"go.mongodb.org/mongo-driver/v2/bson"
)

func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig, modelSlug string, customModel *models.CustomModel) (string, error) {
func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, userID bson.ObjectID, projectID string, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig, modelSlug string, customModel *models.CustomModel) (string, error) {
messages := lo.Map(inappChatHistory, func(message *chatv2.Message, _ int) string {
if _, ok := message.Payload.MessageType.(*chatv2.MessagePayload_Assistant); ok {
return fmt.Sprintf("Assistant: %s", message.Payload.GetAssistant().GetContent())
Expand All @@ -35,7 +36,7 @@ func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistor
modelToUse = modelSlug
}

_, resp, err := a.ChatCompletionV2(ctx, modelToUse, OpenAIChatHistory{
_, resp, _, err := a.ChatCompletionV2(ctx, userID, projectID, modelToUse, OpenAIChatHistory{
openai.SystemMessage("You are a helpful assistant that generates a title for a conversation."),
openai.UserMessage(message),
}, llmProvider, customModel)
Expand Down
6 changes: 6 additions & 0 deletions internal/services/toolkit/client/utils_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,9 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2,
Tools: toolRegistry.GetTools(),
ParallelToolCalls: openaiv3.Bool(true),
Store: openaiv3.Bool(false),
StreamOptions: openaiv3.ChatCompletionStreamOptionsParam{
IncludeUsage: openaiv3.Bool(true),
},
}
}
}
Expand All @@ -105,6 +108,9 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2,
Tools: toolRegistry.GetTools(), // Tool registration is managed centrally by the registry
ParallelToolCalls: openaiv3.Bool(true),
Store: openaiv3.Bool(false), // Must set to false, because we are construct our own chat history.
StreamOptions: openaiv3.ChatCompletionStreamOptionsParam{
IncludeUsage: openaiv3.Bool(true),
},
}
}

Expand Down
Loading
Loading