Skip to content

Commit 982f23a

Browse files
committed
fix: 변경된 LLM 포맷에 맞게 수정
1 parent 6c3c53b commit 982f23a

File tree

7 files changed

+138
-86
lines changed

7 files changed

+138
-86
lines changed

ProjectVG.Application/Services/Chat/Preprocessors/UserInputAnalysisProcessor.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ public async Task ProcessAsync(ChatRequestCommand request)
3838
);
3939

4040
var cost = format.CalculateCost(llmResponse.InputTokens, llmResponse.OutputTokens);
41-
var (processType, intent) = format.Parse(llmResponse.Response, userPrompt);
41+
var (processType, intent) = format.Parse(llmResponse.OutputText, userPrompt);
4242

4343
request.AddCost(cost);
4444
request.SetAnalysisResult(processType, intent);

ProjectVG.Application/Services/Chat/Processors/ChatLLMProcessor.cs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,18 +28,17 @@ public async Task ProcessAsync(ChatProcessContext context)
2828
context.UserMessage,
2929
format.GetInstructions(context),
3030
context.ParseConversationHistory().ToList(),
31-
context.MemoryContext?.ToList(),
3231
model: format.Model,
3332
maxTokens: format.MaxTokens,
3433
temperature: format.Temperature
3534
);
3635

37-
var segments = format.Parse(llmResponse.Response, context);
36+
var segments = format.Parse(llmResponse.OutputText, context);
3837
var cost = format.CalculateCost(llmResponse.InputTokens, llmResponse.OutputTokens);
39-
context.SetResponse(llmResponse.Response, segments, cost);
38+
context.SetResponse(llmResponse.OutputText, segments, cost);
4039

4140
_logger.LogInformation("채팅 처리 결과: {Response}\n 세그먼트 생성 개수: {SementCount}\n 입력 토큰: {InputTokens}\n 출력 토큰: {OutputTokens}\n 비용: {Cost}",
42-
llmResponse.Response, segments.Count, llmResponse.InputTokens, llmResponse.OutputTokens, cost);
41+
llmResponse.OutputText, segments.Count, llmResponse.InputTokens, llmResponse.OutputTokens, cost);
4342
}
4443
}
4544
}

ProjectVG.Infrastructure/Integrations/LLMClient/ILLMClient.cs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@ public interface ILLMClient
1818
/// <param name="userMessage">사용자 메시지</param>
1919
/// <param name="instructions">지시사항</param>
2020
/// <param name="conversationHistory">대화 기록</param>
21-
/// <param name="memoryContext">메모리 컨텍스트</param>
2221
/// <param name="model">모델명</param>
2322
/// <param name="maxTokens">최대 토큰 수</param>
2423
/// <param name="temperature">온도</param>
@@ -28,7 +27,6 @@ Task<LLMResponse> CreateTextResponseAsync(
2827
string userMessage,
2928
string? instructions = "",
3029
List<string>? conversationHistory = default,
31-
List<string>? memoryContext = default,
3230
string? model = "gpt-4o-mini",
3331
int? maxTokens = 1000,
3432
float? temperature = 0.7f);

ProjectVG.Infrastructure/Integrations/LLMClient/LLMClient.cs

Lines changed: 31 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -36,9 +36,9 @@ public async Task<LLMResponse> SendRequestAsync(LLMRequest request)
3636
{
3737
try
3838
{
39-
_logger.LogDebug("LLM 요청 시작: {Model}, 사용자 메시지: {UserMessage}",
39+
_logger.LogDebug("LLM 요청 시작: {Model}, 사용자 메시지: {UserPrompt}",
4040
request.Model,
41-
request.UserMessage[..Math.Min(50, request.UserMessage.Length)]);
41+
request.UserPrompt[..Math.Min(50, request.UserPrompt.Length)]);
4242

4343
using var jsonContent = JsonContent.Create(request, options: _jsonOptions);
4444
using var response = await _httpClient.PostAsync("api/v1/chat", jsonContent);
@@ -51,7 +51,7 @@ public async Task<LLMResponse> SendRequestAsync(LLMRequest request)
5151
return new LLMResponse
5252
{
5353
Success = false,
54-
ErrorMessage = $"서비스 오류: {response.StatusCode}"
54+
Error = $"서비스 오류: {response.StatusCode}"
5555
};
5656
}
5757

@@ -60,15 +60,15 @@ public async Task<LLMResponse> SendRequestAsync(LLMRequest request)
6060

6161
if (llmResponse?.Success == true)
6262
{
63-
_logger.LogInformation("LLM 요청 성공: 토큰 {TokensUsed}, 응답 길이 {ResponseLength}",
64-
llmResponse.TokensUsed,
65-
llmResponse.Response?.Length ?? 0);
63+
_logger.LogInformation("LLM 요청 성공: 토큰 {TotalTokens}, 응답 길이 {ResponseLength}",
64+
llmResponse.TotalTokens,
65+
llmResponse.OutputText?.Length ?? 0);
6666
}
6767

6868
return llmResponse ?? new LLMResponse
6969
{
7070
Success = false,
71-
ErrorMessage = "응답을 파싱할 수 없습니다."
71+
Error = "응답을 파싱할 수 없습니다."
7272
};
7373
}
7474
catch (HttpRequestException ex)
@@ -80,11 +80,21 @@ public async Task<LLMResponse> SendRequestAsync(LLMRequest request)
8080
{
8181
Success = true,
8282
Id = "mock-chatcmpl-" + Guid.NewGuid().ToString("N")[..8],
83-
Response = "안녕하세요! 저는 현재 Mock 모드로 동작하고 있습니다. 실제 LLM 서비스가 연결되지 않았습니다.",
84-
TokensUsed = 50,
83+
RequestId = request.RequestId ?? "",
84+
Object = "response",
85+
CreatedAt = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
86+
Status = "completed",
87+
Model = request.Model ?? "gpt-4o-mini",
88+
OutputText = "안녕하세요! 저는 현재 Mock 모드로 동작하고 있습니다. 실제 LLM 서비스가 연결되지 않았습니다.",
8589
InputTokens = 30,
8690
OutputTokens = 20,
87-
ResponseTime = 100
91+
TotalTokens = 50,
92+
CachedTokens = 0,
93+
ReasoningTokens = 0,
94+
TextFormatType = "text",
95+
Cost = 5,
96+
ResponseTime = 0.1,
97+
UseUserApiKey = request.UseUserApiKey ?? false
8898
};
8999
}
90100
catch (TaskCanceledException ex)
@@ -93,7 +103,7 @@ public async Task<LLMResponse> SendRequestAsync(LLMRequest request)
93103
return new LLMResponse
94104
{
95105
Success = false,
96-
ErrorMessage = "요청 시간이 초과되었습니다."
106+
Error = "요청 시간이 초과되었습니다."
97107
};
98108
}
99109
catch (Exception ex)
@@ -102,7 +112,7 @@ public async Task<LLMResponse> SendRequestAsync(LLMRequest request)
102112
return new LLMResponse
103113
{
104114
Success = false,
105-
ErrorMessage = "요청 처리 중 오류가 발생했습니다."
115+
Error = "요청 처리 중 오류가 발생했습니다."
106116
};
107117
}
108118
}
@@ -112,21 +122,22 @@ public async Task<LLMResponse> CreateTextResponseAsync(
112122
string userMessage,
113123
string? instructions = "",
114124
List<string>? conversationHistory = default,
115-
List<string>? memoryContext = default,
116125
string? model = "gpt-4o-mini",
117126
int? maxTokens = 1000,
118127
float? temperature = 0.7f)
119128
{
120129
var request = new LLMRequest
121130
{
122-
SystemMessage = systemMessage,
123-
UserMessage = userMessage,
131+
RequestId = Guid.NewGuid().ToString(),
132+
SystemPrompt = systemMessage,
133+
UserPrompt = userMessage,
124134
Instructions = instructions ?? "",
125-
ConversationHistory = conversationHistory ?? new List<string>(),
126-
MemoryContext = memoryContext ?? new List<string>(),
127-
Model = model ?? LLMModelInfo.GPT4oMini.Name,
128-
MaxTokens = maxTokens ?? LLMModelInfo.DefaultSettings.DefaultMaxTokens,
129-
Temperature = temperature ?? LLMModelInfo.DefaultSettings.DefaultTemperature
135+
ConversationHistory = conversationHistory?.Select(msg => new History { Role = "user", Content = msg }).ToList() ?? new List<History>(),
136+
Model = model ?? "gpt-4o-mini",
137+
MaxTokens = maxTokens ?? 1000,
138+
Temperature = temperature ?? 0.7f,
139+
OpenAiApiKey = "",
140+
UseUserApiKey = false
130141
};
131142

132143
return await SendRequestAsync(request);
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
using System.Text.Json.Serialization;

namespace ProjectVG.Infrastructure.Integrations.LLMClient.Models
{
    /// <summary>
    /// A single conversation-history entry sent to the external LLM service,
    /// serialized as <c>{ "role": ..., "content": ... }</c>.
    /// </summary>
    public class History
    {
        /// <summary>
        /// Message role (user, assistant, system).
        /// </summary>
        [JsonPropertyName("role")]
        public string Role { get; set; } = "";

        /// <summary>
        /// Message content.
        /// </summary>
        [JsonPropertyName("content")]
        public string Content { get; set; } = "";
    }
}

ProjectVG.Infrastructure/Integrations/LLMClient/Models/LLMRequest.cs

Lines changed: 25 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -5,40 +5,34 @@ namespace ProjectVG.Infrastructure.Integrations.LLMClient.Models
55
public class LLMRequest
66
{
77
/// <summary>
8-
/// 세션 ID
8+
/// 요청 ID
99
/// </summary>
10-
[JsonPropertyName("session_id")]
11-
public string? SessionId { get; set; } = "";
10+
[JsonPropertyName("request_id")]
11+
public string? RequestId { get; set; } = "";
1212

1313
/// <summary>
14-
/// 추가 시스템 프롬포트 메시지 (System Prompt)
14+
/// 시스템 프롬프트
1515
/// </summary>
16-
[JsonPropertyName("system_message")]
17-
public string? SystemMessage { get; set; } = "";
16+
[JsonPropertyName("system_prompt")]
17+
public string? SystemPrompt { get; set; } = "";
1818

1919
/// <summary>
20-
/// 매인 쿼리, 유저 메시지 (Users Prompt)
20+
/// 사용자 메시지
2121
/// </summary>
22-
[JsonPropertyName("user_message")]
23-
public string UserMessage { get; set; } = "";
22+
[JsonPropertyName("user_prompt")]
23+
public string UserPrompt { get; set; } = "";
2424

2525
/// <summary>
26-
/// 필수 지시사항 (output form 지정)
26+
/// 추가 지시사항
2727
/// </summary>
2828
[JsonPropertyName("instructions")]
2929
public string? Instructions { get; set; } = "";
3030

3131
/// <summary>
32-
/// 최근 대화 내역 (Users Prompt에 추가됨)
32+
/// 대화 기록
3333
/// </summary>
3434
[JsonPropertyName("conversation_history")]
35-
public List<string>? ConversationHistory { get; set; } = new();
36-
37-
/// <summary>
38-
/// 장기 기억 Context (System Prompt에 추가됨)
39-
/// </summary>
40-
[JsonPropertyName("memory_context")]
41-
public List<string>? MemoryContext { get; set; } = new();
35+
public List<History>? ConversationHistory { get; set; } = new();
4236

4337
[JsonPropertyName("max_tokens")]
4438
public int? MaxTokens { get; set; } = 1000;
@@ -47,6 +41,18 @@ public class LLMRequest
4741
public float? Temperature { get; set; } = 0.7f;
4842

4943
[JsonPropertyName("model")]
50-
public string? Model { get; set; } = "gpt-4.1-mini";
44+
public string? Model { get; set; } = "gpt-4o-mini";
45+
46+
/// <summary>
47+
/// 사용자 제공 API Key
48+
/// </summary>
49+
[JsonPropertyName("openai_api_key")]
50+
public string? OpenAiApiKey { get; set; } = "";
51+
52+
/// <summary>
53+
/// 사용자 API Key 사용 여부
54+
/// </summary>
55+
[JsonPropertyName("use_user_api_key")]
56+
public bool? UseUserApiKey { get; set; } = false;
5157
}
5258
}

ProjectVG.Infrastructure/Integrations/LLMClient/Models/LLMResponse.cs

Lines changed: 59 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -4,58 +4,77 @@ namespace ProjectVG.Infrastructure.Integrations.LLMClient.Models
44
{
55
public class LLMResponse
{
    /// <summary> OpenAI Response ID. </summary>
    [JsonPropertyName("id")]
    public string Id { get; set; } = default!;

    /// <summary> Request ID (echoed back from <c>LLMRequest.RequestId</c>). </summary>
    [JsonPropertyName("request_id")]
    public string RequestId { get; set; } = default!;

    /// <summary> Response object type. </summary>
    [JsonPropertyName("object")]
    public string Object { get; set; } = "response";

    /// <summary> Creation time (Unix epoch seconds). </summary>
    [JsonPropertyName("created_at")]
    public long CreatedAt { get; set; }

    /// <summary> Response status (completed, failed). </summary>
    [JsonPropertyName("status")]
    public string Status { get; set; } = "completed";

    /// <summary> OpenAI model that produced the response. </summary>
    [JsonPropertyName("model")]
    public string Model { get; set; } = default!;

    /// <summary> AI response text. </summary>
    [JsonPropertyName("output_text")]
    public string OutputText { get; set; } = default!;

    /// <summary> Input (prompt) token count. </summary>
    [JsonPropertyName("input_tokens")]
    public int InputTokens { get; set; }

    /// <summary> Output (completion) token count. </summary>
    [JsonPropertyName("output_tokens")]
    public int OutputTokens { get; set; }

    /// <summary> Total token count. </summary>
    [JsonPropertyName("total_tokens")]
    public int TotalTokens { get; set; }

    /// <summary> Cached token count. </summary>
    [JsonPropertyName("cached_tokens")]
    public int CachedTokens { get; set; }

    /// <summary> Reasoning token count (o-series models). </summary>
    [JsonPropertyName("reasoning_tokens")]
    public int ReasoningTokens { get; set; }

    /// <summary> Text format type. </summary>
    [JsonPropertyName("text_format_type")]
    public string TextFormatType { get; set; } = "text";

    /// <summary>
    /// Cost of the request.
    /// NOTE(review): declared as an integer — unit is not visible from here
    /// (micro-USD? credits?); fractional costs would truncate — TODO confirm
    /// against the LLM service contract.
    /// </summary>
    [JsonPropertyName("cost")]
    public int? Cost { get; set; }

    /// <summary> Response time in seconds. </summary>
    [JsonPropertyName("response_time")]
    public double? ResponseTime { get; set; }

    /// <summary> Whether the request succeeded. </summary>
    [JsonPropertyName("success")]
    public bool Success { get; set; } = true;

    /// <summary> Error message when <see cref="Success"/> is false. </summary>
    [JsonPropertyName("error")]
    public string? Error { get; set; }

    /// <summary> Whether the user-supplied API key was used. </summary>
    [JsonPropertyName("use_user_api_key")]
    public bool UseUserApiKey { get; set; } = false;
}
6180
}

0 commit comments

Comments
 (0)