using System.Net.Http.Json;
using System.Text.Json.Nodes;
using Dpz.Core.Service.Network.Models;
using Markdig;
using Markdig.Syntax;
namespace Dpz.Core.Service.Network;
/// <summary>
/// Calls an external AI completion endpoint (configured via <c>AnalyzeHost</c> /
/// <c>AnalyzeApiKey</c>) and optionally deserializes a JSON payload embedded in
/// a Markdown code fence of the model's reply.
/// </summary>
public class AnalyzeService(
    IConfiguration configuration,
    ILogger<AnalyzeService> logger,
    HttpClient httpClient
)
{
    // CA1869: JsonSerializerOptions is expensive to construct; cache a single
    // thread-safe instance instead of allocating one per AnalyzeAsync call.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNameCaseInsensitive = true,
    };

    // The Markdig pipeline is immutable once built; build it once lazily
    // instead of rebuilding the extension chain on every analyze call.
    private static readonly Lazy<MarkdownPipeline> MarkdownPipeline = new(
        () =>
            new MarkdownPipelineBuilder()
                .UseAutoLinks()
                .UsePipeTables()
                .UseTaskLists()
                .UseEmphasisExtras()
                .UseAutoIdentifiers()
                .Build()
    );

    // Configuration is read lazily so a missing key only faults the first
    // request instead of failing service construction / DI wiring.
    private readonly Lazy<string> _host = new(
        () =>
            configuration["AnalyzeHost"]
            ?? throw new InvalidConfigurationException("AnalyzeHost is null")
    );

    private readonly Lazy<string> _apiKey = new(
        () =>
            configuration["AnalyzeApiKey"]
            ?? throw new InvalidConfigurationException("AnalyzeApiKey is null")
    );

    /// <summary>
    /// Sends <paramref name="text"/> as a single user message and deserializes
    /// the FIRST Markdown code fence of the reply into <typeparamref name="T"/>.
    /// </summary>
    /// <param name="text">Prompt text sent as the user message.</param>
    /// <param name="cancellationToken">Token to cancel the HTTP call.</param>
    /// <returns>
    /// A failed result when the API call or deserialization fails. NOTE: when
    /// the reply contains no code fence, this still succeeds with a default
    /// (null) payload — callers must handle that case.
    /// </returns>
    public async Task<ResponseResult<T?>> AnalyzeAsync<T>(
        string text,
        CancellationToken cancellationToken = default
    )
        where T : new()
    {
        var result = new ResponseResult<T?>();
        var analyzeResult = await SendMessageAsync(
            [new ChatMessage { Role = "user", Message = text }],
            cancellationToken: cancellationToken
        );
        if (!analyzeResult.Success || analyzeResult.Data is null)
        {
            return result.FailResult(analyzeResult.Message ?? "Call API fail");
        }
        T? t = default;
        try
        {
            var document = Markdown.Parse(analyzeResult.Data, MarkdownPipeline.Value);
            // Only the first code fence is considered; any further fences in
            // the reply are intentionally ignored.
            foreach (var codeBlock in document.Descendants<CodeBlock>())
            {
                var json = codeBlock.Lines.ToString();
                t = JsonSerializer.Deserialize<T>(json, SerializerOptions);
                break;
            }
        }
        catch (Exception e)
        {
            logger.LogWarning(e, "deserialize fail");
            return result.FailResult("deserialize fail:" + e.Message);
        }
        return result.SuccessResult(t);
    }

    /// <summary>
    /// Sends a full conversation to the completion endpoint with streaming
    /// requested, returning the raw response body.
    /// </summary>
    /// <param name="messages">Conversation history to send.</param>
    /// <param name="cancellationToken">Token to cancel the HTTP call.</param>
    public async Task<ResponseResult<string>> ChatAsync(
        List<ChatMessage> messages,
        CancellationToken cancellationToken = default
    )
    {
        return await SendMessageAsync(messages, x => x.Stream = true, cancellationToken);
    }

    /// <summary>
    /// Posts the messages to <c>{host}/gpt/completions</c>. Call options come
    /// from the <c>DefaultCallOption</c> configuration section (with a
    /// hard-coded fallback) and may be tweaked via <paramref name="options"/>.
    /// </summary>
    private async Task<ResponseResult<string>> SendMessageAsync(
        List<ChatMessage> messages,
        Action<CallOption>? options = null,
        CancellationToken cancellationToken = default
    )
    {
        var result = new ResponseResult<string>();
        // Dispose the request (and below, the response) — HttpRequestMessage
        // and HttpResponseMessage own disposable content/streams.
        using var request = new HttpRequestMessage(
            HttpMethod.Post,
            $"{_host.Value}/gpt/completions"
        );
        // NOTE(review): the upstream expects the raw key behind a "Basic"
        // scheme — confirm this is not meant to be "Bearer".
        request.Headers.Add("Authorization", $"Basic {_apiKey.Value}");
        var callOption =
            configuration.GetSection("DefaultCallOption").Get<CallOption>()
            ?? new CallOption
            {
                Model = "gpt-4o-mini",
                Stream = false,
                MaxTokens = 1024,
                Temperature = 0,
            };
        options?.Invoke(callOption);
        // Anonymous shape matches the OpenAI-style completion request body.
        var message = new
        {
            messages,
            model = callOption.Model,
            stream = callOption.Stream,
            max_tokens = callOption.MaxTokens,
            temperature = callOption.Temperature,
        };
        logger.LogInformation("Send message :{@Message}", message);
        request.Content = JsonContent.Create(message);
        try
        {
            using var response = await httpClient.SendAsync(request, cancellationToken);
            if (!response.IsSuccessStatusCode)
            {
                logger.LogInformation("Call AI failed,response {Status}", response.StatusCode);
                return result.FailResult($"Call AI failed,response {response.StatusCode}");
            }
            // Read inside the try block so a failure mid-body-read is also
            // reported through the normal error path instead of escaping.
            var content = await response.Content.ReadAsStringAsync(cancellationToken);
            return result.SuccessResult(content);
        }
        catch (Exception e)
        {
            logger.LogError(e, "Call AI failed");
            return result.FailResult(e.Message);
        }
    }

    // Bound from the "DefaultCallOption" configuration section; mirrors the
    // OpenAI-style request parameters.
    private class CallOption
    {
        public string Model { get; set; } = "gpt-4o-mini";
        public bool Stream { get; set; }
        public int MaxTokens { get; set; }
        public double Temperature { get; set; }
    }
}