feat: helper function for generating LLM responses

2025-07-23 18:28:28 +02:00
parent bc05e91790
commit 9e9017717a
2 changed files with 114 additions and 2 deletions

View File

@@ -1,5 +1,7 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.IdentityModel.Tokens;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using QuotifyBE.Data;
using QuotifyBE.Entities;
using System.IdentityModel.Tokens.Jwt;
@@ -132,4 +134,108 @@ public class GeneralUseHelpers(ApplicationDbContext db, IConfiguration appsettin
return new JwtSecurityTokenHandler().WriteToken(token);
}
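/// <summary>
/// Builds a chat prompt (optionally tied to a category, optionally seeded with a random sample
/// quote from that category) and sends it to the configured OpenAI-compatible /chat/completions
/// endpoint. Returns the parsed JSON response, or null if the category lookup fails, the
/// parameters are inconsistent, or the API call is unsuccessful.
/// </summary>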
public async Task<JObject?> GenerateLLMResponse(string? prompt, string? model, float? temp, int? includedCategory, bool? includeCategorySample)
{
string _model = model ?? _appsettings.GetSection("LlmIntegration")["DefaultModel"] ?? "deepclaude";
float _temp = temp ?? 0.6f; // sane default
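// Fall back to the configured DefaultPrompt, then to this hard-coded Polish prompt. Roughly:
// "Hi, can you come up with and create one original quote? Think about its punchline, and when
// you're ready, return just the quote. Don't ask me what I think of it, and don't use emoticons
// (emoji). Remember that good quotes are short and concise."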
string _prompt = prompt ?? _appsettings.GetSection("LlmIntegration")["DefaultPrompt"] ??
"Cześć, czy jesteś w stanie wymyślić i stworzyć jeden oryginalny cytat? " +
"Zastanów się nad jego puentą, a kiedy będziesz gotów - zwróć sam cytat. " +
"Nie pytaj mnie co o nim sądzę, ani nie używaj emotikonów (emoji). " +
"Pamiętaj, że dobre cytaty są krótkie, zwięzłe.";
if (includedCategory != null)
{
// Make sure the requested category actually exists; bail out if it does not.
Category? cat = await _db.Categories.FirstOrDefaultAsync(c => c.Id == includedCategory.Value);
if (cat == null) return null;
// Polish: "Let it belong to the category named "<Name>" (<Description>)."
_prompt += $" Niech należy on do kategorii o nazwie \"{cat.Name}\" ({cat.Description}).";
}
if (includeCategorySample == true)
{
// A sample quote only makes sense when a category was requested.
if (includedCategory == null)
{
return null;
}
else
{
// Look up the category again so we can sample one of its quotes.
Category? cat = await _db.Categories.FirstOrDefaultAsync(c => c.Id == includedCategory.Value);
// Category not found - nothing to sample from.
if (cat == null)
{
return null;
}
else
{
IQueryable<Quote> query = _db.Quotes
.Include(q => q.QuoteCategories!)
.Where(q => q.QuoteCategories
.Any(qc => qc.Category == cat)
);
int totalQuotes = await query.CountAsync();
if (totalQuotes > 0)
{
// Pick one quote at random by skipping a random offset.
Random random = new();
int skip = random.Next(0, totalQuotes);
Quote? quote = await query
.Skip(skip)
.Take(1)
.FirstOrDefaultAsync();
if (quote != null)
{
// Polish: "An example quote from this category reads: "<Text> ~ <Author>"."
_prompt += $" Przykładowy cytat z tej kategorii brzmi: \"{quote.Text} ~ {quote.Author}\".\n";
}
}
}
}
}
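// Single-turn conversation in the OpenAI chat-completions message format.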
List<Dictionary<string, string>> promptMessages =
[
new() { { "role", "user" }, {"content", _prompt } }
];
// Throws if the required settings are missing from appsettings.json.
// Note: the null check must happen before the concatenation, otherwise a missing ApiUrl
// silently becomes "/chat/completions" instead of throwing.
string apiUrl = (_appsettings.GetSection("LlmIntegration")["ApiUrl"]
?? throw new MissingFieldException("API URL missing in LlmIntegration section of appsettings.json!"))
+ "/chat/completions";
string apiKey = _appsettings.GetSection("LlmIntegration")["ApiKey"]
?? throw new MissingFieldException("API key missing in LlmIntegration section of appsettings.json!");
using (var client = new HttpClient())
{
// Not the best practice if we want reusable connections
// https://stackoverflow.com/a/40707446
client.DefaultRequestHeaders.Add("Authorization", $"Bearer {apiKey}");
var json = JsonConvert.SerializeObject(new
{
model = _model,
temperature = _temp,
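// Allow a longer response when a category sample was requested.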
max_tokens = (includeCategorySample ?? false) ? 2000 : 1000,
messages = promptMessages
});
var content = new StringContent(json, Encoding.UTF8, "application/json");
var response = await client.PostAsync(apiUrl, content);
if (response.IsSuccessStatusCode)
{
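// Hand the provider's JSON back unchanged; callers extract the generated text themselves.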
string receivedResponse = await response.Content.ReadAsStringAsync();
return JObject.Parse(receivedResponse);
}
else
{
// Handle the error
Console.WriteLine($"[QuotifyBE] Error: response status code from API was {response.StatusCode}.");
return null;
}
}
}
}
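
Not part of this commit: a minimal usage sketch of the new helper. The controller method name and category id below are hypothetical, and it assumes the standard OpenAI-compatible response shape (choices[0].message.content):

    // Hypothetical caller; only GenerateLLMResponse comes from this commit.
    public async Task<string?> GenerateQuoteText(GeneralUseHelpers helpers)
    {
        JObject? response = await helpers.GenerateLLMResponse(
            prompt: null,               // fall back to DefaultPrompt from appsettings.json
            model: null,                // fall back to DefaultModel
            temp: 0.6f,
            includedCategory: 1,        // hypothetical category id
            includeCategorySample: true);

        // OpenAI-compatible servers put the generated text under choices[0].message.content.
        return response?["choices"]?[0]?["message"]?["content"]?.ToString();
    }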

View File

@@ -2,10 +2,16 @@
"JwtSecret": "this is a sample jwt secret token required for quotify - it needs to have at least 256 bits (32 bytes long)",
"DomainName": "example.com",
"CorsOrigins": [
"http://localhost:5259", "http://localhost:5258", "http://localhost:3000", "http://example.com"
"https://localhost:7029", "http://localhost:5259", "http://localhost:5258", "http://localhost:3000", "http://example.com"
],
"UserContent": {
"MaxFileSize": 5242880,
"MaxFileSize": 5242880
},
"LlmIntegration": {
"ApiUrl": "URL to OpenAI-compatible API server, e.g. https://example.com/api/v1",
"ApiKey": "FILL ME for AI-generation capabilities",
"DefaultPrompt": "Cześć, czy jesteś w stanie wymyślić i stworzyć jeden oryginalny cytat?\nZastanów się nad jego puentą, a kiedy będziesz gotów - zwróć sam cytat.\nNie pytaj mnie co o nim sądzę, ani nie używaj emotikonów (emoji).\nPamiętaj, że dobre cytaty są krótkie, zwięzłe.",
"DefaultModel": "deepclaude"
},
"ConnectionStrings": {
"DefaultConnection": "Server=server-host;Database=db-name;Username=quotify-user;Password=user-secret"