Mirror of https://github.com/QuotifyTeam/QuotifyBE.git, synced 2025-12-16 19:00:07 +01:00
Compare commits
3 Commits
user_content
...
ask_a_llm
| Author | SHA1 | Date | |
|---|---|---|---|
| 3e823fb37b | |||
| 9e9017717a | |||
| bc05e91790 |
@@ -1,5 +1,7 @@
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.IdentityModel.Tokens;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using QuotifyBE.Data;
|
||||
using QuotifyBE.Entities;
|
||||
using System.IdentityModel.Tokens.Jwt;
|
||||
@@ -132,4 +134,108 @@ public class GeneralUseHelpers(ApplicationDbContext db, IConfiguration appsettin
|
||||
|
||||
return new JwtSecurityTokenHandler().WriteToken(token);
|
||||
}
|
||||
|
||||
public async Task<JObject?> GenerateLLMResponse(string? prompt, string? model, float? temp, int? includedCategory, bool? includeCategorySample)
|
||||
{
|
||||
|
||||
string _model = model ?? _appsettings.GetSection("LlmIntegration")["DefaultModel"] ?? "deepclaude";
|
||||
float _temp = temp ?? 0.6f; // sane default
|
||||
string _included_sample = string.Empty;
|
||||
string _prompt = prompt ?? _appsettings.GetSection("LlmIntegration")["DefaultPrompt"] ??
|
||||
"Cześć, czy jesteś w stanie wymyślić i stworzyć jeden oryginalny cytat? " +
|
||||
"Zastanów się nad jego puentą, a kiedy będziesz gotów - zwróć sam cytat. " +
|
||||
"Nie pytaj mnie co o nim sądzę, ani nie używaj emotikonów (emoji). " +
|
||||
"Pamiętaj, że dobre cytaty są krótkie, zwięzłe.";
|
||||
|
||||
if (includedCategory != null)
|
||||
{
|
||||
// Check if category to be included is present.
|
||||
Category? cat = await _db.Categories.FirstOrDefaultAsync(c => c.Id == includedCategory.Value);
|
||||
// It isn't?
|
||||
if (cat == null) return null;
|
||||
// It is?
|
||||
_prompt += $" Niech należy on do kategorii o nazwie \"{cat.Name}\" ({cat.Description}).";
|
||||
}
|
||||
|
||||
// Sanity check
|
||||
if (includeCategorySample != null && includeCategorySample == true)
|
||||
{
|
||||
if (includedCategory == null)
|
||||
{
|
||||
// Can't append something that we're not given.
|
||||
return null;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Try to find the category in question.
|
||||
Category? cat = await _db.Categories.FirstOrDefaultAsync(c => c.Id == includedCategory.Value);
|
||||
// Failed?
|
||||
if (cat == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
else
|
||||
{
|
||||
IQueryable<Quote> query = _db.Quotes
|
||||
.Include(q => q.QuoteCategories!)
|
||||
.Where(q => q.QuoteCategories
|
||||
.Any(qc => qc.Category == cat)
|
||||
);
|
||||
int totalQuotes = await query.CountAsync();
|
||||
if (totalQuotes > 0) {
|
||||
|
||||
Random random = new();
|
||||
int skip = random.Next(0, totalQuotes);
|
||||
|
||||
Quote? quote = await query
|
||||
.Skip(skip)
|
||||
.Take(1)
|
||||
.FirstOrDefaultAsync();
|
||||
|
||||
if (quote != null) {
|
||||
_prompt += $" Przykładowy cytat z tej kategorii brzmi: \"{quote.Text} ~ {quote.Author}\".\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<Dictionary<string, string>> promptMessages =
|
||||
[
|
||||
new() { { "role", "user" }, {"content", _prompt } }
|
||||
];
|
||||
|
||||
// Will throw error if not present
|
||||
string apiUrl = _appsettings.GetSection("LlmIntegration")["ApiUrl"] + "/chat/completions"
|
||||
?? throw new MissingFieldException("API URL missing in LlmIntegration section of appsettings.json!");
|
||||
string apiKey = _appsettings.GetSection("LlmIntegration")["ApiKey"]
|
||||
?? throw new MissingFieldException("API key missing in LlmIntegration section of appsettings.json!");
|
||||
|
||||
using (var client = new HttpClient())
|
||||
{
|
||||
// Not the best practice if we want reusable connections
|
||||
// https://stackoverflow.com/a/40707446
|
||||
client.DefaultRequestHeaders.Add("Authorization", $"Bearer {apiKey}");
|
||||
var json = JsonConvert.SerializeObject(new
|
||||
{
|
||||
model = _model,
|
||||
temperature = _temp,
|
||||
max_tokens = (includeCategorySample ?? false) ? 2000 : 1000,
|
||||
messages = promptMessages
|
||||
});
|
||||
var content = new StringContent(json, Encoding.UTF8, "application/json");
|
||||
var response = await client.PostAsync(apiUrl, content);
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
string receivedResponse = await response.Content.ReadAsStringAsync();
|
||||
return JObject.Parse(receivedResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Handle the error
|
||||
Console.WriteLine($"[QuotifyBE] Error: response status code from API was {response.StatusCode}.");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Cors;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using QuotifyBE.Data;
|
||||
using QuotifyBE.DTOs;
|
||||
using QuotifyBE.Entities;
|
||||
@@ -481,4 +482,75 @@ public class QuotesController : ControllerBase
|
||||
return Ok(quote.ToQuoteShortDTO());
|
||||
}
|
||||
|
||||
// POST /api/v1/quotes/ai
|
||||
/// <summary>
|
||||
/// [AUTHED] Request a LLM-generated quote
|
||||
/// </summary>
|
||||
/// <returns>Generated quote's text</returns>
|
||||
/// <remarks>
|
||||
/// <b>Notes</b>:<br/>
|
||||
///
|
||||
/// <ul>
|
||||
/// If <i>customPrompt</i> is passed:
|
||||
/// <li>The default prompt is overriden by whatever has been passed by the user.</li>
|
||||
/// </ul><br/>
|
||||
///
|
||||
/// <ul>
|
||||
/// If <i>model</i> is passed:
|
||||
/// <li>The default large language model is overriden by whatever has been passed by the user.</li>
|
||||
/// </ul><br/>
|
||||
///
|
||||
/// <ul>
|
||||
/// If <i>temperature</i> is passed:
|
||||
/// <li>The default temperature (= 0.8) is overriden by whatever has been passed by the user.</li>
|
||||
/// </ul><br/>
|
||||
///
|
||||
/// <ul>
|
||||
/// If <i>categoryId</i> is passed:
|
||||
/// <li>The prompt is appended with an instruction in Polish to generate quotes based on the provided category
|
||||
/// (both name and description get passed to the model).</li>
|
||||
/// <li><b>Heads up!</b> The text is appended even if <i>customPrompt</i> has been provided.</li>
|
||||
/// </ul><br/>
|
||||
///
|
||||
/// <ul>
|
||||
/// If <i>useSampleQuote</i> is passed:
|
||||
/// <li>The prompt will be appended with a randomly chosen quote from the categoryId (if any exist),
|
||||
/// thus passing categoryId becomes a prerequisite.</li>
|
||||
/// <li><b>Heads up!</b> The request will fail returning status code 400 if categoryId isn't provided!</li>
|
||||
/// </ul>
|
||||
/// </remarks>
|
||||
/// <param name="request">Form data containing required quote information</param>
|
||||
/// <response code="200">Returned on valid request</response>
|
||||
/// <response code="400">Returned when generation failed due to remote server error (likely because of a bad request)</response>
|
||||
/// <response code="500">Returned when response has been generated, but couldn't be parsed (likely because of incompatible server or bad URL)</response>
|
||||
[HttpPost("ai")]
|
||||
[Authorize]
|
||||
[EnableCors]
|
||||
[ProducesResponseType(200)]
|
||||
[ProducesResponseType(typeof(ErrorDTO), 400)]
|
||||
[ProducesResponseType(typeof(ErrorDTO), 500)]
|
||||
public async Task<IActionResult> CreateLLMQuote([FromBody] AskLLMInDTO request)
|
||||
{
|
||||
|
||||
JObject? generatedResponse = await guhf.GenerateLLMResponse(
|
||||
request.CustomPrompt, request.Model, request.Temperature, request.CategoryId, request.UseSampleQuote
|
||||
);
|
||||
|
||||
// Check if any errors occurred
|
||||
if (generatedResponse == null)
|
||||
{
|
||||
return StatusCode(400, new ErrorDTO { Status = "error", Error_msg = "Generation failed most likely due to bad request" });
|
||||
}
|
||||
|
||||
// Parse JSON to get the bot reply
|
||||
string? llmResponse = generatedResponse["choices"]?[0]?["message"]?["content"]?.ToString().Trim('"');
|
||||
|
||||
// If response string is not where we expect it, return 500
|
||||
if (llmResponse == null)
|
||||
return StatusCode(500, new ErrorDTO { Status = "error", Error_msg = "Unexpected API response" });
|
||||
|
||||
// Otherwise, return the response
|
||||
return Ok(new { Status = "ok", BotResponse = llmResponse });
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
12
DTOs/AskLLMInDTO.cs
Normal file
12
DTOs/AskLLMInDTO.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
namespace QuotifyBE.DTOs;
|
||||
|
||||
public record class AskLLMInDTO
|
||||
{
|
||||
public string? CustomPrompt { get; set; }
|
||||
public string? Model { get; set; } = "deepclaude";
|
||||
public float? Temperature { get; set; } = 0.8f;
|
||||
public int? CategoryId { get; set; } = null;
|
||||
public bool? UseSampleQuote { get; set; } = false;
|
||||
|
||||
};
|
||||
|
||||
@@ -29,6 +29,7 @@
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.22.1" />
|
||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
||||
<PackageReference Include="Npgsql" Version="9.0.3" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
|
||||
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.3" />
|
||||
|
||||
@@ -2,10 +2,16 @@
|
||||
"JwtSecret": "this is a sample jwt secret token required for quotify - it needs to have at least 256 bits (32 bytes long)",
|
||||
"DomainName": "example.com",
|
||||
"CorsOrigins": [
|
||||
"http://localhost:5259", "http://localhost:5258", "http://localhost:3000", "http://example.com"
|
||||
"https://localhost:7029", "http://localhost:5259", "http://localhost:5258", "http://localhost:3000", "http://example.com"
|
||||
],
|
||||
"UserContent": {
|
||||
"MaxFileSize": 5242880,
|
||||
"MaxFileSize": 5242880
|
||||
},
|
||||
"LlmIntegration": {
|
||||
"ApiUrl": "URL to OpenAI-compatible API server, e.g. https://example.com/api/v1",
|
||||
"ApiKey": "FILL ME for AI-generation capabilities",
|
||||
"DefaultPrompt": "Cześć, czy jesteś w stanie wymyślić i stworzyć jeden oryginalny cytat?\nZastanów się nad jego puentą, a kiedy będziesz gotów - zwróć sam cytat.\nNie pytaj mnie co o nim sądzę, ani nie używaj emotikonów (emoji).\nPamiętaj, że dobre cytaty są krótkie, zwięzłe.",
|
||||
"DefaultModel": "deepclaude"
|
||||
},
|
||||
"ConnectionStrings": {
|
||||
"DefaultConnection": "Server=server-host;Database=db-name;Username=quotify-user;Password=user-secret"
|
||||
|
||||
Reference in New Issue
Block a user