mirror of
https://github.com/QuotifyTeam/QuotifyBE.git
synced 2025-12-16 02:30:08 +01:00
feat: LLM API endpoint
This commit is contained in:
@@ -2,6 +2,7 @@ using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Cors;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using QuotifyBE.Data;
|
||||
using QuotifyBE.DTOs;
|
||||
using QuotifyBE.Entities;
|
||||
@@ -481,4 +482,75 @@ public class QuotesController : ControllerBase
|
||||
return Ok(quote.ToQuoteShortDTO());
|
||||
}
|
||||
|
||||
// POST /api/v1/quotes/ai
/// <summary>
/// [AUTHED] Request an LLM-generated quote
/// </summary>
/// <returns>Generated quote's text</returns>
/// <remarks>
/// <b>Notes</b>:<br/>
///
/// <ul>
/// If <i>customPrompt</i> is passed:
/// <li>The default prompt is overridden by whatever has been passed by the user.</li>
/// </ul><br/>
///
/// <ul>
/// If <i>model</i> is passed:
/// <li>The default large language model is overridden by whatever has been passed by the user.</li>
/// </ul><br/>
///
/// <ul>
/// If <i>temperature</i> is passed:
/// <li>The default temperature (= 0.8) is overridden by whatever has been passed by the user.</li>
/// </ul><br/>
///
/// <ul>
/// If <i>categoryId</i> is passed:
/// <li>The prompt is appended with an instruction in Polish to generate quotes based on the provided category
/// (both name and description get passed to the model).</li>
/// <li><b>Heads up!</b> The text is appended even if <i>customPrompt</i> has been provided.</li>
/// </ul><br/>
///
/// <ul>
/// If <i>useSampleQuote</i> is passed:
/// <li>The prompt will be appended with a randomly chosen quote from the categoryId (if any exist),
/// thus passing categoryId becomes a prerequisite.</li>
/// <li><b>Heads up!</b> The request will fail returning status code 400 if categoryId isn't provided!</li>
/// </ul>
/// </remarks>
/// <param name="request">Form data containing required quote information</param>
/// <response code="200">Returned on valid request</response>
/// <response code="400">Returned when generation failed due to remote server error (likely because of a bad request)</response>
/// <response code="500">Returned when response has been generated, but couldn't be parsed (likely because of incompatible server or bad URL)</response>
[HttpPost("ai")]
[Authorize]
[EnableCors]
[ProducesResponseType(200)]
[ProducesResponseType(typeof(ErrorDTO), 400)]
[ProducesResponseType(typeof(ErrorDTO), 500)]
public async Task<IActionResult> CreateLLMQuote([FromBody] AskLLMInDTO request)
{
    // Delegate generation to the helper, forwarding every optional override the
    // caller supplied. NOTE(review): `guhf` is presumably an injected helper/service
    // field declared elsewhere in this controller — confirm; a more descriptive
    // field name would help readers.
    JObject? generatedResponse = await guhf.GenerateLLMResponse(
        request.CustomPrompt, request.Model, request.Temperature, request.CategoryId, request.UseSampleQuote
    );

    // A null result means the remote call itself failed; surface it as a 400.
    if (generatedResponse == null)
    {
        return StatusCode(400, new ErrorDTO { Status = "error", Error_msg = "Generation failed most likely due to bad request" });
    }

    // Extract the assistant reply (OpenAI-style response shape). SelectToken returns
    // null for ANY missing path segment — including an empty "choices" array — whereas
    // the indexer chain ["choices"]?[0] would throw ArgumentOutOfRangeException on an
    // empty array instead of falling through to the 500 branch below.
    string? llmResponse = generatedResponse.SelectToken("choices[0].message.content")?.ToString().Trim('"');

    // If the response string is not where we expect it, report a server-side parse failure.
    if (llmResponse == null)
    {
        return StatusCode(500, new ErrorDTO { Status = "error", Error_msg = "Unexpected API response" });
    }

    // Otherwise, return the generated text.
    return Ok(new { Status = "ok", BotResponse = llmResponse });
}
|
||||
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user