feature/FA-5_ImageSupport #31

Merged
conco merged 4 commits from feature/FA-5_ImageSupport into master 2025-11-24 02:17:10 +00:00
19 changed files with 519 additions and 103 deletions
Showing only changes of commit 573a0f6e3f - Show all commits

View File

@@ -1,99 +0,0 @@
@echo off
setlocal enabledelayedexpansion
set ROOT=%~dp0
for %%A in ("%ROOT%..") do set SERVICES_DIR=%%~fA\
REM ----------------------------------------
REM List of project names to skip
REM (space-separated, match folder names exactly)
REM ----------------------------------------
set SKIP_PROJECTS=FictionArchive.Service.Shared FictionArchive.Service.AuthenticationService
echo ----------------------------------------
echo Finding GraphQL services...
echo ----------------------------------------
set SERVICE_LIST=
for /d %%F in ("%SERVICES_DIR%FictionArchive.Service.*") do (
set "PROJECT_NAME=%%~nxF"
set "SKIP=0"
REM Check if this project name is in the skip list
for %%X in (%SKIP_PROJECTS%) do (
if /I "!PROJECT_NAME!"=="%%X" (
set "SKIP=1"
)
)
if !SKIP!==0 (
echo Found service: !PROJECT_NAME!
set SERVICE_LIST=!SERVICE_LIST! %%F
) else (
echo Skipping service: !PROJECT_NAME!
)
)
echo:
echo ----------------------------------------
echo Exporting schemas and packing subgraphs...
echo ----------------------------------------
for %%S in (%SERVICE_LIST%) do (
echo Processing service folder: %%S
pushd "%%S"
echo Running schema export...
dotnet run -- schema export --output schema.graphql
if errorlevel 1 (
echo ERROR during schema export in %%S
popd
exit /b 1
)
echo Running fusion subgraph pack...
fusion subgraph pack
if errorlevel 1 (
echo ERROR during subgraph pack in %%S
popd
exit /b 1
)
popd
echo Completed: %%S
echo.
)
echo ----------------------------------------
echo Running fusion compose...
echo ----------------------------------------
pushd "%ROOT%"
if exist gateway.fgp del gateway.fgp
for %%S in (%SERVICE_LIST%) do (
REM Extract the full folder name WITH dots preserved
set "SERVICE_NAME=%%~nxS"
echo Composing subgraph: !SERVICE_NAME!
fusion compose -p gateway.fgp -s "..\!SERVICE_NAME!"
if errorlevel 1 (
echo ERROR during fusion compose
popd
exit /b 1
)
)
popd
echo ----------------------------------------
echo Fusion build complete!
echo ----------------------------------------
endlocal
exit /b 0

View File

@@ -0,0 +1,138 @@
<#
.SYNOPSIS
Export GraphQL schemas, pack subgraphs and compose the gateway (PowerShell).
.DESCRIPTION
- Searches for FictionArchive.Service.* folders one directory above this script.
- Reads gateway_skip.txt next to the script.
- Builds each service (Release).
- Runs `dotnet run --no-build --no-launch-profile -- schema export` in each service to avoid running the web host.
- Packs subgraphs.
- Composes the gateway from FictionArchive.API.
#>
[CmdletBinding()]
param()
function Write-ErrExit {
param($Message, $Code = 1)
Write-Error $Message
exit $Code
}
# Resolve directories
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
$ServicesDir = Resolve-Path -Path (Join-Path $ScriptDir '..') -ErrorAction Stop
$ApiDir = Join-Path $ServicesDir 'FictionArchive.API'
Write-Host "Script dir: $ScriptDir"
Write-Host "Services dir: $ServicesDir"
# Load skip list
$SkipFile = Join-Path $ScriptDir 'gateway_skip.txt'
$SkipList = @()
Write-Host "----------------------------------------"
Write-Host " Loading skip list..."
Write-Host "----------------------------------------"
if (Test-Path $SkipFile) {
$SkipList = Get-Content $SkipFile |
ForEach-Object { $_.Trim() } |
Where-Object { $_ -and -not $_.StartsWith('#') }
Write-Host "Skipping: $($SkipList -join ', ')"
} else {
Write-Warning "skip-projects.txt not found — no services will be skipped."
}
# Find service directories
Write-Host
Write-Host "----------------------------------------"
Write-Host " Finding GraphQL services..."
Write-Host "----------------------------------------"
$servicePattern = 'FictionArchive.Service.*'
$serviceDirs = Get-ChildItem -Path $ServicesDir -Directory -Filter $servicePattern
if (-not $serviceDirs) {
Write-ErrExit "No service folders found matching FictionArchive.Service.* under $ServicesDir"
}
$selectedServices = @()
foreach ($d in $serviceDirs) {
if ($SkipList -contains $d.Name) {
Write-Host "Skipping: $($d.Name)"
continue
}
Write-Host "Found: $($d.Name)"
$selectedServices += $d.FullName
}
if (-not $selectedServices) {
Write-ErrExit "All services skipped — nothing to do."
}
# Export schemas and pack subgraphs
Write-Host
Write-Host "----------------------------------------"
Write-Host " Exporting schemas & packing subgraphs..."
Write-Host "----------------------------------------"
foreach ($svcPath in $selectedServices) {
$svcName = Split-Path -Leaf $svcPath
Write-Host "`nProcessing: $svcName"
Push-Location $svcPath
try {
# Build Release
Write-Host "Building $svcName..."
dotnet build -c Release
if ($LASTEXITCODE -ne 0) { Write-ErrExit "dotnet build failed for $svcName" }
# Schema export using dotnet run (no server)
Write-Host "Running schema export..."
dotnet run --no-build --no-launch-profile -- schema export --output schema.graphql
if ($LASTEXITCODE -ne 0) { Write-ErrExit "Schema export failed for $svcName" }
# Pack subgraph
Write-Host "Running fusion subgraph pack..."
fusion subgraph pack
if ($LASTEXITCODE -ne 0) { Write-ErrExit "fusion subgraph pack failed for $svcName" }
Write-Host "Completed: $svcName"
}
finally {
Pop-Location
}
}
# Compose gateway
Write-Host
Write-Host "----------------------------------------"
Write-Host " Running fusion compose..."
Write-Host "----------------------------------------"
if (-not (Test-Path $ApiDir)) {
Write-ErrExit "API directory not found: $ApiDir"
}
Push-Location $ApiDir
try {
if (Test-Path "gateway.fgp") { Remove-Item "gateway.fgp" -Force }
foreach ($svcPath in $selectedServices) {
$svcName = Split-Path -Leaf $svcPath
Write-Host "Composing: $svcName"
fusion compose -p gateway.fgp -s ("..\" + $svcName)
if ($LASTEXITCODE -ne 0) { Write-ErrExit "fusion compose failed for $svcName" }
}
Write-Host "`nFusion build complete!"
}
finally {
Pop-Location
}
exit 0

View File

@@ -14,10 +14,18 @@ SERVICES_DIR="$(cd "$ROOT/.." && pwd)"
 ###############################################
 # Skip list (folder names, match exactly)
 ###############################################
-SKIP_PROJECTS=(
-    "FictionArchive.Service.Shared"
-    "FictionArchive.Service.Legacy"
-)
+SKIP_FILE="$ROOT/gateway_skip.txt"
+SKIP_PROJECTS=()
+
+if [[ -f "$SKIP_FILE" ]]; then
+    # Read non-empty lines, ignoring comments
+    while IFS= read -r line; do
+        [[ -z "$line" || "$line" =~ ^# ]] && continue
+        SKIP_PROJECTS+=("$line")
+    done < "$SKIP_FILE"
+else
+    echo "WARNING: gateway_skip.txt not found - no projects will be skipped."
+fi
 
 echo "----------------------------------------"
 echo " Finding GraphQL services..."

View File

@@ -0,0 +1,4 @@
# List of service folders to skip
FictionArchive.Service.Shared
FictionArchive.Service.AuthenticationService
FictionArchive.Service.FileService

View File

@@ -0,0 +1,8 @@
namespace FictionArchive.Common.Enums;
public enum RequestStatus
{
Failed = -1,
Pending = 0,
Success = 1
}

View File

@@ -0,0 +1,49 @@
using System.Web;
using Amazon.S3;
using Amazon.S3.Model;
using FictionArchive.Service.FileService.Models;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
namespace FictionArchive.Service.FileService.Controllers
{
[Route("api/{*path}")]
[ApiController]
public class S3ProxyController : ControllerBase
{
private readonly AmazonS3Client _amazonS3Client;
private readonly S3Configuration _s3Configuration;
public S3ProxyController(AmazonS3Client amazonS3Client, IOptions<S3Configuration> s3Configuration)
{
_amazonS3Client = amazonS3Client;
_s3Configuration = s3Configuration.Value;
}
[HttpGet]
public async Task<IActionResult> Get(string path)
{
var decodedPath = HttpUtility.UrlDecode(path);
try
{
var s3Response = await _amazonS3Client.GetObjectAsync(new GetObjectRequest()
{
BucketName = _s3Configuration.Bucket,
Key = decodedPath
});
return new FileStreamResult(s3Response.ResponseStream, s3Response.Headers.ContentType);
}
catch (AmazonS3Exception e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound)
{
// The requested key does not exist in the bucket; any other S3 error propagates.
return NotFound();
}
}
}
}
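
For orientation, a minimal client-side sketch (not part of this PR) of how the catch-all route above resolves to an object key: everything after api/ is URL-decoded and used verbatim as the S3 key, so a request for api/images/cover.png streams the object images/cover.png from the configured bucket. The host below mirrors the HTTPS development launch profile; the key is a placeholder.

using System;
using System.Net.Http;
using System.Threading.Tasks;

// Illustrative client only: fetches an object through the S3ProxyController.
// The route template "api/{*path}" captures the rest of the URL as the bucket key.
public static class S3ProxyClientSample
{
    public static async Task Main()
    {
        // Host taken from the HTTPS development launch profile; key is a placeholder.
        using var http = new HttpClient { BaseAddress = new Uri("https://localhost:7247") };

        using var response = await http.GetAsync("api/images/cover.png");
        response.EnsureSuccessStatusCode();

        var bytes = await response.Content.ReadAsByteArrayAsync();
        Console.WriteLine($"Downloaded {bytes.Length} bytes, Content-Type: {response.Content.Headers.ContentType}");
    }
}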

View File

@@ -0,0 +1,23 @@
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["FictionArchive.Service.ImageService/FictionArchive.Service.ImageService.csproj", "FictionArchive.Service.ImageService/"]
RUN dotnet restore "FictionArchive.Service.ImageService/FictionArchive.Service.ImageService.csproj"
COPY . .
WORKDIR "/src/FictionArchive.Service.ImageService"
RUN dotnet build "./FictionArchive.Service.ImageService.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./FictionArchive.Service.ImageService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "FictionArchive.Service.ImageService.dll"]

View File

@@ -0,0 +1,30 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<Content Include="..\.dockerignore">
<Link>.dockerignore</Link>
</Content>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\FictionArchive.Service.Shared\FictionArchive.Service.Shared.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" Version="4.0.13.1" />
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="9.0.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="10.0.1" />
</ItemGroup>
<ItemGroup>
<Folder Include="Controllers\" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,10 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.FileService.Models.IntegrationEvents;
public class FileUploadRequestCreatedEvent : IIntegrationEvent
{
public Guid RequestId { get; set; }
public string FilePath { get; set; }
public byte[] FileData { get; set; }
}

View File

@@ -0,0 +1,22 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.FileService.Models.IntegrationEvents;
public class FileUploadRequestStatusUpdateEvent : IIntegrationEvent
{
public Guid RequestId { get; set; }
public RequestStatus Status { get; set; }
#region Success
public string? FileAccessUrl { get; set; }
#endregion
#region Failure
public string? ErrorMessage { get; set; }
#endregion
}
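
As a hedged sketch of the consuming side of this contract (not included in this PR), a subscriber on the requesting service could branch on Status and read only the members of the matching region. The handler name and log calls are illustrative; only the IIntegrationEventHandler<T> shape is taken from the handlers elsewhere in this change set.

using System.Threading.Tasks;
using FictionArchive.Common.Enums;
using FictionArchive.Service.FileService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
using Microsoft.Extensions.Logging;

// Hypothetical consumer: reacts to the status update published by the FileService.
public class FileUploadRequestStatusUpdateEventHandler : IIntegrationEventHandler<FileUploadRequestStatusUpdateEvent>
{
    private readonly ILogger<FileUploadRequestStatusUpdateEventHandler> _logger;

    public FileUploadRequestStatusUpdateEventHandler(ILogger<FileUploadRequestStatusUpdateEventHandler> logger)
    {
        _logger = logger;
    }

    public Task Handle(FileUploadRequestStatusUpdateEvent @event)
    {
        switch (@event.Status)
        {
            case RequestStatus.Success:
                // FileAccessUrl is only populated in the Success region.
                _logger.LogInformation("Upload {RequestId} stored at {Url}", @event.RequestId, @event.FileAccessUrl);
                break;
            case RequestStatus.Failed:
                // ErrorMessage is only populated in the Failure region.
                _logger.LogError("Upload {RequestId} failed: {Error}", @event.RequestId, @event.ErrorMessage);
                break;
        }

        return Task.CompletedTask;
    }
}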

View File

@@ -0,0 +1,6 @@
namespace FictionArchive.Service.FileService.Models;
public class ProxyConfiguration
{
public string BaseUrl { get; set; }
}

View File

@@ -0,0 +1,9 @@
namespace FictionArchive.Service.FileService.Models;
public class S3Configuration
{
public string Url { get; set; }
public string Bucket { get; set; }
public string AccessKey { get; set; }
public string SecretKey { get; set; }
}

View File

@@ -0,0 +1,66 @@
using Amazon.Runtime;
using Amazon.S3;
using FictionArchive.Common.Extensions;
using FictionArchive.Service.FileService.Models;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using Microsoft.Extensions.Options;
namespace FictionArchive.Service.FileService;
public class Program
{
public static void Main(string[] args)
{
var builder = WebApplication.CreateBuilder(args);
builder.AddLocalAppsettings();
builder.Services.AddControllers();
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();
builder.Services.AddHealthChecks();
#region Event Bus
builder.Services.AddRabbitMQ(opt =>
{
builder.Configuration.GetSection("RabbitMQ").Bind(opt);
});
#endregion
builder.Services.Configure<ProxyConfiguration>(builder.Configuration.GetSection("ProxyConfiguration"));
// Add S3 Client
builder.Services.Configure<S3Configuration>(builder.Configuration.GetSection("S3"));
builder.Services.AddSingleton<AmazonS3Client>(provider =>
{
var config = provider.GetRequiredService<IOptions<S3Configuration>>().Value;
var s3Config = new AmazonS3Config
{
ServiceURL = config.Url, // Garage endpoint
ForcePathStyle = true, // REQUIRED for Garage
AuthenticationRegion = "garage"
};
return new AmazonS3Client(
new BasicAWSCredentials(config.AccessKey, config.SecretKey),
s3Config);
});
var app = builder.Build();
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI();
}
app.MapHealthChecks("/healthz");
app.MapControllers();
app.Run();
}
}

View File

@@ -0,0 +1,39 @@
{
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:5546",
"sslPort": 44373
}
},
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "http://localhost:5057",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
"applicationUrl": "https://localhost:7247;http://localhost:5057",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}

View File

@@ -0,0 +1,57 @@
using Amazon.S3;
using Amazon.S3.Model;
using FictionArchive.Common.Enums;
using FictionArchive.Service.FileService.Models;
using FictionArchive.Service.FileService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
using Microsoft.Extensions.Options;
namespace FictionArchive.Service.FileService.Services.EventHandlers;
public class FileUploadRequestCreatedEventHandler : IIntegrationEventHandler<FileUploadRequestCreatedEvent>
{
private readonly ILogger<FileUploadRequestCreatedEventHandler> _logger;
private readonly AmazonS3Client _amazonS3Client;
private readonly IEventBus _eventBus;
private readonly S3Configuration _s3Configuration;
private readonly ProxyConfiguration _proxyConfiguration;
public FileUploadRequestCreatedEventHandler(ILogger<FileUploadRequestCreatedEventHandler> logger, AmazonS3Client amazonS3Client, IEventBus eventBus, IOptions<S3Configuration> s3Configuration, IOptions<ProxyConfiguration> proxyConfiguration)
{
_logger = logger;
_amazonS3Client = amazonS3Client;
_eventBus = eventBus;
_proxyConfiguration = proxyConfiguration.Value;
_s3Configuration = s3Configuration.Value;
}
public async Task Handle(FileUploadRequestCreatedEvent @event)
{
var putObjectRequest = new PutObjectRequest();
putObjectRequest.BucketName = _s3Configuration.Bucket;
putObjectRequest.Key = @event.FilePath;
using MemoryStream memoryStream = new MemoryStream(@event.FileData);
putObjectRequest.InputStream = memoryStream;
var s3Response = await _amazonS3Client.PutObjectAsync(putObjectRequest);
if (s3Response.HttpStatusCode != System.Net.HttpStatusCode.OK)
{
_logger.LogError("An error occurred while uploading file to S3. Response code: {responsecode}", s3Response.HttpStatusCode);
await _eventBus.Publish(new FileUploadRequestStatusUpdateEvent()
{
RequestId = @event.RequestId,
Status = RequestStatus.Failed,
ErrorMessage = "An error occurred while uploading file to S3."
});
return;
}
await _eventBus.Publish(new FileUploadRequestStatusUpdateEvent()
{
Status = RequestStatus.Success,
RequestId = @event.RequestId,
FileAccessUrl = _proxyConfiguration.BaseUrl + "/" + @event.FilePath
});
}
}
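
For context, a hedged sketch of the producing side that would trigger the handler above, assuming the publisher shares the same IEventBus abstraction and event contract shown in this PR; the class name and key layout are illustrative and not part of this change.

using System;
using System.Threading.Tasks;
using FictionArchive.Service.FileService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;

// Hypothetical producer: requests an upload and keeps the RequestId so it can
// correlate the FileUploadRequestStatusUpdateEvent that comes back.
public class CoverImageUploader
{
    private readonly IEventBus _eventBus;

    public CoverImageUploader(IEventBus eventBus)
    {
        _eventBus = eventBus;
    }

    public async Task<Guid> RequestUploadAsync(byte[] imageBytes, string fileName)
    {
        var requestId = Guid.NewGuid();

        await _eventBus.Publish(new FileUploadRequestCreatedEvent
        {
            RequestId = requestId,
            FilePath = $"images/{requestId}/{fileName}", // illustrative key layout
            FileData = imageBytes
        });

        return requestId;
    }
}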

View File

@@ -0,0 +1,8 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}

View File

@@ -0,0 +1,22 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Proxy": {
"BaseUrl": "https://localhost:7247/api"
},
"RabbitMQ": {
"ConnectionString": "amqp://localhost",
"ClientIdentifier": "NovelService"
},
"S3": {
"Url": "https://s3.orfl.xyz",
"Bucket": "fictionarchive",
"AccessKey": "REPLACE_ME",
"SecretKey": "REPLACE_ME"
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,10 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.FileService.IntegrationEvents;
public class FileUploadRequestCreatedEvent : IIntegrationEvent
{
public Guid RequestId { get; set; }
public string FilePath { get; set; }
public byte[] FileData { get; set; }
}

View File

@@ -16,6 +16,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.User
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.AuthenticationService", "FictionArchive.Service.AuthenticationService\FictionArchive.Service.AuthenticationService.csproj", "{70C4AE82-B01E-421D-B590-C0F47E63CD0C}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.FileService", "FictionArchive.Service.FileService\FictionArchive.Service.FileService.csproj", "{EC64A336-F8A0-4BED-9CA3-1B05AD00631D}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -54,5 +56,9 @@ Global
 		{70C4AE82-B01E-421D-B590-C0F47E63CD0C}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{70C4AE82-B01E-421D-B590-C0F47E63CD0C}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{70C4AE82-B01E-421D-B590-C0F47E63CD0C}.Release|Any CPU.Build.0 = Release|Any CPU
+		{EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 EndGlobal