Compare commits: 708f1a5338 ... master (52 commits)

aae17021af, c60aaf2bdb, b2f4548807, 8d6f0d6cfd, bc83bffb4b, 75e96cbee5, 9c82d648cd, 78612ea29d, 4412a1f658, 12e3c5dfdd, b71d9031e1, 09ebdb1b2a, 43d5ada7fb, 4635ed1b4e, 920fd00910, 0d9f788678, 0938c16a76, f25cbc1a04, 078eaf5237, b9115d78a9, 7e94f06853, 50263109ab, 6ebfe81ae3, 80aac63f7d, adc99c7000, 87075be61e, 259dc08aea, 2203d2ee54, 30cc89242d, 84294455f9, be62af98d3, 15a8185621, 0180a58084, 573f3fc7b0, cdc2176e35, e9eaf1569b, ba99642e97, c6d794aabc, 62e7e20f94, aff1396c6a, 9e1792e4d0, 747a212fb0, 200bdaabed, caa36648e2, 6f2454329d, fdf2ff7c1b, e8596b67c4, a01250696f, 16ed16ff62, 573a0f6e3f, 1adbb955cf, 71e27b5dbb
.gitea/workflows/build-gateway.yml (Normal file, 166 lines)
@@ -0,0 +1,166 @@
name: Build Gateway

on:
  workflow_dispatch:
  push:
    tags:
      - 'v*.*.*'

env:
  REGISTRY: ${{ gitea.server_url }}
  IMAGE_NAME: ${{ gitea.repository_owner }}/fictionarchive-api

jobs:
  build-subgraphs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service:
          - name: novel-service
            project: FictionArchive.Service.NovelService
            subgraph: Novel
          - name: translation-service
            project: FictionArchive.Service.TranslationService
            subgraph: Translation
          - name: scheduler-service
            project: FictionArchive.Service.SchedulerService
            subgraph: Scheduler
          - name: user-service
            project: FictionArchive.Service.UserService
            subgraph: User
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Add .NET tools to PATH
        run: echo "$HOME/.dotnet/tools" >> $GITHUB_PATH

      - name: Restore dependencies
        run: dotnet restore ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj

      - name: Build
        run: dotnet build ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj -c Release --no-restore

      - name: Export schema
        run: |
          dotnet run -c Release --no-launch-profile \
            --project ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj \
            -- schema export --output schema.graphql

      - name: Pack subgraph
        run: fusion subgraph pack -w ${{ matrix.service.project }}

      - name: Upload subgraph package
        uses: christopherhx/gitea-upload-artifact@v4
        with:
          name: ${{ matrix.service.name }}-subgraph
          path: ${{ matrix.service.project }}/*.fsp
          retention-days: 30

  build-gateway:
    runs-on: ubuntu-latest
    needs: build-subgraphs
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Add .NET tools to PATH
        run: echo "$HOME/.dotnet/tools" >> $GITHUB_PATH

      - name: Create subgraphs directory
        run: mkdir -p subgraphs

      - name: Download Novel Service subgraph
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: novel-service-subgraph
          path: subgraphs/novel

      - name: Download Translation Service subgraph
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: translation-service-subgraph
          path: subgraphs/translation

      - name: Download Scheduler Service subgraph
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: scheduler-service-subgraph
          path: subgraphs/scheduler

      - name: Download User Service subgraph
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: user-service-subgraph
          path: subgraphs/user

      - name: Configure subgraph URLs for Docker
        run: |
          for fsp in subgraphs/*/*.fsp; do
            if [ -f "$fsp" ]; then
              dir=$(dirname "$fsp")
              name=$(basename "$dir")
              url="http://${name}-service:8080/graphql"
              echo "Setting $name URL to $url"
              fusion subgraph config set http --url "$url" -c "$fsp"
            fi
          done

      - name: Compose gateway
        run: |
          cd FictionArchive.API
          rm -f gateway.fgp
          for fsp in ../subgraphs/*/*.fsp; do
            if [ -f "$fsp" ]; then
              echo "Composing: $fsp"
              fusion compose -p gateway.fgp -s "$fsp"
            fi
          done

      - name: Restore dependencies
        run: dotnet restore FictionArchive.API/FictionArchive.API.csproj

      - name: Build gateway
        run: dotnet build FictionArchive.API/FictionArchive.API.csproj -c Release --no-restore -p:SkipFusionBuild=true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract registry hostname
        id: registry
        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: FictionArchive.API/Dockerfile
          push: true
          tags: |
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_NAME }}:latest
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
.gitea/workflows/build.yml (Normal file, 74 lines)
@@ -0,0 +1,74 @@
name: CI

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build-backend:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Restore dependencies
        run: dotnet restore FictionArchive.sln

      - name: Build solution
        run: dotnet build FictionArchive.sln --configuration Release --no-restore /p:SkipFusionBuild=true

      - name: Run tests
        run: |
          dotnet test FictionArchive.sln --configuration Release --no-build --verbosity normal \
            --logger "trx;LogFileName=test-results.trx" \
            --collect:"XPlat Code Coverage" \
            --results-directory ./TestResults

      - name: Upload test results
        uses: christopherhx/gitea-upload-artifact@v4
        if: always()
        with:
          name: test-results
          path: ./TestResults/**/*.trx
          retention-days: 30

      - name: Upload coverage results
        uses: christopherhx/gitea-upload-artifact@v4
        if: always()
        with:
          name: coverage-results
          path: ./TestResults/**/coverage.cobertura.xml
          retention-days: 30

  build-frontend:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: fictionarchive-web
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v6.0.0
        with:
          node-version: '20'
          package-manager-cache: false

      - name: Install dependencies
        run: npm ci

      - name: Lint
        run: npm run lint

      - name: Build
        run: npm run build
.gitea/workflows/claude_assistant.yml (Normal file, 49 lines)
@@ -0,0 +1,49 @@
name: Claude PR Assistant

on:
  issue_comment:
    types: [created]
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  pull_request_review:
    types: [submitted]

jobs:
  claude-code-action:
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && contains(github.event.issue.body, '@claude'))
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
      issues: write
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Run Claude PR Action
        uses: markwylde/claude-code-gitea-action@v1.0.20
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          gitea_token: ${{ secrets.CLAUDE_GITEA_TOKEN }}
          # Or use OAuth token instead:
          # claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          timeout_minutes: "60"
          # mode: tag # Default: responds to @claude mentions
          # Optional: Restrict network access to specific domains only
          # experimental_allowed_domains: |
          #   .anthropic.com
          #   .github.com
          #   api.github.com
          #   .githubusercontent.com
          #   bun.sh
          #   registry.npmjs.org
          #   .blob.core.windows.net
.gitea/workflows/release.yml (Normal file, 104 lines)
@@ -0,0 +1,104 @@
name: Release

on:
  push:
    tags:
      - 'v*.*.*'

env:
  REGISTRY: ${{ gitea.server_url }}
  IMAGE_PREFIX: ${{ gitea.repository_owner }}/fictionarchive

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service:
          - name: novel-service
            dockerfile: FictionArchive.Service.NovelService/Dockerfile
          - name: user-service
            dockerfile: FictionArchive.Service.UserService/Dockerfile
          - name: translation-service
            dockerfile: FictionArchive.Service.TranslationService/Dockerfile
          - name: file-service
            dockerfile: FictionArchive.Service.FileService/Dockerfile
          - name: scheduler-service
            dockerfile: FictionArchive.Service.SchedulerService/Dockerfile
          - name: authentication-service
            dockerfile: FictionArchive.Service.AuthenticationService/Dockerfile
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract version from tag
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

      - name: Extract registry hostname
        id: registry
        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ matrix.service.dockerfile }}
          push: true
          tags: |
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:${{ steps.version.outputs.VERSION }}
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

  build-frontend:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract version from tag
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

      - name: Extract registry hostname
        id: registry
        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Build and push frontend Docker image
        uses: docker/build-push-action@v6
        with:
          context: ./fictionarchive-web
          file: fictionarchive-web/Dockerfile
          push: true
          build-args: |
            VITE_GRAPHQL_URI=${{ vars.VITE_GRAPHQL_URI }}
            VITE_OIDC_AUTHORITY=${{ vars.VITE_OIDC_AUTHORITY }}
            VITE_OIDC_CLIENT_ID=${{ vars.VITE_OIDC_CLIENT_ID }}
            VITE_OIDC_REDIRECT_URI=${{ vars.VITE_OIDC_REDIRECT_URI }}
            VITE_OIDC_POST_LOGOUT_REDIRECT_URI=${{ vars.VITE_OIDC_POST_LOGOUT_REDIRECT_URI }}
          tags: |
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-frontend:${{ steps.version.outputs.VERSION }}
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-frontend:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
.gitignore (vendored, 7 lines)
@@ -134,4 +134,9 @@ $RECYCLE.BIN/
_NCrunch*

# Local user appsettings
appsettings.Local.json

# Fusion Builds
schema.graphql
*.fsp
gateway.fgp
Documentation/AGENTS.md (Normal file, 36 lines)
@@ -0,0 +1,36 @@
# Repository Guidelines

## Project Structure & Module Organization
- `FictionArchive.sln` ties together the gateway and all subgraph services.
- `FictionArchive.API`: Fusion gateway host; GraphQL endpoint at `/graphql`, health at `/healthz`, gateway configuration in `gateway.fgp`, and helper script `build_gateway.py`.
- `FictionArchive.Service.*`: GraphQL subgraphs (`AuthenticationService`, `FileService`, `NovelService`, `SchedulerService`, `TranslationService`, `UserService`) plus shared helpers in `FictionArchive.Service.Shared`.
- `FictionArchive.Common`: shared enums and hosting extensions used across services.
- Environment/config files live beside each service (`appsettings*.json`, `Properties/launchSettings.json`); build outputs under `bin/` and `obj/` should stay untracked.

## Build, Test, and Development Commands
- `dotnet restore` then `dotnet build FictionArchive.sln` (Debug by default) to validate all projects compile.
- Run the gateway: `dotnet run --project FictionArchive.API` (serves HTTPS; ensure certificates are trusted locally).
- Run a subgraph locally: `dotnet run --project FictionArchive.Service.NovelService` (or any other service) to debug a single domain.
- Rebuild the Fusion gateway config after subgraph changes: `python FictionArchive.API/build_gateway.py` (requires Python 3 and the `fusion` CLI on PATH; uses `gateway_skip.txt` to omit services).
- If tests are added, prefer `dotnet test FictionArchive.sln` to cover the whole solution.

## Coding Style & Naming Conventions
- Target .NET 8/C# 12; use 4-space indentation and file-scoped namespaces where practical.
- PascalCase for classes, records, interfaces, and public members; camelCase for locals/parameters; suffix async methods with `Async`.
- Favor dependency injection and extension methods for service wiring (see `Program.cs` files and `FictionArchive.Service.Shared/Extensions`).
- Keep GraphQL schema files and other generated artifacts out of commits unless intentionally versioned.

## Testing Guidelines
- No dedicated test projects exist yet; when adding tests, create `*.Tests` projects aligned to each service (e.g., `FictionArchive.Service.NovelService.Tests`) and name test files `*Tests.cs`.
- Prefer xUnit with fluent assertions; aim for coverage on controllers/resolvers, integration events, and critical extension methods.
- Use in-memory fakes or test containers for external dependencies to keep tests deterministic.
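A minimal sketch of the naming and style suggested above, using xUnit and FluentAssertions; `ChapterNumberFormatterTests` and its subject are hypothetical, not existing code.

```csharp
using FluentAssertions;
using Xunit;

// Illustrative only: shows the *Tests class naming and xUnit + FluentAssertions
// style; the formatter is a trivial stand-in so the example stays self-contained.
public class ChapterNumberFormatterTests
{
    private static string Format(int chapter) => $"Chapter {chapter}";

    [Theory]
    [InlineData(1, "Chapter 1")]
    [InlineData(42, "Chapter 42")]
    public void Format_ReturnsReadableLabel(int chapter, string expected)
    {
        Format(chapter).Should().Be(expected);
    }
}
```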
## Commit & Pull Request Guidelines
- Follow the observed pattern: `[FA-123] Short, imperative summary` (reference the tracker ID and keep scope focused).
- Keep commits small and self-contained; include relevant config/schema updates produced by the gateway build script when behavior changes.
- PRs should describe the problem, the solution, and any follow-up work; link to issues, attach GraphQL schema diffs or sample queries when applicable, and note any manual steps (migrations, secrets).

## Security & Configuration Tips
- Do not commit secrets; use user secrets or environment variables for API keys and connection strings referenced in `appsettings*.json`.
- Verify HTTPS is enabled locally; adjust `launchSettings.json` only when necessary and document non-default ports.
- Regenerate `gateway.fgp` after changing subgraph schemas to avoid stale compositions.
Documentation/ARCHITECTURE.md (Normal file, 405 lines)
@@ -0,0 +1,405 @@
# FictionArchive Architecture Overview

## High-Level Architecture

```
┌────────────────────────────────────────────────────────────────┐
│                       React 19 Frontend                        │
│             (Apollo Client, TailwindCSS, OIDC Auth)            │
└───────────────────────────┬────────────────────────────────────┘
                            │ GraphQL
                            ▼
┌────────────────────────────────────────────────────────────────┐
│                  Hot Chocolate Fusion Gateway                  │
│                      (FictionArchive.API)                      │
└──────┬────────┬────────┬────────┬────────┬─────────────────────┘
       │        │        │        │        │
       ▼        ▼        ▼        ▼        ▼
┌──────────┐┌──────────┐┌───────────┐┌──────────┐┌──────────────┐
│  Novel   ││   User   ││Translation││Scheduler ││     File     │
│ Service  ││ Service  ││  Service  ││ Service  ││   Service    │
└────┬─────┘└────┬─────┘└─────┬─────┘└────┬─────┘└──────┬───────┘
     │           │            │           │             │
     └───────────┴────────────┴───────────┴─────────────┘
                              │
                     ┌────────┴────────┐
                     │    RabbitMQ     │
                     │   (Event Bus)   │
                     └─────────────────┘
                              │
                     ┌────────┴────────┐
                     │   PostgreSQL    │
                     │  (per service)  │
                     └─────────────────┘
```

## Technology Stack

| Layer | Technology | Version |
|-------|------------|---------|
| Runtime | .NET | 8.0 |
| GraphQL | Hot Chocolate / Fusion | 13+ |
| Database | PostgreSQL | 12+ |
| ORM | Entity Framework Core | 8.0 |
| Message Broker | RabbitMQ | 3.12+ |
| Job Scheduler | Quartz.NET | Latest |
| Object Storage | AWS S3 / Garage | - |
| Date/Time | NodaTime | Latest |
| Frontend | React | 19.2 |
| Frontend Build | Vite | 7.2 |
| GraphQL Client | Apollo Client | 4.0 |
| Auth | OIDC Client TS | 3.4 |
| Styling | TailwindCSS | 3.4 |
| UI Components | Radix UI | Latest |

## Project Structure

```
FictionArchive.sln
├── FictionArchive.Common                  # Shared enums and extensions
├── FictionArchive.API                     # GraphQL Fusion Gateway
├── FictionArchive.Service.Shared          # Shared infrastructure
├── FictionArchive.Service.NovelService
├── FictionArchive.Service.UserService
├── FictionArchive.Service.TranslationService
├── FictionArchive.Service.FileService
├── FictionArchive.Service.SchedulerService
├── FictionArchive.Service.AuthenticationService
├── FictionArchive.Service.NovelService.Tests
└── fictionarchive-web                     # React frontend
```

## Services

### FictionArchive.API - GraphQL Fusion Gateway

- **Role**: Single entry point for all GraphQL queries
- **Port**: 5001 (HTTPS)
- **Endpoints**:
  - `/graphql` - GraphQL endpoint
  - `/healthz` - Health check
- **Responsibilities**:
  - Compose GraphQL schemas from all subgraphs
  - Route queries to appropriate services
  - CORS policy management

### FictionArchive.Service.NovelService

- **Role**: Novel/fiction content management
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_NovelService`
- **GraphQL Operations**:
  - `GetNovels` - Paginated, filterable novel listing
  - `ImportNovel` - Trigger novel import
  - `FetchChapterContents` - Fetch chapter content
- **Models**: Novel, Chapter, Source, NovelTag, Image, LocalizationKey
- **External Integration**: Novelpia adapter
- **Events Published**: `TranslationRequestCreatedEvent`, `FileUploadRequestCreatedEvent`
- **Events Subscribed**: `TranslationRequestCompletedEvent`, `NovelUpdateRequestedEvent`, `ChapterPullRequestedEvent`, `FileUploadRequestStatusUpdateEvent`

### FictionArchive.Service.UserService

- **Role**: User identity and profile management
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_UserService`
- **Models**: User (with OAuth provider linking)
- **Events Subscribed**: `AuthUserAddedEvent`

### FictionArchive.Service.TranslationService

- **Role**: Text translation orchestration
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_TranslationService`
- **External Integration**: DeepL API
- **Models**: TranslationRequest
- **Events Published**: `TranslationRequestCompletedEvent`
- **Events Subscribed**: `TranslationRequestCreatedEvent`

### FictionArchive.Service.FileService

- **Role**: File storage and S3 proxy
- **Port**: 8080 (HTTP)
- **Protocol**: REST only (not GraphQL)
- **Endpoints**: `GET /api/{*path}` - S3 file proxy
- **External Integration**: S3-compatible storage (AWS S3 / Garage)
- **Events Published**: `FileUploadRequestStatusUpdateEvent`
- **Events Subscribed**: `FileUploadRequestCreatedEvent`

### FictionArchive.Service.SchedulerService

- **Role**: Job scheduling and automation
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_SchedulerService`
- **Scheduler**: Quartz.NET with persistent job store
- **GraphQL Operations**: `ScheduleEventJob`, `GetScheduledJobs`
- **Models**: SchedulerJob, EventJobTemplate

### FictionArchive.Service.AuthenticationService

- **Role**: OAuth/OIDC webhook receiver
- **Port**: 8080 (HTTP)
- **Protocol**: REST only
- **Endpoints**: `POST /api/AuthenticationWebhook/UserRegistered`
- **Events Published**: `AuthUserAddedEvent`
- **No Database** - Stateless webhook handler

## Communication Patterns

### GraphQL Federation

- Hot Chocolate Fusion Gateway composes subgraph schemas
- Schema export automated via `build_gateway.py`
- Each service defines its own Query/Mutation types

### Event-Driven Architecture (RabbitMQ)

- Direct exchange: `fiction-archive-event-bus`
- Per-service queues based on `ClientIdentifier`
- Routing key = event class name
- Headers: `X-Created-At`, `X-Event-Id`
- NodaTime JSON serialization
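A publish-side sketch of the conventions listed above, written against the synchronous RabbitMQ.Client 6.x `IModel` API for brevity; the actual `RabbitMQEventBus` implementation, its serializer settings, and its member names may differ.

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json;
using RabbitMQ.Client;

// Illustrative publisher only: direct exchange, routing key = event class name,
// correlation headers attached per message.
public static class EventBusPublishSketch
{
    private const string ExchangeName = "fiction-archive-event-bus";

    public static void Publish<TEvent>(IModel channel, TEvent @event, Guid eventId)
    {
        channel.ExchangeDeclare(ExchangeName, ExchangeType.Direct, durable: true);

        var routingKey = typeof(TEvent).Name;                    // e.g. "TranslationRequestCreatedEvent"
        var body = JsonSerializer.SerializeToUtf8Bytes(@event);  // real code uses NodaTime-aware serialization

        var props = channel.CreateBasicProperties();
        props.Headers = new Dictionary<string, object>
        {
            ["X-Created-At"] = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
            ["X-Event-Id"] = eventId.ToString()
        };

        channel.BasicPublish(ExchangeName, routingKey, props, body);
    }
}
```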

### Event Flow Examples

**Novel Import:**
```
1. Frontend → importNovel mutation
2. NovelService publishes NovelUpdateRequestedEvent
3. NovelUpdateRequestedEventHandler processes
4. Fetches metadata via NovelpiaAdapter
5. Publishes FileUploadRequestCreatedEvent (for cover)
6. FileService uploads to S3
7. FileService publishes FileUploadRequestStatusUpdateEvent
8. NovelService updates image path
```

**Translation:**
```
1. NovelService publishes TranslationRequestCreatedEvent
2. TranslationService translates via DeepL
3. TranslationService publishes TranslationRequestCompletedEvent
4. NovelService updates chapter translation
```

## Data Storage

### Database Pattern
- Database per service (PostgreSQL)
- Connection string format: `Host=localhost;Database=FictionArchive_{ServiceName};...`
- Auto-migration on startup via `dbContext.UpdateDatabase()`

### Audit Trail
- `AuditInterceptor` auto-sets `CreatedTime` and `LastUpdatedTime`
- `IAuditable` interface with NodaTime `Instant` fields
- `BaseEntity<TKey>` abstract base class
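The assumed shape of those audit contracts, for orientation only; the real definitions in `FictionArchive.Service.Shared` may differ in members and namespaces.

```csharp
using NodaTime;

// Sketch of the audit surface described above, not the actual source.
public interface IAuditable
{
    Instant CreatedTime { get; set; }       // set by AuditInterceptor on insert
    Instant LastUpdatedTime { get; set; }   // refreshed by AuditInterceptor on every save
}

public abstract class BaseEntity<TKey> : IAuditable
{
    public TKey Id { get; set; } = default!;
    public Instant CreatedTime { get; set; }
    public Instant LastUpdatedTime { get; set; }
}
```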
### Object Storage
- S3-compatible (AWS S3 or Garage)
- Path-style URLs for Garage compatibility
- Proxied through FileService

## Frontend Architecture

### Structure
```
fictionarchive-web/
├── src/
│   ├── auth/            # OIDC authentication
│   ├── components/      # React components
│   │   └── ui/          # Radix-based primitives
│   ├── pages/           # Route pages
│   ├── layouts/         # Layout components
│   ├── graphql/         # GraphQL queries
│   ├── __generated__/   # Codegen output
│   └── lib/             # Utilities
└── codegen.ts           # GraphQL Codegen config
```

### Authentication
- OIDC via `oidc-client-ts`
- Environment variables for configuration
- `useAuth` hook for state access

### State Management
- Apollo Client for GraphQL state
- React Context for auth state

## Infrastructure

### Docker
- Multi-stage builds
- Base: `mcr.microsoft.com/dotnet/aspnet:8.0`
- Non-root user for security
- Ports: 8080 (HTTP) or 8081 (HTTPS)

### Health Checks
- All services expose `/healthz`

### Configuration
- `appsettings.json` - Default settings
- `appsettings.Development.json` - Dev overrides
- `appsettings.Local.json` - Local secrets (not committed)

---

# Improvement Recommendations

## Critical

### 1. Event Bus - No Dead Letter Queue or Retry Logic
**Location**: `FictionArchive.Service.Shared/Services/EventBus/Implementations/RabbitMQEventBus.cs:126-133`

**Issue**: Events are always ACK'd even on failure. No DLQ configuration for poison messages. Failed events are lost forever.

**Recommendation**: Implement retry with exponential backoff, dead-letter exchange, and poison message handling.

```csharp
// Example: Add retry and DLQ
catch (Exception e)
{
    _logger.LogError(e, "Error handling event");
    if (retryCount < maxRetries)
    {
        await channel.BasicNackAsync(@event.DeliveryTag, false, true); // requeue
    }
    else
    {
        // Send to DLQ
        await channel.BasicNackAsync(@event.DeliveryTag, false, false);
    }
}
```

### 2. CORS Configuration is Insecure
**Location**: `FictionArchive.API/Program.cs:24-33`

**Issue**: `AllowAnyOrigin()` allows requests from any domain, unsuitable for production.

**Recommendation**: Configure specific allowed origins via appsettings:
```csharp
builder.Services.AddCors(options =>
{
    options.AddPolicy("Production", policy =>
    {
        policy.WithOrigins(builder.Configuration.GetSection("Cors:AllowedOrigins").Get<string[]>())
              .AllowAnyMethod()
              .AllowAnyHeader();
    });
});
```

### 3. Auto-Migration on Startup
**Location**: `FictionArchive.Service.Shared/Services/Database/FictionArchiveDbContext.cs:23-38`

**Issue**: Running migrations at startup can cause race conditions with multiple instances and potential data corruption during rolling deployments.

**Recommendation**: Use a migration job, init container, or CLI tool instead of startup code.
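One possible shape: a migrate-then-exit switch that a deploy job or init container runs once per rollout. This is a sketch only; the `--migrate` flag and `NovelServiceDbContext` name are placeholders, and it assumes the standard EF Core `Database.MigrateAsync()` API.

```csharp
// In Program.cs, after `var app = builder.Build();` (requires Microsoft.EntityFrameworkCore
// and Microsoft.Extensions.DependencyInjection usings). Each service would substitute
// its own DbContext type for the placeholder below.
if (args.Contains("--migrate"))
{
    using var scope = app.Services.CreateScope();
    var db = scope.ServiceProvider.GetRequiredService<NovelServiceDbContext>();
    await db.Database.MigrateAsync();   // applies only pending EF Core migrations
    return;                             // exit instead of starting the web host
}

app.Run();
```

A Kubernetes Job or compose "init" service can then run the container with `--migrate` before the replicas start, so only one process ever touches the schema.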
## Important

### 4. No Circuit Breaker Pattern
**Issue**: External service calls (DeepL, Novelpia, S3) lack resilience patterns.

**Recommendation**: Add Polly for circuit breaker, retry, and timeout policies:
```csharp
builder.Services.AddHttpClient<ISourceAdapter, NovelpiaAdapter>()
    .AddPolicyHandler(GetRetryPolicy())
    .AddPolicyHandler(GetCircuitBreakerPolicy());
```

### 5. Missing Request Validation/Rate Limiting
**Issue**: No visible rate limiting on GraphQL mutations. `ImportNovel` could be abused.

**Recommendation**: Add rate limiting middleware and input validation.
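A minimal starting point using ASP.NET Core 8's built-in rate limiter; the policy name, window, and limits below are illustrative values to tune, not measured settings.

```csharp
using System.Threading.RateLimiting;
using Microsoft.AspNetCore.RateLimiting;

// Fixed-window policy applied to the GraphQL endpoint in the gateway.
builder.Services.AddRateLimiter(options =>
{
    options.RejectionStatusCode = StatusCodes.Status429TooManyRequests;
    options.AddFixedWindowLimiter("graphql", limiterOptions =>
    {
        limiterOptions.PermitLimit = 20;                  // requests per window (tune)
        limiterOptions.Window = TimeSpan.FromSeconds(10);
        limiterOptions.QueueLimit = 0;                    // reject instead of queueing
    });
});

// ...after building the app:
app.UseRateLimiter();
app.MapGraphQL().RequireRateLimiting("graphql");
```

Per-mutation cost limits and input validation inside the resolvers would complement this transport-level limit.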
### 6. Hardcoded Exchange Name
**Location**: `RabbitMQEventBus.cs:24`

**Issue**: `fiction-archive-event-bus` is hardcoded.

**Recommendation**: Move to configuration for environment flexibility.
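A small options-pattern sketch; `EventBusOptions` and the `EventBus` configuration section are hypothetical names, not existing code.

```csharp
// Bound from configuration so each environment can use its own exchange.
public class EventBusOptions
{
    public string ExchangeName { get; set; } = "fiction-archive-event-bus"; // current value as default
}

// Service registration (Program.cs or a shared extension method):
builder.Services.Configure<EventBusOptions>(builder.Configuration.GetSection("EventBus"));

// RabbitMQEventBus would then take IOptions<EventBusOptions> instead of the literal string.
```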
### 7. No Distributed Tracing
**Issue**: Event correlation exists (`X-Event-Id` header) but not integrated with tracing.

**Recommendation**: Add OpenTelemetry for end-to-end request tracing across services.
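A registration sketch assuming the `OpenTelemetry.Extensions.Hosting` package plus the ASP.NET Core, HttpClient, and EF Core instrumentation packages (the EF Core one is prerelease); the service name and OTLP exporter choice are assumptions.

```csharp
using OpenTelemetry.Resources;
using OpenTelemetry.Trace;

// Added per service; the existing X-Event-Id header could be attached to the
// current Activity as a tag when events are published/consumed.
builder.Services.AddOpenTelemetry()
    .ConfigureResource(resource => resource.AddService("novel-service"))
    .WithTracing(tracing => tracing
        .AddAspNetCoreInstrumentation()
        .AddHttpClientInstrumentation()
        .AddEntityFrameworkCoreInstrumentation()
        .AddOtlpExporter());   // export to a collector such as Jaeger/Tempo via OTLP
```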
### 8. Singleton AuditInterceptor
**Location**: `FictionArchiveDbContext.cs:20`

**Issue**: `new AuditInterceptor()` created per DbContext instance.

**Recommendation**: Register as singleton in DI and inject.
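A registration sketch; `NovelServiceDbContext` and the `"Default"` connection string name are placeholders, and this assumes `AuditInterceptor` is stateless (a requirement for singleton lifetime).

```csharp
// Register once, resolve when configuring the DbContext instead of newing it up
// inside FictionArchiveDbContext.
builder.Services.AddSingleton<AuditInterceptor>();

builder.Services.AddDbContext<NovelServiceDbContext>((provider, options) =>
{
    options.UseNpgsql(builder.Configuration.GetConnectionString("Default"))
           .AddInterceptors(provider.GetRequiredService<AuditInterceptor>());
});
```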
## Minor / Code Quality

### 9. Limited Test Coverage
**Issue**: Only `NovelService.Tests` exists. No integration tests for event handlers.

**Recommendation**: Add unit and integration tests for each service, especially event handlers.

### 10. Inconsistent Port Configuration
**Issue**: Some services use 8080 (HTTP), others 8081 (HTTPS).

**Recommendation**: Standardize on HTTPS with proper cert management.

### 11. No API Versioning
**Issue**: GraphQL schemas have no versioning strategy.

**Recommendation**: Consider schema versioning or deprecation annotations for breaking changes.
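For the deprecation route, a hypothetical example assuming Hot Chocolate's `[GraphQLDeprecated]` attribute; the field and types shown are placeholders, not the current schema.

```csharp
using HotChocolate;

public class NovelQueries
{
    // Keeps the old field resolvable while the schema advertises @deprecated to clients,
    // so consumers can migrate before the field is removed.
    [GraphQLDeprecated("Use `novels` with filtering and paging instead.")]
    public IQueryable<Novel> GetAllNovels([Service] NovelServiceDbContext db)
        => db.Novels;
}
```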
### 12. Frontend - No Error Boundary
**Issue**: React app lacks error boundaries for graceful failure handling.

**Recommendation**: Add React Error Boundaries around routes.

### 13. Missing Health Check Depth
**Issue**: Health checks only verify the service is running, not its dependencies.

**Recommendation**: Add database, RabbitMQ, and S3 health checks:
```csharp
builder.Services.AddHealthChecks()
    .AddNpgSql(connectionString)
    .AddRabbitMQ()
    .AddS3(options => { });
```

### 14. Synchronous File Operations in Event Handlers
**Issue**: File uploads may block the event handling thread for large files.

**Recommendation**: Consider async streaming for large files.

## Architectural Suggestions

### 15. Consider Outbox Pattern
**Issue**: Publishing events and saving to the DB aren't transactional, which could lead to inconsistent state.

**Recommendation**: Implement the transactional outbox pattern for guaranteed delivery:
```
1. Save entity + outbox message in same transaction
2. Background worker publishes from outbox
3. Delete outbox message after successful publish
```
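A sketch of step 1; `OutboxMessage`, `NovelServiceDbContext`, the `Novels` set, and the payload shape are all hypothetical names used for illustration.

```csharp
using System;
using System.Text.Json;
using System.Threading.Tasks;
using NodaTime;

// Minimal outbox row: the background publisher (steps 2-3) reads unpublished rows,
// publishes them to RabbitMQ, then marks or deletes them.
public class OutboxMessage
{
    public Guid Id { get; set; } = Guid.NewGuid();
    public string EventType { get; set; } = default!;   // doubles as the routing key
    public string Payload { get; set; } = default!;     // serialized event body
    public Instant CreatedAt { get; set; }
    public Instant? PublishedAt { get; set; }            // null until published
}

public static class OutboxSketch
{
    // Step 1: the domain entity and the outbox row are written in one SaveChanges,
    // i.e. one database transaction, so neither can exist without the other.
    public static async Task SaveWithOutboxAsync(NovelServiceDbContext db, Novel novel)
    {
        db.Novels.Add(novel);
        db.Set<OutboxMessage>().Add(new OutboxMessage
        {
            EventType = "NovelUpdateRequestedEvent",
            Payload = JsonSerializer.Serialize(new { NovelId = novel.Id }),
            CreatedAt = SystemClock.Instance.GetCurrentInstant()
        });

        await db.SaveChangesAsync();
    }
}
```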
### 16. Gateway Schema Build Process
**Issue**: Python script (`build_gateway.py`) for schema composition requires manual execution.

**Recommendation**: Integrate into the CI/CD pipeline or consider runtime schema polling.

### 17. Secret Management
**Issue**: Credentials in appsettings files.

**Recommendation**: Use Azure Key Vault, AWS Secrets Manager, HashiCorp Vault, or a similar secret management solution.

---

## Key Files Reference

| File | Purpose |
|------|---------|
| `FictionArchive.API/Program.cs` | Gateway setup |
| `FictionArchive.API/build_gateway.py` | Schema composition script |
| `FictionArchive.Service.Shared/Services/EventBus/` | Event bus implementation |
| `FictionArchive.Service.Shared/Extensions/` | Service registration helpers |
| `FictionArchive.Service.Shared/Services/Database/` | DB infrastructure |
| `fictionarchive-web/src/auth/AuthContext.tsx` | Frontend auth state |
Documentation/CICD.md (Normal file, 297 lines)
@@ -0,0 +1,297 @@
# CI/CD Configuration

This document describes the CI/CD pipeline configuration for FictionArchive using Gitea Actions.

## Workflows Overview

| Workflow | File | Trigger | Purpose |
|----------|------|---------|---------|
| CI | `build.yml` | Push/PR to master | Build and test all projects |
| Build Gateway | `build-gateway.yml` | Tag `v*.*.*` or manual | Build subgraphs, compose gateway, push API image |
| Release | `release.yml` | Tag `v*.*.*` | Build and push all Docker images |
| Claude PR Assistant | `claude_assistant.yml` | Issue/PR comments with @claude | AI-assisted code review and issue handling |

## Pipeline Architecture

```
┌─────────────────────────────────────────────────────────────────────┐
│                            Push to master                           │
└─────────────────────────────┬───────────────────────────────────────┘
                              │
                              ▼
                 ┌─────────────────────────┐
                 │        build.yml        │
                 │       (CI checks)       │
                 └─────────────────────────┘

┌─────────────────────────────────────────────────────────────────────┐
│                           Push tag v*.*.*                            │
└─────────────────────────────┬───────────────────────────────────────┘
                              │
              ┌───────────────┴───────────────┐
              ▼                               ▼
 ┌─────────────────────────┐     ┌─────────────────────────┐
 │       release.yml       │     │    build-gateway.yml    │
 │   (build & push all     │     │   (build subgraphs &    │
 │   backend + frontend)   │     │    push API gateway)    │
 └─────────────────────────┘     └─────────────────────────┘

┌─────────────────────────────────────────────────────────────────────┐
│                 Issue/PR comment containing @claude                  │
└─────────────────────────────┬───────────────────────────────────────┘
                              │
                              ▼
                 ┌─────────────────────────┐
                 │  claude_assistant.yml   │
                 │  (AI code assistance)   │
                 └─────────────────────────┘
```

## Required Configuration

### Repository Secrets

Configure these in **Settings → Actions → Secrets**:

| Secret | Description | Required By |
|--------|-------------|-------------|
| `REGISTRY_TOKEN` | Gitea access token with `write:package` scope | `release.yml`, `build-gateway.yml` |
| `CLAUDE_CODE_OAUTH_TOKEN` | Claude Code OAuth token | `claude_assistant.yml` |
| `CLAUDE_GITEA_TOKEN` | Gitea token for Claude assistant | `claude_assistant.yml` |

#### Creating Access Tokens

1. Go to **Settings → Applications → Access Tokens**
2. Create a new token with the following scopes:
   - `write:package` - Push container images
   - `write:repository` - For Claude assistant to push commits
3. Copy the token and add it as a repository secret

### Repository Variables

Configure these in **Settings → Actions → Variables**:

| Variable | Description | Example | Required By |
|----------|-------------|---------|-------------|
| `VITE_GRAPHQL_URI` | GraphQL API endpoint URL | `https://api.fictionarchive.example.com/graphql/` | `release.yml` |
| `VITE_OIDC_AUTHORITY` | OIDC provider authority URL | `https://auth.example.com/application/o/fiction-archive/` | `release.yml` |
| `VITE_OIDC_CLIENT_ID` | OIDC client identifier | `your-client-id` | `release.yml` |
| `VITE_OIDC_REDIRECT_URI` | Post-login redirect URL | `https://fictionarchive.example.com/` | `release.yml` |
| `VITE_OIDC_POST_LOGOUT_REDIRECT_URI` | Post-logout redirect URL | `https://fictionarchive.example.com/` | `release.yml` |

## Workflow Details

### CI (`build.yml`)

**Trigger:** Push or pull request to `master`

**Jobs:**
1. `build-backend` - Builds .NET solution and runs tests
2. `build-frontend` - Builds React application with linting

**Requirements:**
- .NET 8.0 SDK
- Node.js 20

**Steps (Backend):**
1. Checkout repository
2. Setup .NET 8.0
3. Restore dependencies
4. Build solution (Release, with `SkipFusionBuild=true`)
5. Run tests

**Steps (Frontend):**
1. Checkout repository
2. Setup Node.js 20
3. Install dependencies (`npm ci`)
4. Run linter (`npm run lint`)
5. Build application (`npm run build`)

### Build Gateway (`build-gateway.yml`)

**Trigger:**
- Manual dispatch (`workflow_dispatch`)
- Push tag matching `v*.*.*`

**Jobs:**

#### 1. `build-subgraphs` (Matrix Job)
Builds GraphQL subgraph packages for each service:

| Service | Project | Subgraph Name |
|---------|---------|---------------|
| novel-service | FictionArchive.Service.NovelService | Novel |
| translation-service | FictionArchive.Service.TranslationService | Translation |
| scheduler-service | FictionArchive.Service.SchedulerService | Scheduler |
| user-service | FictionArchive.Service.UserService | User |

**Note:** File Service and Authentication Service are not subgraphs (no GraphQL schema).

**Steps:**
1. Checkout repository
2. Setup .NET 8.0
3. Install HotChocolate Fusion CLI
4. Restore and build service project
5. Export GraphQL schema (`schema export`)
6. Pack subgraph into `.fsp` file
7. Upload artifact (retained 30 days)

#### 2. `build-gateway` (Depends on `build-subgraphs`)
Composes the API gateway from subgraph packages.

**Steps:**
1. Checkout repository
2. Setup .NET 8.0 and Fusion CLI
3. Download all subgraph artifacts
4. Configure Docker-internal URLs (`http://{service}-service:8080/graphql`)
5. Compose gateway schema using Fusion CLI
6. Build gateway project
7. Build and push Docker image

**Image Tags:**
- `<registry>/<owner>/fictionarchive-api:latest`
- `<registry>/<owner>/fictionarchive-api:<commit-sha>`

### Release (`release.yml`)

**Trigger:** Push tag matching `v*.*.*` (e.g., `v1.0.0`)

**Jobs:**

#### 1. `build-and-push` (Matrix Job)
Builds and pushes all backend service images:

| Service | Dockerfile |
|---------|------------|
| novel-service | FictionArchive.Service.NovelService/Dockerfile |
| user-service | FictionArchive.Service.UserService/Dockerfile |
| translation-service | FictionArchive.Service.TranslationService/Dockerfile |
| file-service | FictionArchive.Service.FileService/Dockerfile |
| scheduler-service | FictionArchive.Service.SchedulerService/Dockerfile |
| authentication-service | FictionArchive.Service.AuthenticationService/Dockerfile |

#### 2. `build-frontend`
Builds and pushes the frontend image with environment-specific build arguments.

**Build Args:**
- `VITE_GRAPHQL_URI`
- `VITE_OIDC_AUTHORITY`
- `VITE_OIDC_CLIENT_ID`
- `VITE_OIDC_REDIRECT_URI`
- `VITE_OIDC_POST_LOGOUT_REDIRECT_URI`

**Image Tags:**
- `<registry>/<owner>/fictionarchive-<service>:<version>`
- `<registry>/<owner>/fictionarchive-<service>:latest`

### Claude PR Assistant (`claude_assistant.yml`)

**Trigger:** Comments or issues containing `@claude`:
- Issue comments
- Pull request review comments
- Pull request reviews
- New issues (opened or assigned)

**Permissions Required:**
- `contents: write`
- `pull-requests: write`
- `issues: write`
- `id-token: write`

**Usage:**
Mention `@claude` in any issue or PR comment to invoke the AI assistant for:
- Code review assistance
- Bug analysis
- Implementation suggestions
- Documentation help

## Container Registry

Images are pushed to the Gitea Container Registry at:
```
<gitea-server-url>/<repository-owner>/fictionarchive-<service>:<tag>
```

### Image Naming Convention

| Image | Description |
|-------|-------------|
| `fictionarchive-api` | API Gateway (GraphQL Federation) |
| `fictionarchive-novel-service` | Novel Service |
| `fictionarchive-user-service` | User Service |
| `fictionarchive-translation-service` | Translation Service |
| `fictionarchive-file-service` | File Service |
| `fictionarchive-scheduler-service` | Scheduler Service |
| `fictionarchive-authentication-service` | Authentication Service |
| `fictionarchive-frontend` | Web Frontend |

### Pulling Images

```bash
# Login to registry
docker login <gitea-server-url> -u <username> -p <token>

# Pull an image
docker pull <gitea-server-url>/<owner>/fictionarchive-api:latest
```

## Creating a Release

1. Ensure all changes are committed and pushed to `master`
2. Create and push a version tag:
   ```bash
   git tag v1.0.0
   git push origin v1.0.0
   ```
3. The release workflow will automatically build and push all images
4. Monitor progress in the **Actions** tab

## Troubleshooting

### Build Failures

**"REGISTRY_TOKEN secret not found"**
- Ensure the `REGISTRY_TOKEN` secret is configured in repository settings
- Verify the token has `write:package` scope

**"No subgraph artifacts found"**
- The gateway build requires subgraph artifacts from the `build-subgraphs` job
- If subgraph builds failed, check the matrix job logs for errors

**"Schema export failed"**
- Ensure the service project has a valid `subgraph-config.json`
- Check that the service starts correctly for schema export

### Frontend Build Failures

**"VITE_* variables are empty"**
- Ensure all required variables are configured in repository settings
- Variables use the `vars.*` context, not `secrets.*`

### Docker Push Failures

**"unauthorized: authentication required"**
- Verify `REGISTRY_TOKEN` has correct permissions
- Check that the token hasn't expired

### Claude Assistant Failures

**"Claude assistant not responding"**
- Verify `CLAUDE_CODE_OAUTH_TOKEN` is configured
- Verify `CLAUDE_GITEA_TOKEN` is configured and has write permissions
- Check that the comment contains an `@claude` mention

## Local Testing

To test workflows locally before pushing:

```bash
# Install act (GitHub Actions local runner)
# Note: act has partial Gitea Actions compatibility

# Run CI workflow
act push -W .gitea/workflows/build.yml

# Run with specific event
act push --eventpath .gitea/test-event.json
```
Documentation/README.md (Normal file, 187 lines)
@@ -0,0 +1,187 @@
# FictionArchive

A distributed microservices-based web application for managing fiction and novel content. Features include importing from external sources, multi-language translation, file storage, and user management.

## Architecture

FictionArchive uses a GraphQL Fusion gateway pattern to orchestrate multiple domain services with event-driven communication via RabbitMQ.
More information is available in [ARCHITECTURE.md](ARCHITECTURE.md).

## Prerequisites

- .NET SDK 8.0+
- Node.js 20+
- Python 3 (for gateway build script)
- Docker & Docker Compose
- PostgreSQL 16+
- RabbitMQ 3+

**Required CLI Tools**
```bash
# Hot Chocolate Fusion CLI
dotnet tool install -g HotChocolate.Fusion.CommandLine
```

## Getting Started

### Local Development

1. **Clone the repository**
   ```bash
   git clone <repository-url>
   cd FictionArchive
   ```

2. **Start infrastructure** (PostgreSQL, RabbitMQ)
   ```bash
   docker compose up -d postgres rabbitmq
   ```

3. **Build and run backend**
   ```bash
   dotnet restore
   dotnet build FictionArchive.sln

   # Start services (in separate terminals or use a process manager)
   dotnet run --project FictionArchive.Service.NovelService
   dotnet run --project FictionArchive.Service.UserService
   dotnet run --project FictionArchive.Service.TranslationService
   dotnet run --project FictionArchive.Service.FileService
   dotnet run --project FictionArchive.Service.SchedulerService
   dotnet run --project FictionArchive.Service.AuthenticationService

   # Start the gateway (builds fusion schema automatically)
   dotnet run --project FictionArchive.API
   ```

4. **Build and run frontend**
   ```bash
   cd fictionarchive-web
   npm install
   npm run codegen   # Generate GraphQL types
   npm run dev       # Start dev server at http://localhost:5173
   ```

### Docker Deployment

1. **Create environment file**
   ```bash
   cp .env.example .env
   # Edit .env with your configuration
   ```

2. **Start all services**
   ```bash
   docker compose up -d
   ```

## Configuration

### Environment Variables

Create a `.env` file in the project root:

```bash
# PostgreSQL
POSTGRES_USER=postgres
POSTGRES_PASSWORD=your-secure-password

# RabbitMQ
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=your-secure-password

# External Services
NOVELPIA_USERNAME=your-username
NOVELPIA_PASSWORD=your-password
DEEPL_API_KEY=your-api-key

# S3 Storage
S3_ENDPOINT=https://s3.example.com
S3_BUCKET=fictionarchive
S3_ACCESS_KEY=your-access-key
S3_SECRET_KEY=your-secret-key

# OIDC Authentication
OIDC_AUTHORITY=https://auth.example.com/application/o/fiction-archive/
OIDC_CLIENT_ID=your-client-id
```

### Frontend Environment

Create `fictionarchive-web/.env.local`:

```bash
VITE_GRAPHQL_URI=http://localhost:5234/graphql/
VITE_OIDC_AUTHORITY=https://auth.example.com/application/o/fiction-archive/
VITE_OIDC_CLIENT_ID=your-client-id
VITE_OIDC_REDIRECT_URI=http://localhost:5173/
VITE_OIDC_POST_LOGOUT_REDIRECT_URI=http://localhost:5173/
```

## Building the GraphQL Gateway

The API gateway uses Hot Chocolate Fusion to compose schemas from all subgraphs. The gateway schema is rebuilt automatically when building the API project.

**Manual rebuild:**
```bash
cd FictionArchive.API
python build_gateway.py
```

**Skip specific services** by adding them to `FictionArchive.API/gateway_skip.txt`:
```
FictionArchive.Service.NovelService.Tests
```

## CI/CD

The project uses Gitea Actions with the following workflows:

| Workflow | Trigger | Description |
|----------|---------|-------------|
| `build.yml` | Push/PR to master | CI checks - builds and tests |
| `build-gateway.yml` | Tag `v*.*.*` or manual | Builds subgraph `.fsp` packages, composes the gateway, and pushes the API image |
| `release.yml` | Tag `v*.*.*` | Builds and pushes all Docker images |
| `claude_assistant.yml` | Issue/PR comments with `@claude` | AI-assisted code review and issue handling |

See [CICD.md](CICD.md) for full details.

### Release Process

```bash
git tag v1.0.0
git push origin v1.0.0
```

## Project Structure

```
FictionArchive/
├── FictionArchive.sln
├── FictionArchive.Common/                 # Shared enums and extensions
├── FictionArchive.Service.Shared/         # Shared infrastructure (EventBus, DB)
├── FictionArchive.API/                    # GraphQL Fusion Gateway
├── FictionArchive.Service.NovelService/
├── FictionArchive.Service.UserService/
├── FictionArchive.Service.TranslationService/
├── FictionArchive.Service.FileService/
├── FictionArchive.Service.SchedulerService/
├── FictionArchive.Service.AuthenticationService/
├── FictionArchive.Service.NovelService.Tests/
├── fictionarchive-web/                    # React frontend
├── docker-compose.yml
└── .gitea/workflows/                      # CI/CD workflows
```

## Testing

```bash
# Run all tests
dotnet test FictionArchive.sln

# Run specific test project
dotnet test FictionArchive.Service.NovelService.Tests
```

## Documentation

- [ARCHITECTURE.md](ARCHITECTURE.md) - Detailed architecture documentation
- [AGENTS.md](AGENTS.md) - Development guidelines and coding standards
@@ -7,15 +7,23 @@ EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src

COPY ["FictionArchive.API/FictionArchive.API.csproj", "FictionArchive.API/"]
COPY ["FictionArchive.Common/FictionArchive.Common.csproj", "FictionArchive.Common/"]
COPY ["FictionArchive.Service.Shared/FictionArchive.Service.Shared.csproj", "FictionArchive.Service.Shared/"]
RUN dotnet restore "FictionArchive.API/FictionArchive.API.csproj"
COPY . .

COPY FictionArchive.API/ FictionArchive.API/
COPY FictionArchive.Common/ FictionArchive.Common/
COPY FictionArchive.Service.Shared/ FictionArchive.Service.Shared/

WORKDIR "/src/FictionArchive.API"
RUN dotnet build "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/build
# Skip fusion build - gateway.fgp should be pre-composed in CI
RUN dotnet build "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/build -p:SkipFusionBuild=true

FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
RUN dotnet publish "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false /p:SkipFusionBuild=true

FROM base AS final
WORKDIR /app
@@ -13,6 +13,7 @@
    <PackageReference Include="HotChocolate.Data.EntityFramework" Version="15.1.11" />
    <PackageReference Include="HotChocolate.Fusion" Version="15.1.11" />
    <PackageReference Include="HotChocolate.Types.Scalars" Version="15.1.11" />
    <PackageReference Include="Microsoft.AspNetCore.HeaderPropagation" Version="8.0.22" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
        <PrivateAssets>all</PrivateAssets>
        <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -20,7 +21,13 @@
    <PackageReference Include="Microsoft.EntityFrameworkCore.Relational" Version="9.0.11" />
    <PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="8.0.7" />
    <PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2"/>
    <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.11" />
</ItemGroup>

<!-- Builds the Fusion graph file before building the application itself (skipped in CI) -->
<Target Name="RunFusionBuild" BeforeTargets="BeforeBuild" Condition="'$(SkipFusionBuild)' != 'true'">
    <Exec Command="python build_gateway.py $(FusionBuildArgs)" WorkingDirectory="$(ProjectDir)" />
</Target>

<ItemGroup>
    <Content Include="..\.dockerignore">
@@ -12,23 +12,46 @@ public class Program

        #region Fusion Gateway

        builder.Services.AddHttpClient("Fusion");
        // Register header propagation service to forward Authorization header to subgraphs
        builder.Services.AddHttpClient("Fusion")
            .AddHeaderPropagation();
        builder.Services.AddHeaderPropagation(opt =>
        {
            opt.Headers.Add("Authorization");
        });

        builder.Services
            .AddFusionGatewayServer()
            .ConfigureFromFile("gateway.fgp")
            .CoreBuilder.ApplySaneDefaults();

        builder.Services.AddOidcAuthentication(builder.Configuration);

        #endregion

        var allowedOrigin = builder.Configuration["Cors:AllowedOrigin"] ?? "http://localhost:4321";
        builder.Services.AddCors(options =>
        {
            options.AddPolicy("AllowFictionArchiveOrigins",
                policyBuilder =>
                {
                    policyBuilder.WithOrigins(allowedOrigin)
                        .AllowAnyMethod()
                        .AllowAnyHeader()
                        .AllowCredentials();
                });
        });

        var app = builder.Build();

        app.UseHttpsRedirection();
        app.UseCors("AllowFictionArchiveOrigins");

        app.MapHealthChecks("/healthz");

        app.UseHeaderPropagation();

        app.MapGraphQL();

        app.Run();
        app.RunWithGraphQLCommands(args);
    }
}
@@ -5,5 +5,18 @@
      "Microsoft.AspNetCore": "Warning"
    }
  },
  "AllowedHosts": "*"
  "AllowedHosts": "*",
  "Cors": {
    "AllowedOrigin": "http://localhost:4321"
  },
  "OIDC": {
    "Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ValidateIssuer": true,
    "ValidateAudience": true,
    "ValidateLifetime": true,
    "ValidateIssuerSigningKey": true
  }
}
@@ -1,99 +0,0 @@
@echo off
setlocal enabledelayedexpansion

set ROOT=%~dp0

for %%A in ("%ROOT%..") do set SERVICES_DIR=%%~fA\

REM ----------------------------------------
REM List of project names to skip
REM (space-separated, match folder names exactly)
REM ----------------------------------------
set SKIP_PROJECTS=FictionArchive.Service.Shared FictionArchive.Service.AuthenticationService

echo ----------------------------------------
echo Finding GraphQL services...
echo ----------------------------------------

set SERVICE_LIST=

for /d %%F in ("%SERVICES_DIR%FictionArchive.Service.*") do (
    set "PROJECT_NAME=%%~nxF"
    set "SKIP=0"

    REM Check if this project name is in the skip list
    for %%X in (%SKIP_PROJECTS%) do (
        if /I "!PROJECT_NAME!"=="%%X" (
            set "SKIP=1"
        )
    )

    if !SKIP!==0 (
        echo Found service: !PROJECT_NAME!
        set SERVICE_LIST=!SERVICE_LIST! %%F
    ) else (
        echo Skipping service: !PROJECT_NAME!
    )
)

echo:
echo ----------------------------------------
echo Exporting schemas and packing subgraphs...
echo ----------------------------------------

for %%S in (%SERVICE_LIST%) do (
    echo Processing service folder: %%S
    pushd "%%S"

    echo Running schema export...
    dotnet run -- schema export --output schema.graphql
    if errorlevel 1 (
        echo ERROR during schema export in %%S
        popd
        exit /b 1
    )

    echo Running fusion subgraph pack...
    fusion subgraph pack
    if errorlevel 1 (
        echo ERROR during subgraph pack in %%S
        popd
        exit /b 1
    )

    popd
    echo Completed: %%S
    echo.
)

echo ----------------------------------------
echo Running fusion compose...
echo ----------------------------------------

pushd "%ROOT%"

if exist gateway.fgp del gateway.fgp

for %%S in (%SERVICE_LIST%) do (
    REM Extract the full folder name WITH dots preserved
    set "SERVICE_NAME=%%~nxS"

    echo Composing subgraph: !SERVICE_NAME!

    fusion compose -p gateway.fgp -s "..\!SERVICE_NAME!"
    if errorlevel 1 (
        echo ERROR during fusion compose
        popd
        exit /b 1
    )
)

popd


echo ----------------------------------------
echo Fusion build complete!
echo ----------------------------------------

endlocal
exit /b 0
139
FictionArchive.API/build_gateway.py
Normal file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Local development script for building the Fusion gateway.
|
||||
|
||||
This script is used for local development only. In CI/CD, subgraphs are built
|
||||
separately and the gateway is composed from pre-built .fsp artifacts.
|
||||
|
||||
Usage:
|
||||
python build_gateway.py
|
||||
|
||||
Requirements:
|
||||
- .NET 8.0 SDK
|
||||
- HotChocolate Fusion CLI (dotnet tool install -g HotChocolate.Fusion.CommandLine)
|
||||
"""
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def run(cmd, cwd=None):
|
||||
"""Run a command and exit on failure."""
|
||||
print(f"> {' '.join(cmd)}")
|
||||
result = subprocess.run(cmd, cwd=cwd)
|
||||
if result.returncode != 0:
|
||||
print(f"ERROR: command failed in {cwd or os.getcwd()}")
|
||||
sys.exit(result.returncode)
|
||||
|
||||
|
||||
def load_skip_list(skip_file: Path):
|
||||
if not skip_file.exists():
|
||||
print(f"WARNING: gateway_skip.txt not found at {skip_file}")
|
||||
return set()
|
||||
|
||||
lines = skip_file.read_text().splitlines()
|
||||
skip = {line.strip() for line in lines
|
||||
if line.strip() and not line.strip().startswith("#")}
|
||||
print("Skip list:", ", ".join(skip) if skip else "(none)")
|
||||
return skip
|
||||
|
||||
|
||||
# ----------------------------------------
|
||||
# Setup paths
|
||||
# ----------------------------------------
|
||||
|
||||
script_dir = Path(__file__).parent.resolve()
|
||||
services_dir = (script_dir / "..").resolve()
|
||||
api_dir = services_dir / "FictionArchive.API"
|
||||
|
||||
print(f"Script dir: {script_dir}")
|
||||
print(f"Services dir: {services_dir}")
|
||||
|
||||
skip_file = script_dir / "gateway_skip.txt"
|
||||
skip_list = load_skip_list(skip_file)
|
||||
|
||||
# ----------------------------------------
|
||||
# Find services
|
||||
# ----------------------------------------
|
||||
|
||||
print("\n----------------------------------------")
|
||||
print(" Finding GraphQL services...")
|
||||
print("----------------------------------------")
|
||||
|
||||
service_dirs = [
|
||||
d for d in services_dir.glob("FictionArchive.Service.*")
|
||||
if d.is_dir() and (d / "subgraph-config.json").exists()
|
||||
]
|
||||
|
||||
selected_services = []
|
||||
|
||||
for d in service_dirs:
|
||||
name = d.name
|
||||
if name in skip_list:
|
||||
print(f"Skipping: {name}")
|
||||
else:
|
||||
print(f"Found: {name}")
|
||||
selected_services.append(d)
|
||||
|
||||
if not selected_services:
|
||||
print("No services to process. Exiting.")
|
||||
sys.exit(0)
|
||||
|
||||
# ----------------------------------------
|
||||
# Export + pack
|
||||
# ----------------------------------------
|
||||
|
||||
print("\n----------------------------------------")
|
||||
print(" Exporting schemas & packing subgraphs...")
|
||||
print("----------------------------------------")
|
||||
|
||||
for svc in selected_services:
|
||||
name = svc.name
|
||||
print(f"\nProcessing {name}")
|
||||
|
||||
# Build once
|
||||
run(["dotnet", "build", "-c", "Release"], cwd=svc)
|
||||
|
||||
# Export schema
|
||||
run([
|
||||
"dotnet", "run",
|
||||
"--no-build",
|
||||
"--no-launch-profile",
|
||||
"--",
|
||||
"schema", "export",
|
||||
"--output", "schema.graphql"
|
||||
], cwd=svc)
|
||||
|
||||
# Pack subgraph
|
||||
run(["fusion", "subgraph", "pack"], cwd=svc)
|
||||
|
||||
# ----------------------------------------
|
||||
# Compose gateway
|
||||
# ----------------------------------------
|
||||
|
||||
print("\n----------------------------------------")
|
||||
print(" Running fusion compose...")
|
||||
print("----------------------------------------")
|
||||
|
||||
if not api_dir.exists():
|
||||
print(f"ERROR: FictionArchive.API not found at {api_dir}")
|
||||
sys.exit(1)
|
||||
|
||||
gateway_file = api_dir / "gateway.fgp"
|
||||
if gateway_file.exists():
|
||||
gateway_file.unlink()
|
||||
|
||||
for svc in selected_services:
|
||||
name = svc.name
|
||||
print(f"Composing: {name}")
|
||||
|
||||
run([
|
||||
"fusion", "compose",
|
||||
"-p", "gateway.fgp",
|
||||
"-s", f"..{os.sep}{name}"
|
||||
], cwd=api_dir)
|
||||
|
||||
print("\n----------------------------------------")
|
||||
print(" Fusion build complete!")
|
||||
print("----------------------------------------")
|
||||
@@ -1,104 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
###############################################
|
||||
# Resolve important directories
|
||||
###############################################
|
||||
|
||||
# Directory where this script lives
|
||||
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
# Services live one directory above the script's directory
|
||||
SERVICES_DIR="$(cd "$ROOT/.." && pwd)"
|
||||
|
||||
###############################################
|
||||
# Skip list (folder names, match exactly)
|
||||
###############################################
|
||||
SKIP_PROJECTS=(
|
||||
"FictionArchive.Service.Shared"
|
||||
"FictionArchive.Service.Legacy"
|
||||
)
|
||||
|
||||
echo "----------------------------------------"
|
||||
echo " Finding GraphQL services..."
|
||||
echo "----------------------------------------"
|
||||
|
||||
SERVICE_LIST=()
|
||||
|
||||
# Convert skip projects into a single searchable string
|
||||
SKIP_STRING=" ${SKIP_PROJECTS[*]} "
|
||||
|
||||
# Find service directories
|
||||
shopt -s nullglob
|
||||
for FOLDER in "$SERVICES_DIR"/FictionArchive.Service.*; do
|
||||
[ -d "$FOLDER" ] || continue
|
||||
|
||||
PROJECT_NAME="$(basename "$FOLDER")"
|
||||
|
||||
# Skip entries that match the skip list
|
||||
if [[ "$SKIP_STRING" == *" $PROJECT_NAME "* ]]; then
|
||||
echo "Skipping service: $PROJECT_NAME"
|
||||
continue
|
||||
fi
|
||||
|
||||
echo "Found service: $PROJECT_NAME"
|
||||
SERVICE_LIST+=("$FOLDER")
|
||||
done
|
||||
shopt -u nullglob
|
||||
|
||||
echo
|
||||
echo "----------------------------------------"
|
||||
echo " Exporting schemas and packing subgraphs..."
|
||||
echo "----------------------------------------"
|
||||
|
||||
for SERVICE in "${SERVICE_LIST[@]}"; do
|
||||
PROJECT_NAME="$(basename "$SERVICE")"
|
||||
|
||||
echo "Processing service: $PROJECT_NAME"
|
||||
pushd "$SERVICE" >/dev/null
|
||||
|
||||
echo "Building service..."
|
||||
dotnet build -c Release >/dev/null
|
||||
|
||||
# Automatically detect built DLL in bin/Release/<TFM>/
|
||||
DLL_PATH="$(find "bin/Release" -maxdepth 3 -name '*.dll' | head -n 1)"
|
||||
if [[ -z "$DLL_PATH" ]]; then
|
||||
echo "ERROR: Could not locate DLL for $PROJECT_NAME"
|
||||
popd >/dev/null
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Running schema export..."
|
||||
dotnet exec "$DLL_PATH" schema export --output schema.graphql
|
||||
|
||||
echo "Running subgraph pack..."
|
||||
fusion subgraph pack
|
||||
|
||||
popd >/dev/null
|
||||
echo "Completed: $PROJECT_NAME"
|
||||
echo
|
||||
done
|
||||
|
||||
echo "----------------------------------------"
|
||||
echo " Running fusion compose..."
|
||||
echo "----------------------------------------"
|
||||
|
||||
pushd "$ROOT" >/dev/null
|
||||
|
||||
# Remove old composition file
|
||||
rm -f gateway.fgp
|
||||
|
||||
for SERVICE in "${SERVICE_LIST[@]}"; do
|
||||
SERVICE_NAME="$(basename "$SERVICE")"
|
||||
|
||||
echo "Composing subgraph: $SERVICE_NAME"
|
||||
|
||||
# Note: Fusion compose must reference parent dir (services live above ROOT)
|
||||
fusion compose -p gateway.fgp -s "../$SERVICE_NAME"
|
||||
done
|
||||
|
||||
popd >/dev/null
|
||||
|
||||
echo "----------------------------------------"
|
||||
echo " Fusion build complete!"
|
||||
echo "----------------------------------------"
|
||||
Binary file not shown.
5
FictionArchive.API/gateway_skip.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
# List of service folders to skip
|
||||
FictionArchive.Service.Shared
|
||||
FictionArchive.Service.AuthenticationService
|
||||
FictionArchive.Service.FileService
|
||||
FictionArchive.Service.NovelService.Tests
|
||||
8
FictionArchive.Common/Enums/RequestStatus.cs
Normal file
@@ -0,0 +1,8 @@
|
||||
namespace FictionArchive.Common.Enums;
|
||||
|
||||
public enum RequestStatus
|
||||
{
|
||||
Failed = -1,
|
||||
Pending = 0,
|
||||
Success = 1
|
||||
}
|
||||
@@ -9,14 +9,8 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="9.0.11" />
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="9.0.11" />
|
||||
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="9.0.0" />
|
||||
<PackageReference Include="NodaTime" Version="3.2.2" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Reference Include="Microsoft.Extensions.Configuration.Abstractions" />
|
||||
<Reference Include="Microsoft.Extensions.Hosting.Abstractions">
|
||||
<HintPath>..\..\..\..\..\..\Program Files\dotnet\shared\Microsoft.AspNetCore.App\8.0.15\Microsoft.Extensions.Hosting.Abstractions.dll</HintPath>
|
||||
</Reference>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,51 @@
|
||||
using System.Web;
|
||||
using Amazon.S3;
|
||||
using Amazon.S3.Model;
|
||||
using FictionArchive.Service.FileService.Models;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace FictionArchive.Service.FileService.Controllers
|
||||
{
|
||||
[Route("api/{*path}")]
|
||||
[ApiController]
|
||||
[Authorize]
|
||||
public class S3ProxyController : ControllerBase
|
||||
{
|
||||
private readonly AmazonS3Client _amazonS3Client;
|
||||
private readonly S3Configuration _s3Configuration;
|
||||
|
||||
public S3ProxyController(AmazonS3Client amazonS3Client, IOptions<S3Configuration> s3Configuration)
|
||||
{
|
||||
_amazonS3Client = amazonS3Client;
|
||||
_s3Configuration = s3Configuration.Value;
|
||||
}
|
||||
|
||||
[HttpGet]
|
||||
public async Task<IActionResult> Get(string path)
|
||||
{
|
||||
var decodedPath = HttpUtility.UrlDecode(path);
|
||||
|
||||
try
|
||||
{
|
||||
var s3Response = await _amazonS3Client.GetObjectAsync(new GetObjectRequest()
|
||||
{
|
||||
BucketName = _s3Configuration.Bucket,
|
||||
Key = decodedPath
|
||||
});
|
||||
|
||||
return new FileStreamResult(s3Response.ResponseStream, s3Response.Headers.ContentType);
|
||||
}
|
||||
catch (AmazonS3Exception e)
|
||||
{
|
||||
if (e.Message == "Key not found")
|
||||
{
|
||||
return NotFound();
|
||||
}
|
||||
throw;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
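One note on the proxy's error handling: matching `AmazonS3Exception.Message` against the literal `"Key not found"` is brittle across S3-compatible backends. A hedged alternative, not part of this change set, is to classify the miss by status code or error code instead:

```csharp
using System.Net;
using Amazon.S3;

// Sketch only — not part of this commit. Classifies a missing object without
// depending on the exception message text.
public static class S3ErrorClassification
{
    public static bool IsMissingObject(AmazonS3Exception e) =>
        e.StatusCode == HttpStatusCode.NotFound || e.ErrorCode == "NoSuchKey";
}
```

The controller's catch could then use an exception filter, e.g. `catch (AmazonS3Exception e) when (S3ErrorClassification.IsMissingObject(e))`.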
|
||||
23
FictionArchive.Service.FileService/Dockerfile
Normal file
@@ -0,0 +1,23 @@
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
|
||||
USER $APP_UID
|
||||
WORKDIR /app
|
||||
EXPOSE 8080
|
||||
EXPOSE 8081
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
WORKDIR /src
|
||||
COPY ["FictionArchive.Service.FileService/FictionArchive.Service.FileService.csproj", "FictionArchive.Service.FileService/"]
|
||||
RUN dotnet restore "FictionArchive.Service.FileService/FictionArchive.Service.FileService.csproj"
|
||||
COPY . .
|
||||
WORKDIR "/src/FictionArchive.Service.FileService"
|
||||
RUN dotnet build "./FictionArchive.Service.FileService.csproj" -c $BUILD_CONFIGURATION -o /app/build
|
||||
|
||||
FROM build AS publish
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
RUN dotnet publish "./FictionArchive.Service.FileService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
|
||||
|
||||
FROM base AS final
|
||||
WORKDIR /app
|
||||
COPY --from=publish /app/publish .
|
||||
ENTRYPOINT ["dotnet", "FictionArchive.Service.FileService.dll"]
|
||||
@@ -0,0 +1,31 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="..\.dockerignore">
|
||||
<Link>.dockerignore</Link>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\FictionArchive.Service.Shared\FictionArchive.Service.Shared.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="AWSSDK.S3" Version="4.0.13.1" />
|
||||
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="9.0.0" />
|
||||
<PackageReference Include="Swashbuckle.AspNetCore" Version="10.0.1" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.11" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Folder Include="Controllers\" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,10 @@
|
||||
using FictionArchive.Service.Shared.Services.EventBus;
|
||||
|
||||
namespace FictionArchive.Service.FileService.Models.IntegrationEvents;
|
||||
|
||||
public class FileUploadRequestCreatedEvent : IIntegrationEvent
|
||||
{
|
||||
public Guid RequestId { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public byte[] FileData { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
using FictionArchive.Common.Enums;
|
||||
using FictionArchive.Service.Shared.Services.EventBus;
|
||||
|
||||
namespace FictionArchive.Service.FileService.Models.IntegrationEvents;
|
||||
|
||||
public class FileUploadRequestStatusUpdateEvent : IIntegrationEvent
|
||||
{
|
||||
public Guid RequestId { get; set; }
|
||||
public RequestStatus Status { get; set; }
|
||||
|
||||
#region Success
|
||||
|
||||
public string? FileAccessUrl { get; set; }
|
||||
|
||||
#endregion
|
||||
|
||||
#region Failure
|
||||
|
||||
public string? ErrorMessage { get; set; }
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
namespace FictionArchive.Service.FileService.Models;
|
||||
|
||||
public class ProxyConfiguration
|
||||
{
|
||||
public string BaseUrl { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
namespace FictionArchive.Service.FileService.Models;
|
||||
|
||||
public class S3Configuration
|
||||
{
|
||||
public string Url { get; set; }
|
||||
public string Bucket { get; set; }
|
||||
public string AccessKey { get; set; }
|
||||
public string SecretKey { get; set; }
|
||||
}
|
||||
76
FictionArchive.Service.FileService/Program.cs
Normal file
@@ -0,0 +1,76 @@
|
||||
using Amazon.Runtime;
|
||||
using Amazon.S3;
|
||||
using FictionArchive.Common.Extensions;
|
||||
using FictionArchive.Service.FileService.Models;
|
||||
using FictionArchive.Service.FileService.Models.IntegrationEvents;
|
||||
using FictionArchive.Service.FileService.Services.EventHandlers;
|
||||
using FictionArchive.Service.Shared.Extensions;
|
||||
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace FictionArchive.Service.FileService;
|
||||
|
||||
public class Program
|
||||
{
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
builder.AddLocalAppsettings();
|
||||
|
||||
builder.Services.AddControllers();
|
||||
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
|
||||
builder.Services.AddEndpointsApiExplorer();
|
||||
builder.Services.AddSwaggerGen();
|
||||
|
||||
builder.Services.AddHealthChecks();
|
||||
|
||||
#region Event Bus
|
||||
|
||||
builder.Services.AddRabbitMQ(opt =>
|
||||
{
|
||||
builder.Configuration.GetSection("RabbitMQ").Bind(opt);
|
||||
})
|
||||
.Subscribe<FileUploadRequestCreatedEvent, FileUploadRequestCreatedEventHandler>();
|
||||
|
||||
#endregion
|
||||
|
||||
// Add authentication with cookie support
|
||||
builder.Services.AddOidcCookieAuthentication(builder.Configuration);
|
||||
builder.Services.AddFictionArchiveAuthorization();
|
||||
|
||||
builder.Services.Configure<ProxyConfiguration>(builder.Configuration.GetSection("ProxyConfiguration"));
|
||||
|
||||
// Add S3 Client
|
||||
builder.Services.Configure<S3Configuration>(builder.Configuration.GetSection("S3"));
|
||||
builder.Services.AddSingleton<AmazonS3Client>(provider =>
|
||||
{
|
||||
var config = provider.GetRequiredService<IOptions<S3Configuration>>().Value;
|
||||
var s3Config = new AmazonS3Config
|
||||
{
|
||||
ServiceURL = config.Url, // Garage endpoint
|
||||
ForcePathStyle = true, // REQUIRED for Garage
|
||||
AuthenticationRegion = "garage"
|
||||
};
|
||||
return new AmazonS3Client(
|
||||
new BasicAWSCredentials(config.AccessKey, config.SecretKey),
|
||||
s3Config);
|
||||
});
|
||||
|
||||
var app = builder.Build();
|
||||
|
||||
if (app.Environment.IsDevelopment())
|
||||
{
|
||||
app.UseSwagger();
|
||||
app.UseSwaggerUI();
|
||||
}
|
||||
|
||||
app.UseAuthentication();
|
||||
app.UseAuthorization();
|
||||
|
||||
app.MapHealthChecks("/healthz");
|
||||
|
||||
app.MapControllers();
|
||||
|
||||
app.Run();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"$schema": "http://json.schemastore.org/launchsettings.json",
|
||||
"iisSettings": {
|
||||
"windowsAuthentication": false,
|
||||
"anonymousAuthentication": true,
|
||||
"iisExpress": {
|
||||
"applicationUrl": "http://localhost:5546",
|
||||
"sslPort": 44373
|
||||
}
|
||||
},
|
||||
"profiles": {
|
||||
"http": {
|
||||
"commandName": "Project",
|
||||
"dotnetRunMessages": true,
|
||||
"launchBrowser": true,
|
||||
"applicationUrl": "http://localhost:5057",
|
||||
"environmentVariables": {
|
||||
"ASPNETCORE_ENVIRONMENT": "Development"
|
||||
}
|
||||
},
|
||||
"https": {
|
||||
"commandName": "Project",
|
||||
"dotnetRunMessages": true,
|
||||
"launchBrowser": true,
|
||||
"launchUrl": "swagger",
|
||||
"applicationUrl": "https://localhost:7247;http://localhost:5057",
|
||||
"environmentVariables": {
|
||||
"ASPNETCORE_ENVIRONMENT": "Development"
|
||||
}
|
||||
},
|
||||
"IIS Express": {
|
||||
"commandName": "IISExpress",
|
||||
"launchBrowser": true,
|
||||
"environmentVariables": {
|
||||
"ASPNETCORE_ENVIRONMENT": "Development"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
using Amazon.S3;
|
||||
using Amazon.S3.Model;
|
||||
using FictionArchive.Common.Enums;
|
||||
using FictionArchive.Service.FileService.Models;
|
||||
using FictionArchive.Service.FileService.Models.IntegrationEvents;
|
||||
using FictionArchive.Service.Shared.Services.EventBus;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace FictionArchive.Service.FileService.Services.EventHandlers;
|
||||
|
||||
public class FileUploadRequestCreatedEventHandler : IIntegrationEventHandler<FileUploadRequestCreatedEvent>
|
||||
{
|
||||
private readonly ILogger<FileUploadRequestCreatedEventHandler> _logger;
|
||||
private readonly AmazonS3Client _amazonS3Client;
|
||||
private readonly IEventBus _eventBus;
|
||||
private readonly S3Configuration _s3Configuration;
|
||||
private readonly ProxyConfiguration _proxyConfiguration;
|
||||
|
||||
public FileUploadRequestCreatedEventHandler(ILogger<FileUploadRequestCreatedEventHandler> logger, AmazonS3Client amazonS3Client, IEventBus eventBus, IOptions<S3Configuration> s3Configuration, IOptions<ProxyConfiguration> proxyConfiguration)
|
||||
{
|
||||
_logger = logger;
|
||||
_amazonS3Client = amazonS3Client;
|
||||
_eventBus = eventBus;
|
||||
_proxyConfiguration = proxyConfiguration.Value;
|
||||
_s3Configuration = s3Configuration.Value;
|
||||
}
|
||||
|
||||
public async Task Handle(FileUploadRequestCreatedEvent @event)
|
||||
{
|
||||
var putObjectRequest = new PutObjectRequest();
|
||||
putObjectRequest.BucketName = _s3Configuration.Bucket;
|
||||
putObjectRequest.Key = @event.FilePath;
|
||||
putObjectRequest.UseChunkEncoding = false; // Needed to avoid an error with Garage
|
||||
|
||||
using MemoryStream memoryStream = new MemoryStream(@event.FileData);
|
||||
putObjectRequest.InputStream = memoryStream;
|
||||
|
||||
var s3Response = await _amazonS3Client.PutObjectAsync(putObjectRequest);
|
||||
if (s3Response.HttpStatusCode != System.Net.HttpStatusCode.OK)
|
||||
{
|
||||
_logger.LogError("An error occurred while uploading file to S3. Response code: {responsecode}", s3Response.HttpStatusCode);
|
||||
await _eventBus.Publish(new FileUploadRequestStatusUpdateEvent()
|
||||
{
|
||||
RequestId = @event.RequestId,
|
||||
Status = RequestStatus.Failed,
|
||||
ErrorMessage = "An error occurred while uploading file to S3."
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await _eventBus.Publish(new FileUploadRequestStatusUpdateEvent()
|
||||
{
|
||||
Status = RequestStatus.Success,
|
||||
RequestId = @event.RequestId,
|
||||
FileAccessUrl = _proxyConfiguration.BaseUrl + "/" + @event.FilePath
|
||||
});
|
||||
}
|
||||
}
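This handler reports back with `FileUploadRequestStatusUpdateEvent`, keyed by the `RequestId` that the publisher sets to the image's Id (the NovelService tests later in this compare assert exactly that pairing). The consuming side is not included here; the following is only a sketch of what a NovelService consumer could look like, assuming its DbContext exposes the `Images` set from the AddImages migration.

```csharp
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.Shared.Services.EventBus;

// Sketch only — the actual consumer is not part of this diff.
// Assumes NovelServiceDbContext exposes a DbSet<Image> named Images
// and that RequestId equals the Image entity's Id.
public class FileUploadRequestStatusUpdateEventHandler
    : IIntegrationEventHandler<FileUploadRequestStatusUpdateEvent>
{
    private readonly NovelServiceDbContext _dbContext;

    public FileUploadRequestStatusUpdateEventHandler(NovelServiceDbContext dbContext)
    {
        _dbContext = dbContext;
    }

    public async Task Handle(FileUploadRequestStatusUpdateEvent @event)
    {
        var image = await _dbContext.Images.FindAsync(@event.RequestId);
        if (image is null || @event.Status != RequestStatus.Success)
        {
            return; // unknown request, or the upload failed upstream
        }

        // Point the image at its rehosted location behind the FileService proxy.
        image.NewPath = @event.FileAccessUrl;
        await _dbContext.SaveChangesAsync();
    }
}
```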
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"Default": "Information",
|
||||
"Microsoft.AspNetCore": "Warning"
|
||||
}
|
||||
}
|
||||
}
|
||||
32
FictionArchive.Service.FileService/appsettings.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"Default": "Information",
|
||||
"Microsoft.AspNetCore": "Warning"
|
||||
}
|
||||
},
|
||||
"ProxyConfiguration": {
|
||||
"BaseUrl": "https://localhost:7247/api"
|
||||
},
|
||||
"RabbitMQ": {
|
||||
"ConnectionString": "amqp://localhost",
|
||||
"ClientIdentifier": "FileService"
|
||||
},
|
||||
"S3": {
|
||||
"Url": "https://s3.orfl.xyz",
|
||||
"Bucket": "fictionarchive",
|
||||
"AccessKey": "REPLACE_ME",
|
||||
"SecretKey": "REPLACE_ME"
|
||||
},
|
||||
"OIDC": {
|
||||
"Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
|
||||
"ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
|
||||
"Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
|
||||
"ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
|
||||
"ValidateIssuer": true,
|
||||
"ValidateAudience": true,
|
||||
"ValidateLifetime": true,
|
||||
"ValidateIssuerSigningKey": true
|
||||
},
|
||||
"AllowedHosts": "*"
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.11" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
|
||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.2">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\\FictionArchive.Service.NovelService\\FictionArchive.Service.NovelService.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,165 @@
|
||||
using FictionArchive.Common.Enums;
|
||||
using FictionArchive.Service.FileService.IntegrationEvents;
|
||||
using FictionArchive.Service.NovelService.Models.Configuration;
|
||||
using FictionArchive.Service.NovelService.Models.Enums;
|
||||
using FictionArchive.Service.NovelService.Models.Images;
|
||||
using FictionArchive.Service.NovelService.Models.Localization;
|
||||
using FictionArchive.Service.NovelService.Models.Novels;
|
||||
using FictionArchive.Service.NovelService.Models.SourceAdapters;
|
||||
using FictionArchive.Service.NovelService.Services;
|
||||
using FictionArchive.Service.NovelService.Services.SourceAdapters;
|
||||
using FictionArchive.Service.Shared.Services.EventBus;
|
||||
using FluentAssertions;
|
||||
using HtmlAgilityPack;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace FictionArchive.Service.NovelService.Tests;
|
||||
|
||||
public class NovelUpdateServiceTests
|
||||
{
|
||||
private static NovelServiceDbContext CreateDbContext()
|
||||
{
|
||||
var options = new DbContextOptionsBuilder<NovelServiceDbContext>()
|
||||
.UseInMemoryDatabase($"NovelUpdateServiceTests-{Guid.NewGuid()}")
|
||||
.Options;
|
||||
|
||||
return new NovelServiceDbContext(options, NullLogger<NovelServiceDbContext>.Instance);
|
||||
}
|
||||
|
||||
private static NovelCreateResult CreateNovelWithSingleChapter(NovelServiceDbContext dbContext, Source source)
|
||||
{
|
||||
var chapter = new Chapter
|
||||
{
|
||||
Order = 1,
|
||||
Revision = 1,
|
||||
Url = "http://demo/chapter-1",
|
||||
Name = LocalizationKey.CreateFromText("Chapter 1", Language.En),
|
||||
Body = new LocalizationKey { Texts = new List<LocalizationText>() },
|
||||
Images = new List<Image>()
|
||||
};
|
||||
|
||||
var novel = new Novel
|
||||
{
|
||||
Url = "http://demo/novel",
|
||||
ExternalId = "demo-1",
|
||||
Author = new Person { Name = LocalizationKey.CreateFromText("Author", Language.En) },
|
||||
RawLanguage = Language.En,
|
||||
RawStatus = NovelStatus.InProgress,
|
||||
Source = source,
|
||||
Name = LocalizationKey.CreateFromText("Demo Novel", Language.En),
|
||||
Description = LocalizationKey.CreateFromText("Description", Language.En),
|
||||
Chapters = new List<Chapter> { chapter },
|
||||
Tags = new List<NovelTag>()
|
||||
};
|
||||
|
||||
dbContext.Novels.Add(novel);
|
||||
dbContext.SaveChanges();
|
||||
|
||||
return new NovelCreateResult(novel, chapter);
|
||||
}
|
||||
|
||||
private static NovelUpdateService CreateService(
|
||||
NovelServiceDbContext dbContext,
|
||||
ISourceAdapter adapter,
|
||||
IEventBus eventBus,
|
||||
string pendingImageUrl = "https://pending/placeholder.jpg")
|
||||
{
|
||||
var options = Options.Create(new NovelUpdateServiceConfiguration
|
||||
{
|
||||
PendingImageUrl = pendingImageUrl
|
||||
});
|
||||
|
||||
return new NovelUpdateService(dbContext, NullLogger<NovelUpdateService>.Instance, new[] { adapter }, eventBus, options);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PullChapterContents_rewrites_images_and_publishes_requests()
|
||||
{
|
||||
using var dbContext = CreateDbContext();
|
||||
var source = new Source { Name = "Demo", Key = "demo", Url = "http://demo" };
|
||||
var (novel, chapter) = CreateNovelWithSingleChapter(dbContext, source);
|
||||
|
||||
var rawHtml = "<p>Hello</p><img src=\"http://img/x1.jpg\" alt=\"first\" /><img src=\"http://img/x2.jpg\" alt=\"second\" />";
|
||||
var image1 = new ImageData { Url = "http://img/x1.jpg", Data = new byte[] { 1, 2, 3 } };
|
||||
var image2 = new ImageData { Url = "http://img/x2.jpg", Data = new byte[] { 4, 5, 6 } };
|
||||
|
||||
var adapter = Substitute.For<ISourceAdapter>();
|
||||
adapter.SourceDescriptor.Returns(new SourceDescriptor { Key = "demo", Name = "Demo", Url = "http://demo" });
|
||||
adapter.GetRawChapter(chapter.Url).Returns(Task.FromResult(new ChapterFetchResult
|
||||
{
|
||||
Text = rawHtml,
|
||||
ImageData = new List<ImageData> { image1, image2 }
|
||||
}));
|
||||
|
||||
var publishedEvents = new List<FileUploadRequestCreatedEvent>();
|
||||
var eventBus = Substitute.For<IEventBus>();
|
||||
eventBus.Publish(Arg.Do<FileUploadRequestCreatedEvent>(publishedEvents.Add)).Returns(Task.CompletedTask);
|
||||
eventBus.Publish(Arg.Any<object>(), Arg.Any<string>()).Returns(Task.CompletedTask);
|
||||
|
||||
var pendingImageUrl = "https://pending/placeholder.jpg";
|
||||
var service = CreateService(dbContext, adapter, eventBus, pendingImageUrl);
|
||||
|
||||
var updatedChapter = await service.PullChapterContents(novel.Id, chapter.Order);
|
||||
|
||||
updatedChapter.Images.Should().HaveCount(2);
|
||||
updatedChapter.Images.Select(i => i.OriginalPath).Should().BeEquivalentTo(new[] { image1.Url, image2.Url });
|
||||
updatedChapter.Images.All(i => i.Id != Guid.Empty).Should().BeTrue();
|
||||
|
||||
var storedHtml = updatedChapter.Body.Texts.Single().Text;
|
||||
var doc = new HtmlDocument();
|
||||
doc.LoadHtml(storedHtml);
|
||||
var imgNodes = doc.DocumentNode.SelectNodes("//img");
|
||||
imgNodes.Should().NotBeNull();
|
||||
imgNodes!.Count.Should().Be(2);
|
||||
imgNodes.Should().OnlyContain(node => node.GetAttributeValue("src", string.Empty) == pendingImageUrl);
|
||||
imgNodes.Select(node => node.GetAttributeValue("alt", string.Empty))
|
||||
.Should()
|
||||
.BeEquivalentTo(updatedChapter.Images.Select(img => img.Id.ToString()));
|
||||
|
||||
publishedEvents.Should().HaveCount(2);
|
||||
publishedEvents.Select(e => e.RequestId).Should().BeEquivalentTo(updatedChapter.Images.Select(i => i.Id));
|
||||
publishedEvents.Select(e => e.FileData).Should().BeEquivalentTo(new[] { image1.Data, image2.Data });
|
||||
publishedEvents.Should().OnlyContain(e => e.FilePath.StartsWith($"{novel.Id}/Images/Chapter-{updatedChapter.Id}/"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PullChapterContents_adds_alt_when_missing()
|
||||
{
|
||||
using var dbContext = CreateDbContext();
|
||||
var source = new Source { Name = "Demo", Key = "demo", Url = "http://demo" };
|
||||
var (novel, chapter) = CreateNovelWithSingleChapter(dbContext, source);
|
||||
|
||||
var rawHtml = "<p>Hi</p><img src=\"http://img/x1.jpg\">";
|
||||
var image = new ImageData { Url = "http://img/x1.jpg", Data = new byte[] { 7, 8, 9 } };
|
||||
|
||||
var adapter = Substitute.For<ISourceAdapter>();
|
||||
adapter.SourceDescriptor.Returns(new SourceDescriptor { Key = "demo", Name = "Demo", Url = "http://demo" });
|
||||
adapter.GetRawChapter(chapter.Url).Returns(Task.FromResult(new ChapterFetchResult
|
||||
{
|
||||
Text = rawHtml,
|
||||
ImageData = new List<ImageData> { image }
|
||||
}));
|
||||
|
||||
var eventBus = Substitute.For<IEventBus>();
|
||||
eventBus.Publish(Arg.Any<FileUploadRequestCreatedEvent>()).Returns(Task.CompletedTask);
|
||||
eventBus.Publish(Arg.Any<object>(), Arg.Any<string>()).Returns(Task.CompletedTask);
|
||||
|
||||
var service = CreateService(dbContext, adapter, eventBus);
|
||||
|
||||
var updatedChapter = await service.PullChapterContents(novel.Id, chapter.Order);
|
||||
|
||||
var storedHtml = updatedChapter.Body.Texts.Single().Text;
|
||||
var doc = new HtmlDocument();
|
||||
doc.LoadHtml(storedHtml);
|
||||
var imgNode = doc.DocumentNode.SelectSingleNode("//img");
|
||||
imgNode.Should().NotBeNull();
|
||||
imgNode!.GetAttributeValue("alt", string.Empty).Should().Be(updatedChapter.Images.Single().Id.ToString());
|
||||
imgNode.GetAttributeValue("src", string.Empty).Should().Be("https://pending/placeholder.jpg");
|
||||
}
|
||||
|
||||
private record NovelCreateResult(Novel Novel, Chapter Chapter);
|
||||
}
|
||||
@@ -9,6 +9,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="HotChocolate.AspNetCore.CommandLine" Version="15.1.11" />
|
||||
<PackageReference Include="HtmlAgilityPack" Version="1.12.4" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
|
||||
@@ -6,32 +6,24 @@ using FictionArchive.Service.NovelService.Models.SourceAdapters;
|
||||
using FictionArchive.Service.NovelService.Services;
|
||||
using FictionArchive.Service.NovelService.Services.SourceAdapters;
|
||||
using FictionArchive.Service.Shared.Services.EventBus;
|
||||
using HotChocolate.Authorization;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace FictionArchive.Service.NovelService.GraphQL;
|
||||
|
||||
public class Mutation
|
||||
{
|
||||
public async Task<NovelUpdateRequestedEvent> ImportNovel(string novelUrl, IEventBus eventBus)
|
||||
[Authorize]
|
||||
public async Task<NovelUpdateRequestedEvent> ImportNovel(string novelUrl, NovelUpdateService service)
|
||||
{
|
||||
var importNovelRequestEvent = new NovelUpdateRequestedEvent()
|
||||
{
|
||||
NovelUrl = novelUrl
|
||||
};
|
||||
await eventBus.Publish(importNovelRequestEvent);
|
||||
return importNovelRequestEvent;
|
||||
return await service.QueueNovelImport(novelUrl);
|
||||
}
|
||||
|
||||
[Authorize]
|
||||
public async Task<ChapterPullRequestedEvent> FetchChapterContents(uint novelId,
|
||||
uint chapterNumber,
|
||||
IEventBus eventBus)
|
||||
NovelUpdateService service)
|
||||
{
|
||||
var chapterPullEvent = new ChapterPullRequestedEvent()
|
||||
{
|
||||
NovelId = novelId,
|
||||
ChapterNumber = chapterNumber
|
||||
};
|
||||
await eventBus.Publish(chapterPullEvent);
|
||||
return chapterPullEvent;
|
||||
return await service.QueueChapterPull(novelId, chapterNumber);
|
||||
}
|
||||
}
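Both mutations now require authorization and delegate to `NovelUpdateService`. The `QueueNovelImport`/`QueueChapterPull` bodies are not shown in this compare, but judging from the code they replace, a plausible sketch is simply the same event publishing moved behind the service (the class and method shapes below are assumptions):

```csharp
using FictionArchive.Service.Shared.Services.EventBus;
// using directives for NovelUpdateRequestedEvent/ChapterPullRequestedEvent omitted;
// they live in the NovelService models, which are not fully shown in this compare.

// Sketch only — mirrors the event-publishing logic removed from Mutation above.
public class NovelUpdateServiceQueueSketch
{
    private readonly IEventBus _eventBus;

    public NovelUpdateServiceQueueSketch(IEventBus eventBus) => _eventBus = eventBus;

    public async Task<NovelUpdateRequestedEvent> QueueNovelImport(string novelUrl)
    {
        var importNovelRequestEvent = new NovelUpdateRequestedEvent { NovelUrl = novelUrl };
        await _eventBus.Publish(importNovelRequestEvent);
        return importNovelRequestEvent;
    }

    public async Task<ChapterPullRequestedEvent> QueueChapterPull(uint novelId, uint chapterNumber)
    {
        var chapterPullEvent = new ChapterPullRequestedEvent
        {
            NovelId = novelId,
            ChapterNumber = chapterNumber
        };
        await _eventBus.Publish(chapterPullEvent);
        return chapterPullEvent;
    }
}
```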
|
||||
@@ -1,5 +1,6 @@
|
||||
using FictionArchive.Service.NovelService.Models.Novels;
|
||||
using FictionArchive.Service.NovelService.Services;
|
||||
using HotChocolate.Authorization;
|
||||
using HotChocolate.Data;
|
||||
using HotChocolate.Types;
|
||||
|
||||
@@ -7,6 +8,7 @@ namespace FictionArchive.Service.NovelService.GraphQL;
|
||||
|
||||
public class Query
|
||||
{
|
||||
[Authorize]
|
||||
[UsePaging]
|
||||
[UseProjection]
|
||||
[UseFiltering]
|
||||
|
||||
540
FictionArchive.Service.NovelService/Migrations/20251123203953_AddImages.Designer.cs
generated
Normal file
@@ -0,0 +1,540 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using FictionArchive.Service.NovelService.Services;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using NodaTime;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace FictionArchive.Service.NovelService.Migrations
|
||||
{
|
||||
[DbContext(typeof(NovelServiceDbContext))]
|
||||
[Migration("20251123203953_AddImages")]
|
||||
partial class AddImages
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.11")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<long?>("ChapterId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<string>("NewPath")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("OriginalPath")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("ChapterId");
|
||||
|
||||
b.ToTable("Images");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.ToTable("LocalizationKeys");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<long>("EngineId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<Guid>("KeyRequestedForTranslationId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<int>("TranslateTo")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("EngineId");
|
||||
|
||||
b.HasIndex("KeyRequestedForTranslationId");
|
||||
|
||||
b.ToTable("LocalizationRequests");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationText", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<int>("Language")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Guid?>("LocalizationKeyId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<string>("Text")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<long?>("TranslationEngineId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("LocalizationKeyId");
|
||||
|
||||
b.HasIndex("TranslationEngineId");
|
||||
|
||||
b.ToTable("LocalizationText");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
|
||||
{
|
||||
b.Property<long>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("bigint");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||
|
||||
b.Property<Guid>("BodyId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Guid>("NameId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<long?>("NovelId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<long>("Order")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<long>("Revision")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<string>("Url")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("BodyId");
|
||||
|
||||
b.HasIndex("NameId");
|
||||
|
||||
b.HasIndex("NovelId");
|
||||
|
||||
b.ToTable("Chapter");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
|
||||
{
|
||||
b.Property<long>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("bigint");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||
|
||||
b.Property<long>("AuthorId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<Guid?>("CoverImageId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Guid>("DescriptionId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<string>("ExternalId")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Guid>("NameId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<int>("RawLanguage")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<int>("RawStatus")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<long>("SourceId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<int?>("StatusOverride")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<string>("Url")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("AuthorId");
|
||||
|
||||
b.HasIndex("CoverImageId");
|
||||
|
||||
b.HasIndex("DescriptionId");
|
||||
|
||||
b.HasIndex("NameId");
|
||||
|
||||
b.HasIndex("SourceId");
|
||||
|
||||
b.ToTable("Novels");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.NovelTag", b =>
|
||||
{
|
||||
b.Property<long>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("bigint");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Guid>("DisplayNameId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.Property<string>("Key")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<long?>("SourceId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<int>("TagType")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("DisplayNameId");
|
||||
|
||||
b.HasIndex("SourceId");
|
||||
|
||||
b.ToTable("Tags");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Person", b =>
|
||||
{
|
||||
b.Property<long>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("bigint");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<string>("ExternalUrl")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<Guid>("NameId")
|
||||
.HasColumnType("uuid");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("NameId");
|
||||
|
||||
b.ToTable("Person");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Source", b =>
|
||||
{
|
||||
b.Property<long>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("bigint");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<string>("Key")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("Url")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.ToTable("Sources");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", b =>
|
||||
{
|
||||
b.Property<long>("Id")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("bigint");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||
|
||||
b.Property<Instant>("CreatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.Property<string>("Key")
|
||||
.IsRequired()
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<Instant>("LastUpdatedTime")
|
||||
.HasColumnType("timestamp with time zone");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.ToTable("TranslationEngines");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("NovelNovelTag", b =>
|
||||
{
|
||||
b.Property<long>("NovelsId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.Property<long>("TagsId")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b.HasKey("NovelsId", "TagsId");
|
||||
|
||||
b.HasIndex("TagsId");
|
||||
|
||||
b.ToTable("NovelNovelTag");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Chapter", "Chapter")
|
||||
.WithMany("Images")
|
||||
.HasForeignKey("ChapterId");
|
||||
|
||||
b.Navigation("Chapter");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "Engine")
|
||||
.WithMany()
|
||||
.HasForeignKey("EngineId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "KeyRequestedForTranslation")
|
||||
.WithMany()
|
||||
.HasForeignKey("KeyRequestedForTranslationId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Engine");
|
||||
|
||||
b.Navigation("KeyRequestedForTranslation");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationText", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", null)
|
||||
.WithMany("Texts")
|
||||
.HasForeignKey("LocalizationKeyId");
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "TranslationEngine")
|
||||
.WithMany()
|
||||
.HasForeignKey("TranslationEngineId");
|
||||
|
||||
b.Navigation("TranslationEngine");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Body")
|
||||
.WithMany()
|
||||
.HasForeignKey("BodyId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
|
||||
.WithMany()
|
||||
.HasForeignKey("NameId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", null)
|
||||
.WithMany("Chapters")
|
||||
.HasForeignKey("NovelId");
|
||||
|
||||
b.Navigation("Body");
|
||||
|
||||
b.Navigation("Name");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Person", "Author")
|
||||
.WithMany()
|
||||
.HasForeignKey("AuthorId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Images.Image", "CoverImage")
|
||||
.WithMany()
|
||||
.HasForeignKey("CoverImageId");
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Description")
|
||||
.WithMany()
|
||||
.HasForeignKey("DescriptionId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
|
||||
.WithMany()
|
||||
.HasForeignKey("NameId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Source", "Source")
|
||||
.WithMany()
|
||||
.HasForeignKey("SourceId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Author");
|
||||
|
||||
b.Navigation("CoverImage");
|
||||
|
||||
b.Navigation("Description");
|
||||
|
||||
b.Navigation("Name");
|
||||
|
||||
b.Navigation("Source");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.NovelTag", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "DisplayName")
|
||||
.WithMany()
|
||||
.HasForeignKey("DisplayNameId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Source", "Source")
|
||||
.WithMany()
|
||||
.HasForeignKey("SourceId");
|
||||
|
||||
b.Navigation("DisplayName");
|
||||
|
||||
b.Navigation("Source");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Person", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
|
||||
.WithMany()
|
||||
.HasForeignKey("NameId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Name");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("NovelNovelTag", b =>
|
||||
{
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", null)
|
||||
.WithMany()
|
||||
.HasForeignKey("NovelsId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.NovelTag", null)
|
||||
.WithMany()
|
||||
.HasForeignKey("TagsId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
|
||||
{
|
||||
b.Navigation("Texts");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
|
||||
{
|
||||
b.Navigation("Images");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
|
||||
{
|
||||
b.Navigation("Chapters");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,79 @@
|
||||
using System;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using NodaTime;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace FictionArchive.Service.NovelService.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class AddImages : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.AddColumn<Guid>(
|
||||
name: "CoverImageId",
|
||||
table: "Novels",
|
||||
type: "uuid",
|
||||
nullable: true);
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "Images",
|
||||
columns: table => new
|
||||
{
|
||||
Id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
OriginalPath = table.Column<string>(type: "text", nullable: false),
|
||||
NewPath = table.Column<string>(type: "text", nullable: true),
|
||||
ChapterId = table.Column<long>(type: "bigint", nullable: true),
|
||||
CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
|
||||
LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_Images", x => x.Id);
|
||||
table.ForeignKey(
|
||||
name: "FK_Images_Chapter_ChapterId",
|
||||
column: x => x.ChapterId,
|
||||
principalTable: "Chapter",
|
||||
principalColumn: "Id");
|
||||
});
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Novels_CoverImageId",
|
||||
table: "Novels",
|
||||
column: "CoverImageId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Images_ChapterId",
|
||||
table: "Images",
|
||||
column: "ChapterId");
|
||||
|
||||
migrationBuilder.AddForeignKey(
|
||||
name: "FK_Novels_Images_CoverImageId",
|
||||
table: "Novels",
|
||||
column: "CoverImageId",
|
||||
principalTable: "Images",
|
||||
principalColumn: "Id");
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropForeignKey(
|
||||
name: "FK_Novels_Images_CoverImageId",
|
||||
table: "Novels");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "Images");
|
||||
|
||||
migrationBuilder.DropIndex(
|
||||
name: "IX_Novels_CoverImageId",
|
||||
table: "Novels");
|
||||
|
||||
migrationBuilder.DropColumn(
|
||||
name: "CoverImageId",
|
||||
table: "Novels");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -23,6 +23,35 @@ namespace FictionArchive.Service.NovelService.Migrations

            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

            modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
                {
                    b.Property<Guid>("Id")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("uuid");

                    b.Property<long?>("ChapterId")
                        .HasColumnType("bigint");

                    b.Property<Instant>("CreatedTime")
                        .HasColumnType("timestamp with time zone");

                    b.Property<Instant>("LastUpdatedTime")
                        .HasColumnType("timestamp with time zone");

                    b.Property<string>("NewPath")
                        .HasColumnType("text");

                    b.Property<string>("OriginalPath")
                        .IsRequired()
                        .HasColumnType("text");

                    b.HasKey("Id");

                    b.HasIndex("ChapterId");

                    b.ToTable("Images");
                });

            modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
                {
                    b.Property<Guid>("Id")
@@ -158,6 +187,9 @@ namespace FictionArchive.Service.NovelService.Migrations
                    b.Property<long>("AuthorId")
                        .HasColumnType("bigint");

                    b.Property<Guid?>("CoverImageId")
                        .HasColumnType("uuid");

                    b.Property<Instant>("CreatedTime")
                        .HasColumnType("timestamp with time zone");

@@ -194,6 +226,8 @@ namespace FictionArchive.Service.NovelService.Migrations

                    b.HasIndex("AuthorId");

                    b.HasIndex("CoverImageId");

                    b.HasIndex("DescriptionId");

                    b.HasIndex("NameId");
@@ -335,6 +369,15 @@ namespace FictionArchive.Service.NovelService.Migrations
                    b.ToTable("NovelNovelTag");
                });

            modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
                {
                    b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Chapter", "Chapter")
                        .WithMany("Images")
                        .HasForeignKey("ChapterId");

                    b.Navigation("Chapter");
                });

            modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
                {
                    b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "Engine")
@@ -398,6 +441,10 @@ namespace FictionArchive.Service.NovelService.Migrations
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.HasOne("FictionArchive.Service.NovelService.Models.Images.Image", "CoverImage")
                        .WithMany()
                        .HasForeignKey("CoverImageId");

                    b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Description")
                        .WithMany()
                        .HasForeignKey("DescriptionId")
@@ -418,6 +465,8 @@ namespace FictionArchive.Service.NovelService.Migrations

                    b.Navigation("Author");

                    b.Navigation("CoverImage");

                    b.Navigation("Description");

                    b.Navigation("Name");
@@ -473,6 +522,11 @@ namespace FictionArchive.Service.NovelService.Migrations
                    b.Navigation("Texts");
                });

            modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
                {
                    b.Navigation("Images");
                });

            modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
                {
                    b.Navigation("Chapters");
@@ -0,0 +1,6 @@
namespace FictionArchive.Service.NovelService.Models.Configuration;

public class NovelUpdateServiceConfiguration
{
    public string PendingImageUrl { get; set; }
}
13
FictionArchive.Service.NovelService/Models/Images/Image.cs
Normal file
@@ -0,0 +1,13 @@
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.Shared.Models;

namespace FictionArchive.Service.NovelService.Models.Images;

public class Image : BaseEntity<Guid>
{
    public string OriginalPath { get; set; }
    public string? NewPath { get; set; }

    // Chapter link. Even if an image appears in another chapter, we should rehost it separately.
    public Chapter? Chapter { get; set; }
}
@@ -0,0 +1,10 @@
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.FileService.IntegrationEvents;

public class FileUploadRequestCreatedEvent : IIntegrationEvent
{
    public Guid RequestId { get; set; }
    public string FilePath { get; set; }
    public byte[] FileData { get; set; }
}
@@ -0,0 +1,22 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;

public class FileUploadRequestStatusUpdateEvent : IIntegrationEvent
{
    public Guid RequestId { get; set; }
    public RequestStatus Status { get; set; }

    #region Success

    public string? FileAccessUrl { get; set; }

    #endregion

    #region Failure

    public string? ErrorMessage { get; set; }

    #endregion
}
@@ -1,3 +1,4 @@
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.Shared.Models;

@@ -11,4 +12,7 @@ public class Chapter : BaseEntity<uint>

    public LocalizationKey Name { get; set; }
    public LocalizationKey Body { get; set; }

    // Images appearing in this chapter.
    public List<Image> Images { get; set; }
}
@@ -1,4 +1,5 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.Shared.Models;
using NovelStatus = FictionArchive.Service.NovelService.Models.Enums.NovelStatus;
@@ -22,4 +23,5 @@ public class Novel : BaseEntity<uint>

    public List<Chapter> Chapters { get; set; }
    public List<NovelTag> Tags { get; set; }
    public Image? CoverImage { get; set; }
}
@@ -0,0 +1,7 @@
namespace FictionArchive.Service.NovelService.Models.SourceAdapters;

public class ChapterFetchResult
{
    public string Text { get; set; }
    public List<ImageData> ImageData { get; set; }
}
@@ -6,5 +6,5 @@ public class ChapterMetadata
    public uint Order { get; set; }
    public string? Url { get; set; }
    public string Name { get; set; }

    public List<string> ImageUrls { get; set; }
}
@@ -0,0 +1,7 @@
namespace FictionArchive.Service.NovelService.Models.SourceAdapters;

public class ImageData
{
    public string Url { get; set; }
    public byte[] Data { get; set; }
}
@@ -11,6 +11,7 @@ public class NovelMetadata
    public string AuthorUrl { get; set; }
    public string Url { get; set; }
    public string ExternalId { get; set; }
    public ImageData? CoverImage { get; set; }

    public Language RawLanguage { get; set; }
    public NovelStatus RawStatus { get; set; }
@@ -1,9 +1,12 @@
using FictionArchive.Common.Extensions;
using FictionArchive.Service.NovelService.GraphQL;
using FictionArchive.Service.NovelService.Models.Configuration;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.NovelService.Services.EventHandlers;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.NovelService.Services.SourceAdapters.Novelpia;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.Shared.Services.GraphQL;
@@ -15,31 +18,41 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);
        builder.AddLocalAppsettings();

        builder.Services.AddMemoryCache();

        #region Event Bus

        builder.Services.AddRabbitMQ(opt =>
        if (!isSchemaExport)
        {
            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
        })
            .Subscribe<TranslationRequestCompletedEvent, TranslationRequestCompletedEventHandler>()
            .Subscribe<NovelUpdateRequestedEvent, NovelUpdateRequestedEventHandler>()
            .Subscribe<ChapterPullRequestedEvent, ChapterPullRequestedEventHandler>();

            builder.Services.AddRabbitMQ(opt =>
                {
                    builder.Configuration.GetSection("RabbitMQ").Bind(opt);
                })
                .Subscribe<TranslationRequestCompletedEvent, TranslationRequestCompletedEventHandler>()
                .Subscribe<NovelUpdateRequestedEvent, NovelUpdateRequestedEventHandler>()
                .Subscribe<ChapterPullRequestedEvent, ChapterPullRequestedEventHandler>()
                .Subscribe<FileUploadRequestStatusUpdateEvent, FileUploadRequestStatusUpdateEventHandler>();
        }

        #endregion

        #region GraphQL

        builder.Services.AddDefaultGraphQl<Query, Mutation>();
        builder.Services.AddDefaultGraphQl<Query, Mutation>()
            .AddAuthorization();

        #endregion

        #region Database

        builder.Services.RegisterDbContext<NovelServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<NovelServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);

        #endregion

@@ -56,17 +69,23 @@ public class Program
            })
            .AddHttpMessageHandler<NovelpiaAuthMessageHandler>();

        builder.Services.Configure<NovelUpdateServiceConfiguration>(builder.Configuration.GetSection("UpdateService"));
        builder.Services.AddTransient<NovelUpdateService>();

        #endregion

        builder.Services.AddHealthChecks();

        // Authentication & Authorization
        builder.Services.AddOidcAuthentication(builder.Configuration);
        builder.Services.AddFictionArchiveAuthorization();

        var app = builder.Build();

        // Update database
        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<NovelServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -74,7 +93,10 @@ public class Program
        app.UseHttpsRedirection();

        app.MapHealthChecks("/healthz");

        app.UseAuthentication();
        app.UseAuthorization();

        app.MapGraphQL();

        app.RunWithGraphQLCommands(args);
@@ -0,0 +1,39 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.NovelService.Services.EventHandlers;

public class FileUploadRequestStatusUpdateEventHandler : IIntegrationEventHandler<FileUploadRequestStatusUpdateEvent>
{
    private readonly ILogger<FileUploadRequestStatusUpdateEventHandler> _logger;
    private readonly NovelServiceDbContext _context;
    private readonly NovelUpdateService _novelUpdateService;

    public FileUploadRequestStatusUpdateEventHandler(ILogger<FileUploadRequestStatusUpdateEventHandler> logger, NovelServiceDbContext context, NovelUpdateService novelUpdateService)
    {
        _logger = logger;
        _context = context;
        _novelUpdateService = novelUpdateService;
    }

    public async Task Handle(FileUploadRequestStatusUpdateEvent @event)
    {
        var image = await _context.Images.FindAsync(@event.RequestId);
        if (image == null)
        {
            // Not a request we care about.
            return;
        }
        if (@event.Status == RequestStatus.Failed)
        {
            _logger.LogError("Image upload failed for image with id {imageId}", image.Id);
            return;
        }
        else if (@event.Status == RequestStatus.Success)
        {
            _logger.LogInformation("Image upload succeeded for image with id {imageId}", image.Id);
            await _novelUpdateService.UpdateImage(image.Id, @event.FileAccessUrl);
        }
    }
}
@@ -1,3 +1,4 @@
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.Shared.Services.Database;
@@ -14,4 +15,5 @@ public class NovelServiceDbContext(DbContextOptions options, ILogger<NovelServic
    public DbSet<NovelTag> Tags { get; set; }
    public DbSet<LocalizationKey> LocalizationKeys { get; set; }
    public DbSet<LocalizationRequest> LocalizationRequests { get; set; }
    public DbSet<Image> Images { get; set; }
}
@@ -1,9 +1,16 @@
using FictionArchive.Service.FileService.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Configuration;
using FictionArchive.Service.NovelService.Models.Enums;
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.NovelService.Models.SourceAdapters;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.Shared.Services.EventBus;
using HtmlAgilityPack;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options;

namespace FictionArchive.Service.NovelService.Services;

@@ -12,12 +19,16 @@ public class NovelUpdateService
    private readonly NovelServiceDbContext _dbContext;
    private readonly ILogger<NovelUpdateService> _logger;
    private readonly IEnumerable<ISourceAdapter> _sourceAdapters;
    private readonly IEventBus _eventBus;
    private readonly NovelUpdateServiceConfiguration _novelUpdateServiceConfiguration;

    public NovelUpdateService(NovelServiceDbContext dbContext, ILogger<NovelUpdateService> logger, IEnumerable<ISourceAdapter> sourceAdapters)
    public NovelUpdateService(NovelServiceDbContext dbContext, ILogger<NovelUpdateService> logger, IEnumerable<ISourceAdapter> sourceAdapters, IEventBus eventBus, IOptions<NovelUpdateServiceConfiguration> novelUpdateServiceConfiguration)
    {
        _dbContext = dbContext;
        _logger = logger;
        _sourceAdapters = sourceAdapters;
        _eventBus = eventBus;
        _novelUpdateServiceConfiguration = novelUpdateServiceConfiguration.Value;
    }

    public async Task<Novel> ImportNovel(string novelUrl)
@@ -59,6 +70,10 @@ public class NovelUpdateService
            RawLanguage = metadata.RawLanguage,
            Url = metadata.Url,
            ExternalId = metadata.ExternalId,
            CoverImage = metadata.CoverImage != null ? new Image()
            {
                OriginalPath = metadata.CoverImage.Url,
            } : null,
            Chapters = metadata.Chapters.Select(chapter =>
            {
                return new Chapter()
@@ -85,7 +100,18 @@ public class NovelUpdateService
            }
        });
        await _dbContext.SaveChangesAsync();

        // Signal request for cover image if present
        if (addedNovel.Entity.CoverImage != null)
        {
            await _eventBus.Publish(new FileUploadRequestCreatedEvent()
            {
                RequestId = addedNovel.Entity.CoverImage.Id,
                FileData = metadata.CoverImage.Data,
                FilePath = $"Novels/{addedNovel.Entity.Id}/Images/cover.jpg"
            });
        }

        return addedNovel.Entity;
    }

@@ -95,17 +121,106 @@ public class NovelUpdateService
            .Include(novel => novel.Chapters)
            .ThenInclude(chapter => chapter.Body)
            .ThenInclude(body => body.Texts)
            .Include(novel => novel.Source)
            .Include(novel => novel.Source).Include(novel => novel.Chapters).ThenInclude(chapter => chapter.Images)
            .FirstOrDefaultAsync();
        var chapter = novel.Chapters.Where(chapter => chapter.Order == chapterNumber).FirstOrDefault();
        var adapter = _sourceAdapters.FirstOrDefault(adapter => adapter.SourceDescriptor.Key == novel.Source.Key);
        var rawChapter = await adapter.GetRawChapter(chapter.Url);
        chapter.Body.Texts.Add(new LocalizationText()
        var localizationText = new LocalizationText()
        {
            Text = rawChapter,
            Text = rawChapter.Text,
            Language = novel.RawLanguage
        });
        };
        chapter.Body.Texts.Add(localizationText);
        chapter.Images = rawChapter.ImageData.Select(img => new Image()
        {
            OriginalPath = img.Url
        }).ToList();
        await _dbContext.SaveChangesAsync();

        // Images are saved and have ids, update the chapter body to replace image tags
        var chapterDoc = new HtmlDocument();
        chapterDoc.LoadHtml(rawChapter.Text);
        foreach (var image in chapter.Images)
        {
            var match = chapterDoc.DocumentNode.SelectSingleNode(@$"//img[@src='{image.OriginalPath}']");
            if (match != null)
            {
                match.Attributes["src"].Value = _novelUpdateServiceConfiguration.PendingImageUrl;
                if (match.Attributes.Contains("alt"))
                {
                    match.Attributes["alt"].Value = image.Id.ToString();
                }
                else
                {
                    match.Attributes.Add("alt", image.Id.ToString());
                }
            }
        }
        localizationText.Text = chapterDoc.DocumentNode.OuterHtml;
        await _dbContext.SaveChangesAsync();

        // Body was updated, raise image request
        int imgCount = 0;
        foreach (var image in chapter.Images)
        {
            var data = rawChapter.ImageData.FirstOrDefault(img => img.Url == image.OriginalPath);
            await _eventBus.Publish(new FileUploadRequestCreatedEvent()
            {
                FileData = data.Data,
                FilePath = $"{novel.Id}/Images/Chapter-{chapter.Id}/{imgCount++}.jpg",
                RequestId = image.Id
            });
        }

        return chapter;
    }
}

    public async Task UpdateImage(Guid imageId, string newUrl)
    {
        var image = await _dbContext.Images
            .Include(img => img.Chapter)
            .ThenInclude(chapter => chapter.Body)
            .ThenInclude(body => body.Texts)
            .FirstOrDefaultAsync(image => image.Id == imageId);
        image.NewPath = newUrl;

        // If this is an image from a chapter, let's update the chapter body(s)
        if (image.Chapter != null)
        {
            foreach (var bodyText in image.Chapter.Body.Texts)
            {
                var chapterDoc = new HtmlDocument();
                chapterDoc.LoadHtml(bodyText.Text);
                var match = chapterDoc.DocumentNode.SelectSingleNode(@$"//img[@alt='{image.Id}']");
                if (match != null)
                {
                    match.Attributes["src"].Value = newUrl;
                }
            }
        }

        await _dbContext.SaveChangesAsync();
    }

    public async Task<NovelUpdateRequestedEvent> QueueNovelImport(string novelUrl)
    {
        var importNovelRequestEvent = new NovelUpdateRequestedEvent()
        {
            NovelUrl = novelUrl
        };
        await _eventBus.Publish(importNovelRequestEvent);
        return importNovelRequestEvent;
    }

    public async Task<ChapterPullRequestedEvent> QueueChapterPull(uint novelId, uint chapterNumber)
    {
        var chapterPullEvent = new ChapterPullRequestedEvent()
        {
            NovelId = novelId,
            ChapterNumber = chapterNumber
        };
        await _eventBus.Publish(chapterPullEvent);
        return chapterPullEvent;
    }
}
@@ -8,5 +8,5 @@ public interface ISourceAdapter
    public SourceDescriptor SourceDescriptor { get; }
    public Task<bool> CanProcessNovel(string url);
    public Task<NovelMetadata> GetMetadata(string novelUrl);
    public Task<string> GetRawChapter(string chapterUrl);
    public Task<ChapterFetchResult> GetRawChapter(string chapterUrl);
}
@@ -81,6 +81,21 @@ public class NovelpiaAdapter : ISourceAdapter
        novel.AuthorName = authorMatch.Groups[2].Value;
        novel.AuthorUrl = authorMatch.Groups[2].Value;

        // Cover image URL
        var coverMatch = Regex.Match(novelData, @"href=""(//images\.novelpia\.com/imagebox/cover/.+?\.file)""");
        string coverImageUrl = coverMatch.Groups[1].Value;
        if (string.IsNullOrEmpty(coverImageUrl))
        {
            coverMatch = Regex.Match(novelData, @"src=""(//images\.novelpia\.com/imagebox/cover/.+?\.file)""");
            coverImageUrl = coverMatch.Groups[1].Value;
        }

        novel.CoverImage = new ImageData()
        {
            Url = coverImageUrl,
            Data = await GetImageData(coverImageUrl),
        };

        // Some badge info
        var badgeSet = Regex.Match(novelData, @"(?s)<p\s+class=""in-badge"">(.*?)<\/p>");
        var badgeMatches = Regex.Matches(badgeSet.Groups[1].Value, @"<span[^>]*>(.*?)<\/span>");
@@ -160,7 +175,7 @@ public class NovelpiaAdapter : ISourceAdapter
        return novel;
    }

    public async Task<string> GetRawChapter(string chapterUrl)
    public async Task<ChapterFetchResult> GetRawChapter(string chapterUrl)
    {
        var chapterId = uint.Parse(Regex.Match(chapterUrl, ChapterIdRegex).Groups[1].Value);
        var endpoint = ChapterDownloadEndpoint + chapterId;
@@ -171,6 +186,11 @@ public class NovelpiaAdapter : ISourceAdapter
        {
            throw new Exception();
        }

        var fetchResult = new ChapterFetchResult()
        {
            ImageData = new List<ImageData>()
        };

        StringBuilder builder = new StringBuilder();
        using var doc = JsonDocument.Parse(responseContent);
@@ -182,10 +202,20 @@ public class NovelpiaAdapter : ISourceAdapter
        foreach (JsonElement item in sArray.EnumerateArray())
        {
            string text = item.GetProperty("text").GetString();
            var imageMatch = Regex.Match(text, @"<img.+?src=\""(.+?)\"".+?>");
            if (text.Contains("cover-wrapper"))
            {
                continue;
            }
            if (imageMatch.Success)
            {
                var url = imageMatch.Groups[1].Value;
                fetchResult.ImageData.Add(new ImageData()
                {
                    Url = url,
                    Data = await GetImageData(url)
                });
            }
            if (text.Contains("opacity: 0"))
            {
                continue;
@@ -193,8 +223,24 @@ public class NovelpiaAdapter : ISourceAdapter

            builder.Append(WebUtility.HtmlDecode(text));
        }
        fetchResult.Text = builder.ToString();

        return builder.ToString();

        return fetchResult;
    }

    private async Task<byte[]> GetImageData(string url)
    {
        if (!url.StartsWith("http"))
        {
            url = "https:" + url;
        }

        var image = await _httpClient.GetAsync(url);
        if (!image.IsSuccessStatusCode)
        {
            _logger.LogError("Attempting to fetch image with url {imgUrl} returned status code {code}.", url, image.StatusCode);
            throw new Exception();
        }
        return await image.Content.ReadAsByteArrayAsync();
    }
}
@@ -52,10 +52,15 @@ public class NovelpiaAuthMessageHandler : DelegatingHandler
        var response = await _httpClient.SendAsync(loginMessage);
        using (var streamReader = new StreamReader(response.Content.ReadAsStream()))
        {
            if (streamReader.ReadToEnd().Contains(LoginSuccessMessage))
            var message = await streamReader.ReadToEndAsync();
            if (message.Contains(LoginSuccessMessage))
            {
                _cache.Set(CacheKey, loginKey);
            }
            else
            {
                throw new Exception("An error occured while retrieving the login key. Message: " + message);
            }
        }
@@ -9,6 +9,9 @@
    "Username": "REPLACE_ME",
    "Password": "REPLACE_ME"
  },
  "UpdateService": {
    "PendingImageUrl": "https://localhost:7247/api/pendingupload.png"
  },
  "ConnectionStrings": {
    "DefaultConnection": "Host=localhost;Database=FictionArchive_NovelService;Username=postgres;password=postgres"
  },
@@ -16,5 +19,15 @@
    "ConnectionString": "amqp://localhost",
    "ClientIdentifier": "NovelService"
  },
  "AllowedHosts": "*"
  "AllowedHosts": "*",
  "OIDC": {
    "Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ValidateIssuer": true,
    "ValidateAudience": true,
    "ValidateLifetime": true,
    "ValidateIssuerSigningKey": true
  }
}
@@ -1,6 +1,6 @@
{
  "subgraph": "Novels",
  "http": {
    "baseAddress": "http://localhost:5101/graphql"
    "baseAddress": "https://localhost:7208/graphql"
  }
}
@@ -1,6 +1,8 @@
using System.Data;
using FictionArchive.Service.SchedulerService.Models;
using FictionArchive.Service.SchedulerService.Services;
using FictionArchive.Service.Shared.Constants;
using HotChocolate.Authorization;
using HotChocolate.Types;
using Quartz;

@@ -10,18 +12,21 @@ public class Mutation
{
    [Error<DuplicateNameException>]
    [Error<FormatException>]
    [Authorize(Roles = [AuthorizationConstants.Roles.Admin])]
    public async Task<SchedulerJob> ScheduleEventJob(string key, string description, string eventType, string eventData, string cronSchedule, JobManagerService jobManager)
    {
        return await jobManager.ScheduleEventJob(key, description, eventType, eventData, cronSchedule);
    }

    [Error<JobPersistenceException>]
    [Authorize(Roles = [AuthorizationConstants.Roles.Admin])]
    public async Task<bool> RunJob(string jobKey, JobManagerService jobManager)
    {
        return await jobManager.TriggerJob(jobKey);
    }

    [Error<KeyNotFoundException>]
    [Authorize(Roles = [AuthorizationConstants.Roles.Admin])]
    public async Task<bool> DeleteJob(string jobKey, JobManagerService jobManager)
    {
        bool deleted = await jobManager.DeleteJob(jobKey);

@@ -1,5 +1,6 @@
using FictionArchive.Service.SchedulerService.GraphQL;
using FictionArchive.Service.SchedulerService.Services;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using Quartz;
@@ -11,54 +12,79 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);

        // Services
        builder.Services.AddDefaultGraphQl<Query, Mutation>();
        builder.Services.AddDefaultGraphQl<Query, Mutation>()
            .AddAuthorization();
        builder.Services.AddHealthChecks();
        builder.Services.AddTransient<JobManagerService>();

        // Authentication & Authorization
        builder.Services.AddOidcAuthentication(builder.Configuration);
        builder.Services.AddFictionArchiveAuthorization();

        #region Database

        builder.Services.RegisterDbContext<SchedulerServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<SchedulerServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);

        #endregion

        #region Event Bus

        builder.Services.AddRabbitMQ(opt =>
        if (!isSchemaExport)
        {
            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
        });

            builder.Services.AddRabbitMQ(opt =>
            {
                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
            });
        }

        #endregion

        #region Quartz

        builder.Services.AddQuartz(opt =>
        if (isSchemaExport)
        {
            opt.UsePersistentStore(pso =>
            // Schema export mode: use in-memory store (no DB connection needed)
            builder.Services.AddQuartz(opt =>
            {
                pso.UsePostgres(pgsql =>
                {
                    pgsql.ConnectionString = builder.Configuration.GetConnectionString("DefaultConnection");
                    pgsql.UseDriverDelegate<PostgreSQLDelegate>();
                    pgsql.TablePrefix = "quartz.qrtz_"; // Needed for Postgres due to the differing schema used
                });
                pso.UseNewtonsoftJsonSerializer();
                opt.UseInMemoryStore();
            });
        });
        builder.Services.AddQuartzHostedService(opt =>
        }
        else
        {
            opt.WaitForJobsToComplete = true;
        });

            builder.Services.AddQuartz(opt =>
            {
                opt.UsePersistentStore(pso =>
                {
                    pso.UsePostgres(pgsql =>
                    {
                        pgsql.ConnectionString = builder.Configuration.GetConnectionString("DefaultConnection");
                        pgsql.UseDriverDelegate<PostgreSQLDelegate>();
                        pgsql.TablePrefix = "quartz.qrtz_"; // Needed for Postgres due to the differing schema used
                    });
                    pso.UseNewtonsoftJsonSerializer();
                });
            });
            builder.Services.AddQuartzHostedService(opt =>
            {
                opt.WaitForJobsToComplete = true;
            });
        }

        #endregion

        var app = builder.Build();

        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<SchedulerServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -66,7 +92,10 @@ public class Program
        app.UseHttpsRedirection();

        app.MapHealthChecks("/healthz");

        app.UseAuthentication();
        app.UseAuthorization();

        app.MapGraphQL();

        app.RunWithGraphQLCommands(args);
@@ -12,5 +12,15 @@
  "ConnectionStrings": {
    "DefaultConnection": "Host=localhost;Database=FictionArchive_SchedulerService;Username=postgres;password=postgres"
  },
  "AllowedHosts": "*"
  "AllowedHosts": "*",
  "OIDC": {
    "Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ValidateIssuer": true,
    "ValidateAudience": true,
    "ValidateLifetime": true,
    "ValidateIssuerSigningKey": true
  }
}
@@ -0,0 +1,15 @@
namespace FictionArchive.Service.Shared.Constants;

public static class AuthorizationConstants
{
    public static class Roles
    {
        public const string Admin = "admin";
    }

    public static class Policies
    {
        public const string Admin = "Admin";
        public const string User = "User";
    }
}
@@ -0,0 +1,168 @@
using Microsoft.AspNetCore.Authentication.JwtBearer;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
using FictionArchive.Service.Shared.Constants;
using FictionArchive.Service.Shared.Models.Authentication;
using System.Linq;

namespace FictionArchive.Service.Shared.Extensions;

public static class AuthenticationExtensions
{
    public static IServiceCollection AddOidcAuthentication(this IServiceCollection services, IConfiguration configuration)
    {
        var oidcConfig = configuration.GetSection("OIDC").Get<OidcConfiguration>();

        if (oidcConfig == null)
        {
            throw new InvalidOperationException("OIDC configuration is required but not found in app settings");
        }

        ValidateOidcConfiguration(oidcConfig);

        services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme)
            .AddJwtBearer(options =>
            {
                options.Authority = oidcConfig.Authority;
                options.Audience = oidcConfig.Audience;
                options.RequireHttpsMetadata = !string.IsNullOrEmpty(oidcConfig.Authority) && oidcConfig.Authority.StartsWith("https://");

                options.TokenValidationParameters = new TokenValidationParameters
                {
                    ValidateIssuer = oidcConfig.ValidateIssuer,
                    ValidIssuer = oidcConfig.ValidIssuer,
                    ValidateAudience = oidcConfig.ValidateAudience,
                    ValidateLifetime = oidcConfig.ValidateLifetime,
                    ValidateIssuerSigningKey = oidcConfig.ValidateIssuerSigningKey,
                    ClockSkew = TimeSpan.FromMinutes(5)
                };

                options.Events = CreateLoggingJwtBearerEvents();
            });

        return services;
    }

    private static JwtBearerEvents CreateLoggingJwtBearerEvents(JwtBearerEvents? existingEvents = null)
    {
        return new JwtBearerEvents
        {
            OnMessageReceived = existingEvents?.OnMessageReceived ?? (_ => Task.CompletedTask),
            OnAuthenticationFailed = context =>
            {
                var logger = context.HttpContext.RequestServices.GetRequiredService<ILoggerFactory>()
                    .CreateLogger("JwtBearerAuthentication");

                logger.LogWarning(context.Exception, "JWT authentication failed: {Message}", context.Exception.Message);

                return existingEvents?.OnAuthenticationFailed?.Invoke(context) ?? Task.CompletedTask;
            },
            OnChallenge = context =>
            {
                var logger = context.HttpContext.RequestServices.GetRequiredService<ILoggerFactory>()
                    .CreateLogger("JwtBearerAuthentication");

                logger.LogDebug(
                    "JWT challenge issued. Error: {Error}, ErrorDescription: {ErrorDescription}",
                    context.Error,
                    context.ErrorDescription);

                return existingEvents?.OnChallenge?.Invoke(context) ?? Task.CompletedTask;
            },
            OnTokenValidated = context =>
            {
                var logger = context.HttpContext.RequestServices.GetRequiredService<ILoggerFactory>()
                    .CreateLogger("JwtBearerAuthentication");

                logger.LogDebug(
                    "JWT token validated for subject: {Subject}",
                    context.Principal?.FindFirst("sub")?.Value ?? "unknown");

                return existingEvents?.OnTokenValidated?.Invoke(context) ?? Task.CompletedTask;
            }
        };
    }

    public static IServiceCollection AddOidcCookieAuthentication(this IServiceCollection services, IConfiguration configuration, string cookieName = "fa_session")
    {
        var oidcConfig = configuration.GetSection("OIDC").Get<OidcConfiguration>();

        if (oidcConfig == null)
        {
            throw new InvalidOperationException("OIDC configuration is required but not found in app settings");
        }

        ValidateOidcConfiguration(oidcConfig);

        services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme)
            .AddJwtBearer(options =>
            {
                options.Authority = oidcConfig.Authority;
                options.Audience = oidcConfig.Audience;
                options.RequireHttpsMetadata = !string.IsNullOrEmpty(oidcConfig.Authority) && oidcConfig.Authority.StartsWith("https://");

                var cookieEvents = new JwtBearerEvents
                {
                    OnMessageReceived = context =>
                    {
                        // Try to get token from cookie first, then from Authorization header
                        if (context.Request.Cookies.ContainsKey(cookieName))
                        {
                            context.Token = context.Request.Cookies[cookieName];
                        }

                        return Task.CompletedTask;
                    }
                };
                options.Events = CreateLoggingJwtBearerEvents(cookieEvents);

                options.TokenValidationParameters = new TokenValidationParameters
                {
                    ValidateIssuer = oidcConfig.ValidateIssuer,
                    ValidIssuer = oidcConfig.ValidIssuer,
                    ValidateAudience = oidcConfig.ValidateAudience,
                    ValidateLifetime = oidcConfig.ValidateLifetime,
                    ValidateIssuerSigningKey = oidcConfig.ValidateIssuerSigningKey,
                    ClockSkew = TimeSpan.FromMinutes(5)
                };
            });

        return services;
    }

    public static IServiceCollection AddFictionArchiveAuthorization(this IServiceCollection services)
    {
        services.AddAuthorizationBuilder()
            .AddPolicy(AuthorizationConstants.Policies.Admin, policy => policy.RequireRole(AuthorizationConstants.Roles.Admin))
            .AddPolicy(AuthorizationConstants.Policies.User, policy => policy.RequireAuthenticatedUser());

        return services;
    }

    private static void ValidateOidcConfiguration(OidcConfiguration config)
    {
        var errors = new List<string>();

        if (string.IsNullOrWhiteSpace(config.Authority))
            errors.Add("OIDC Authority is required");

        if (string.IsNullOrWhiteSpace(config.ClientId))
            errors.Add("OIDC ClientId is required");

        if (string.IsNullOrWhiteSpace(config.Audience))
            errors.Add("OIDC Audience is required");

        if (!Uri.TryCreate(config.Authority, UriKind.Absolute, out var authorityUri))
            errors.Add($"OIDC Authority '{config.Authority}' is not a valid URI");
        else if (!authorityUri.Scheme.Equals("https", StringComparison.OrdinalIgnoreCase) &&
                 !authorityUri.Host.Equals("localhost", StringComparison.OrdinalIgnoreCase))
            errors.Add("OIDC Authority must use HTTPS unless running on localhost");

        if (errors.Any())
        {
            throw new InvalidOperationException($"OIDC configuration validation failed:{Environment.NewLine}{string.Join(Environment.NewLine, errors)}");
        }
    }
}
@@ -6,16 +6,29 @@ namespace FictionArchive.Service.Shared.Extensions;

public static class DatabaseExtensions
{
    public static IServiceCollection RegisterDbContext<TContext>(this IServiceCollection services,
        string connectionString) where TContext : FictionArchiveDbContext
    public static IServiceCollection RegisterDbContext<TContext>(
        this IServiceCollection services,
        string connectionString,
        bool skipInfrastructure = false) where TContext : FictionArchiveDbContext
    {
        services.AddDbContext<TContext>(options =>
        if (skipInfrastructure)
        {
            options.UseNpgsql(connectionString, o =>
            // For schema export: use in-memory provider to allow EF Core entity discovery
            services.AddDbContext<TContext>(options =>
            {
                o.UseNodaTime();
                options.UseInMemoryDatabase($"SchemaExport_{typeof(TContext).Name}");
            });
            });
        }
        else
        {
            services.AddDbContext<TContext>(options =>
            {
                options.UseNpgsql(connectionString, o =>
                {
                    o.UseNodaTime();
                });
            });
        }
        return services;
    }
}
@@ -9,6 +9,7 @@
  <ItemGroup>
    <PackageReference Include="GraphQL.Server.Ui.GraphiQL" Version="8.3.3" />
    <PackageReference Include="HotChocolate.AspNetCore" Version="15.1.11" />
    <PackageReference Include="HotChocolate.AspNetCore.Authorization" Version="15.1.11" />
    <PackageReference Include="HotChocolate.AspNetCore.CommandLine" Version="15.1.11" />
    <PackageReference Include="HotChocolate.Data.EntityFramework" Version="15.1.11" />
    <PackageReference Include="HotChocolate.Types.Scalars" Version="15.1.11" />
@@ -18,6 +19,7 @@
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.11" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.Relational" Version="9.0.11" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="9.0.11">
      <PrivateAssets>all</PrivateAssets>
@@ -28,6 +30,7 @@
    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.4" />
    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.NodaTime" Version="9.0.4" />
    <PackageReference Include="RabbitMQ.Client" Version="7.2.0" />
    <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.11" />
  </ItemGroup>

  <ItemGroup>

@@ -0,0 +1,13 @@
namespace FictionArchive.Service.Shared.Models.Authentication;

public class OidcConfiguration
{
    public string Authority { get; set; } = string.Empty;
    public string ClientId { get; set; } = string.Empty;
    public string Audience { get; set; } = string.Empty;
    public string? ValidIssuer { get; set; }
    public bool ValidateIssuer { get; set; } = true;
    public bool ValidateAudience { get; set; } = true;
    public bool ValidateLifetime { get; set; } = true;
    public bool ValidateIssuerSigningKey { get; set; } = true;
}
22
FictionArchive.Service.Shared/SchemaExportDetector.cs
Normal file
@@ -0,0 +1,22 @@
namespace FictionArchive.Service.Shared;

/// <summary>
/// Detects if the application is running in schema export mode (for HotChocolate CLI commands).
/// In this mode, infrastructure like RabbitMQ and databases should not be initialized.
/// </summary>
public static class SchemaExportDetector
{
    /// <summary>
    /// Checks if the current run is a schema export command.
    /// </summary>
    /// <param name="args">Command line arguments passed to Main()</param>
    /// <returns>True if running schema export, false otherwise</returns>
    public static bool IsSchemaExportMode(string[] args)
    {
        // HotChocolate CLI pattern: "schema export" after "--" delimiter
        // Handles: dotnet run -- schema export --output schema.graphql
        var normalizedArgs = args.SkipWhile(a => a == "--").ToArray();
        return normalizedArgs.Length > 0 &&
               normalizedArgs[0].Equals("schema", StringComparison.OrdinalIgnoreCase);
    }
}
@@ -5,15 +5,17 @@ using FictionArchive.Service.TranslationService.Models.Enums;
using FictionArchive.Service.TranslationService.Services;
using FictionArchive.Service.TranslationService.Services.Database;
using FictionArchive.Service.TranslationService.Services.TranslationEngines;
using HotChocolate.Authorization;

namespace FictionArchive.Service.TranslationService.GraphQL;

public class Mutation
{
    [Authorize]
    public async Task<TranslationResult> TranslateText(string text, Language from, Language to, string translationEngineKey, TranslationEngineService translationEngineService)
    {
        var result = await translationEngineService.Translate(from, to, text, translationEngineKey);

        return result;
    }
}
@@ -2,19 +2,22 @@ using FictionArchive.Service.TranslationService.Models;
using FictionArchive.Service.TranslationService.Models.Database;
using FictionArchive.Service.TranslationService.Services.Database;
using FictionArchive.Service.TranslationService.Services.TranslationEngines;
using HotChocolate.Authorization;
using Microsoft.EntityFrameworkCore;

namespace FictionArchive.Service.TranslationService.GraphQL;

public class Query
{
    [Authorize]
    [UseFiltering]
    [UseSorting]
    public IEnumerable<TranslationEngineDescriptor> GetTranslationEngines(IEnumerable<ITranslationEngine> engines)
    {
        return engines.Select(engine => engine.Descriptor);
    }

    [Authorize]
    [UsePaging]
    [UseProjection]
    [UseFiltering]

@@ -1,5 +1,6 @@
using DeepL;
using FictionArchive.Common.Extensions;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.Shared.Services.GraphQL;
@@ -18,6 +19,8 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);
        builder.AddLocalAppsettings();

@@ -25,24 +28,30 @@ public class Program

        #region Event Bus

        builder.Services.AddRabbitMQ(opt =>
        if (!isSchemaExport)
        {
            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
        })
            .Subscribe<TranslationRequestCreatedEvent, TranslationRequestCreatedEventHandler>();

            builder.Services.AddRabbitMQ(opt =>
                {
                    builder.Configuration.GetSection("RabbitMQ").Bind(opt);
                })
                .Subscribe<TranslationRequestCreatedEvent, TranslationRequestCreatedEventHandler>();
        }

        #endregion

        #region Database

        builder.Services.RegisterDbContext<TranslationServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<TranslationServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);

        #endregion

        #region GraphQL

        builder.Services.AddDefaultGraphQl<Query, Mutation>();
        builder.Services.AddDefaultGraphQl<Query, Mutation>()
            .AddAuthorization();

        #endregion

@@ -55,14 +64,19 @@ public class Program
        builder.Services.AddTransient<ITranslationEngine, DeepLTranslationEngine>();

        builder.Services.AddTransient<TranslationEngineService>();

        #endregion

        // Authentication & Authorization
        builder.Services.AddOidcAuthentication(builder.Configuration);
        builder.Services.AddFictionArchiveAuthorization();

        var app = builder.Build();

        // Update database
        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<TranslationServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -70,7 +84,10 @@ public class Program
        app.UseHttpsRedirection();

        app.MapHealthChecks("/healthz");

        app.UseAuthentication();
        app.UseAuthorization();

        app.MapGraphQL();

        app.RunWithGraphQLCommands(args);

@@ -15,5 +15,15 @@
    "ConnectionString": "amqp://localhost",
    "ClientIdentifier": "TranslationService"
  },
  "AllowedHosts": "*"
  "AllowedHosts": "*",
  "OIDC": {
    "Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ValidateIssuer": true,
    "ValidateAudience": true,
    "ValidateLifetime": true,
    "ValidateIssuerSigningKey": true
  }
}
@@ -1,10 +1,13 @@
using FictionArchive.Service.Shared.Constants;
using FictionArchive.Service.UserService.Models.Database;
using FictionArchive.Service.UserService.Services;
using HotChocolate.Authorization;

namespace FictionArchive.Service.UserService.GraphQL;

public class Mutation
{
    [Authorize(Roles = [AuthorizationConstants.Roles.Admin])]
    public async Task<User> RegisterUser(string username, string email, string oAuthProviderId,
        string? inviterOAuthProviderId, UserManagementService userManagementService)
    {

@@ -1,10 +1,12 @@
using FictionArchive.Service.UserService.Models.Database;
using FictionArchive.Service.UserService.Services;
using HotChocolate.Authorization;

namespace FictionArchive.Service.UserService.GraphQL;

public class Query
{
    [Authorize]
    public async Task<IQueryable<User>> GetUsers(UserManagementService userManagementService)
    {
        return userManagementService.GetUsers();

@@ -1,3 +1,5 @@
using FictionArchive.Common.Extensions;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.UserService.GraphQL;
@@ -11,40 +13,57 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);

        builder.AddLocalAppsettings();

        #region Event Bus

        builder.Services.AddRabbitMQ(opt =>
        if (!isSchemaExport)
        {
            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
        })
            .Subscribe<AuthUserAddedEvent, AuthUserAddedEventHandler>();

            builder.Services.AddRabbitMQ(opt =>
                {
                    builder.Configuration.GetSection("RabbitMQ").Bind(opt);
                })
                .Subscribe<AuthUserAddedEvent, AuthUserAddedEventHandler>();
        }

        #endregion

        #region GraphQL

        builder.Services.AddDefaultGraphQl<Query, Mutation>();
        builder.Services.AddDefaultGraphQl<Query, Mutation>()
            .AddAuthorization();

        #endregion

        builder.Services.RegisterDbContext<UserServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<UserServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);
        builder.Services.AddTransient<UserManagementService>();

        builder.Services.AddHealthChecks();

        // Authentication & Authorization
        builder.Services.AddOidcAuthentication(builder.Configuration);
        builder.Services.AddFictionArchiveAuthorization();

        var app = builder.Build();

        // Update database
        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<UserServiceDbContext>();
            dbContext.UpdateDatabase();
        }

        app.UseAuthentication();
        app.UseAuthorization();

        app.MapGraphQL();

        app.MapHealthChecks("/healthz");

        app.RunWithGraphQLCommands(args);

@@ -12,5 +12,15 @@
    "ConnectionString": "amqp://localhost",
    "ClientIdentifier": "UserService"
  },
  "AllowedHosts": "*"
  "AllowedHosts": "*",
  "OIDC": {
    "Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
    "ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
    "ValidateIssuer": true,
    "ValidateAudience": true,
    "ValidateLifetime": true,
    "ValidateIssuerSigningKey": true
  }
}
@@ -16,6 +16,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.User
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.AuthenticationService", "FictionArchive.Service.AuthenticationService\FictionArchive.Service.AuthenticationService.csproj", "{70C4AE82-B01E-421D-B590-C0F47E63CD0C}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.FileService", "FictionArchive.Service.FileService\FictionArchive.Service.FileService.csproj", "{EC64A336-F8A0-4BED-9CA3-1B05AD00631D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FictionArchive.Service.NovelService.Tests", "FictionArchive.Service.NovelService.Tests\FictionArchive.Service.NovelService.Tests.csproj", "{166E645E-9DFB-44E8-8CC8-FA249A11679F}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
@@ -54,5 +58,13 @@ Global
        {70C4AE82-B01E-421D-B590-C0F47E63CD0C}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {70C4AE82-B01E-421D-B590-C0F47E63CD0C}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {70C4AE82-B01E-421D-B590-C0F47E63CD0C}.Release|Any CPU.Build.0 = Release|Any CPU
        {EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {EC64A336-F8A0-4BED-9CA3-1B05AD00631D}.Release|Any CPU.Build.0 = Release|Any CPU
        {166E645E-9DFB-44E8-8CC8-FA249A11679F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {166E645E-9DFB-44E8-8CC8-FA249A11679F}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {166E645E-9DFB-44E8-8CC8-FA249A11679F}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {166E645E-9DFB-44E8-8CC8-FA249A11679F}.Release|Any CPU.Build.0 = Release|Any CPU
    EndGlobalSection
EndGlobal

10
README.md
Normal file
@@ -0,0 +1,10 @@
# FictionArchive

A distributed microservices-based web application for managing fiction and novel content.

## Documentation

- [README](Documentation/README.md) - Getting started and project overview
- [ARCHITECTURE](Documentation/ARCHITECTURE.md) - System architecture and design
- [CICD](Documentation/CICD.md) - CI/CD pipeline configuration
- [AGENTS](Documentation/AGENTS.md) - Development guidelines and coding standards
202 docker-compose.yml Normal file
@@ -0,0 +1,202 @@
services:
  # ===========================================
  # Infrastructure
  # ===========================================
  postgres:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3-management-alpine
    environment:
      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-guest}
      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD:-guest}
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "check_running"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  # ===========================================
  # Backend Services
  # ===========================================
  novel-service:
    image: git.orfl.xyz/conco/fictionarchive-novel-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_NovelService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      Novelpia__Username: ${NOVELPIA_USERNAME}
      Novelpia__Password: ${NOVELPIA_PASSWORD}
      NovelUpdateService__PendingImageUrl: https://files.fictionarchive.orfl.xyz/api/pendingupload.png
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  translation-service:
    image: git.orfl.xyz/conco/fictionarchive-translation-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_TranslationService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      DeepL__ApiKey: ${DEEPL_API_KEY}
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  scheduler-service:
    image: git.orfl.xyz/conco/fictionarchive-scheduler-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_SchedulerService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  user-service:
    image: git.orfl.xyz/conco/fictionarchive-user-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_UserService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  authentication-service:
    image: git.orfl.xyz/conco/fictionarchive-authentication-service:latest
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  file-service:
    image: git.orfl.xyz/conco/fictionarchive-file-service:latest
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      S3__Endpoint: ${S3_ENDPOINT:-https://s3.orfl.xyz}
      S3__Bucket: ${S3_BUCKET:-fictionarchive}
      S3__AccessKey: ${S3_ACCESS_KEY}
      S3__SecretKey: ${S3_SECRET_KEY}
      Proxy__BaseUrl: https://files.orfl.xyz/api
      OIDC__Authority: https://auth.orfl.xyz/application/o/fictionarchive/
      OIDC__ClientId: fictionarchive-files
      OIDC__Audience: fictionarchive-api
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.file-service.rule=Host(`files.orfl.xyz`)"
      - "traefik.http.routers.file-service.entrypoints=websecure"
      - "traefik.http.routers.file-service.tls.certresolver=letsencrypt"
      - "traefik.http.services.file-service.loadbalancer.server.port=8080"
    depends_on:
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  # ===========================================
  # API Gateway
  # ===========================================
  api-gateway:
    image: git.orfl.xyz/conco/fictionarchive-api:latest
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      OIDC__Authority: https://auth.orfl.xyz/application/o/fictionarchive/
      OIDC__ClientId: fictionarchive-api
      OIDC__Audience: fictionarchive-api
      Cors__AllowedOrigin: https://fictionarchive.orfl.xyz
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.api-gateway.rule=Host(`api.fictionarchive.orfl.xyz`)"
      - "traefik.http.routers.api-gateway.entrypoints=websecure"
      - "traefik.http.routers.api-gateway.tls.certresolver=letsencrypt"
      - "traefik.http.services.api-gateway.loadbalancer.server.port=8080"
    depends_on:
      - novel-service
      - translation-service
      - scheduler-service
      - user-service
      - authentication-service
      - file-service
    restart: unless-stopped

  # ===========================================
  # Frontend
  # ===========================================
  frontend:
    image: git.orfl.xyz/conco/fictionarchive-frontend:latest
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost/"]
      interval: 30s
      timeout: 10s
      retries: 3
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.frontend.rule=Host(`fictionarchive.orfl.xyz`)"
      - "traefik.http.routers.frontend.entrypoints=websecure"
      - "traefik.http.routers.frontend.tls.certresolver=letsencrypt"
      - "traefik.http.services.frontend.loadbalancer.server.port=80"
    restart: unless-stopped

volumes:
  postgres_data:
  rabbitmq_data:
  letsencrypt:
38 fictionarchive-web-astro/.dockerignore Normal file
@@ -0,0 +1,38 @@
# Dependencies
node_modules

# Build output
dist

# Development files
.env
.env.local
.env.*.local

# IDE
.vscode
.idea
*.swp
*.swo

# OS
.DS_Store
Thumbs.db

# Git
.git
.gitignore

# Logs
*.log
npm-debug.log*

# Test files
*.test.*
*.spec.*
__tests__
coverage

# Documentation
README.md
CHANGELOG.md
12 fictionarchive-web-astro/.env.example Normal file
@@ -0,0 +1,12 @@
# GraphQL endpoint
PUBLIC_GRAPHQL_URI=https://localhost:7063/graphql/

# OIDC Configuration
PUBLIC_OIDC_AUTHORITY=https://auth.orfl.xyz/application/o/fiction-archive/
PUBLIC_OIDC_CLIENT_ID=ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh
PUBLIC_OIDC_REDIRECT_URI=http://localhost:4321/
PUBLIC_OIDC_POST_LOGOUT_REDIRECT_URI=http://localhost:4321/
PUBLIC_OIDC_SCOPE=openid profile email

# Optional: Token for GraphQL codegen (for authenticated schema introspection)
# CODEGEN_TOKEN=your_token_here
24 fictionarchive-web-astro/.gitignore vendored Normal file
@@ -0,0 +1,24 @@
# build output
dist/
# generated types
.astro/

# dependencies
node_modules/

# logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*


# environment variables
.env
.env.production

# macOS-specific files
.DS_Store

# jetbrains setting folder
.idea/
4 fictionarchive-web-astro/.vscode/extensions.json vendored Normal file
@@ -0,0 +1,4 @@
{
  "recommendations": ["astro-build.astro-vscode"],
  "unwantedRecommendations": []
}
11 fictionarchive-web-astro/.vscode/launch.json vendored Normal file
@@ -0,0 +1,11 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "command": "./node_modules/.bin/astro dev",
      "name": "Development server",
      "request": "launch",
      "type": "node-terminal"
    }
  ]
}
45 fictionarchive-web-astro/Dockerfile Normal file
@@ -0,0 +1,45 @@
FROM node:20-alpine AS build

WORKDIR /app

# Build arguments for environment variables
ARG PUBLIC_GRAPHQL_URI
ARG PUBLIC_OIDC_AUTHORITY
ARG PUBLIC_OIDC_CLIENT_ID
ARG PUBLIC_OIDC_REDIRECT_URI
ARG PUBLIC_OIDC_POST_LOGOUT_REDIRECT_URI
ARG PUBLIC_OIDC_SCOPE

# Set environment variables for build
ENV PUBLIC_GRAPHQL_URI=$PUBLIC_GRAPHQL_URI
ENV PUBLIC_OIDC_AUTHORITY=$PUBLIC_OIDC_AUTHORITY
ENV PUBLIC_OIDC_CLIENT_ID=$PUBLIC_OIDC_CLIENT_ID
ENV PUBLIC_OIDC_REDIRECT_URI=$PUBLIC_OIDC_REDIRECT_URI
ENV PUBLIC_OIDC_POST_LOGOUT_REDIRECT_URI=$PUBLIC_OIDC_POST_LOGOUT_REDIRECT_URI
ENV PUBLIC_OIDC_SCOPE=$PUBLIC_OIDC_SCOPE

# Install dependencies
COPY package*.json ./
RUN npm ci

# Copy source and build
COPY . .
RUN npm run build

# Production runtime
FROM node:20-alpine AS runtime

WORKDIR /app

# Copy built output and production dependencies
COPY --from=build /app/dist ./dist
COPY --from=build /app/node_modules ./node_modules
COPY --from=build /app/package.json ./

# Runtime configuration
ENV HOST=0.0.0.0
ENV PORT=80
EXPOSE 80

# Start the Node.js server
CMD ["node", "./dist/server/entry.mjs"]
43 fictionarchive-web-astro/README.md Normal file
@@ -0,0 +1,43 @@
# Astro Starter Kit: Minimal

```sh
npm create astro@latest -- --template minimal
```

> 🧑‍🚀 **Seasoned astronaut?** Delete this file. Have fun!

## 🚀 Project Structure

Inside of your Astro project, you'll see the following folders and files:

```text
/
├── public/
├── src/
│   └── pages/
│       └── index.astro
└── package.json
```

Astro looks for `.astro` or `.md` files in the `src/pages/` directory. Each page is exposed as a route based on its file name.

There's nothing special about `src/components/`, but that's where we like to put any Astro/React/Vue/Svelte/Preact components.

Any static assets, like images, can be placed in the `public/` directory.

## 🧞 Commands

All commands are run from the root of the project, from a terminal:

| Command                   | Action                                           |
| :------------------------ | :----------------------------------------------- |
| `npm install`             | Installs dependencies                            |
| `npm run dev`             | Starts local dev server at `localhost:4321`      |
| `npm run build`           | Build your production site to `./dist/`          |
| `npm run preview`         | Preview your build locally, before deploying     |
| `npm run astro ...`       | Run CLI commands like `astro add`, `astro check` |
| `npm run astro -- --help` | Get help using the Astro CLI                     |

## 👀 Want to learn more?

Feel free to check [our documentation](https://docs.astro.build) or jump into our [Discord server](https://astro.build/chat).
17 fictionarchive-web-astro/astro.config.mjs Normal file
@@ -0,0 +1,17 @@
import { defineConfig } from 'astro/config';
import svelte from '@astrojs/svelte';
import tailwindcss from '@tailwindcss/vite';
import node from '@astrojs/node';

export default defineConfig({
  output: 'server', // SSR mode - use prerender = true for static pages
  adapter: node({
    mode: 'standalone',
  }),
  integrations: [
    svelte(),
  ],
  vite: {
    plugins: [tailwindcss()],
  },
});
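As the inline comment notes, `output: 'server'` makes SSR the default, and an individual route can still opt back into static generation by exporting `prerender`. A minimal sketch, not part of this diff; the endpoint path and its body are purely illustrative:

```ts
// Hypothetical static endpoint, e.g. src/pages/robots.txt.ts (path is an assumption).
import type { APIRoute } from 'astro';

// Opt this one route out of SSR; it is rendered once at build time.
export const prerender = true;

export const GET: APIRoute = () =>
  new Response('User-agent: *\nAllow: /', {
    headers: { 'Content-Type': 'text/plain' },
  });
```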
28 fictionarchive-web-astro/codegen.ts Normal file
@@ -0,0 +1,28 @@
import type { CodegenConfig } from '@graphql-codegen/cli';
import * as dotenv from 'dotenv';

dotenv.config({ path: '.env.local' });
dotenv.config();

const schema = process.env.PUBLIC_GRAPHQL_URI ?? 'https://localhost:7063/graphql/';
const authToken = process.env.CODEGEN_TOKEN;

const config: CodegenConfig = {
  schema: {
    [schema]: authToken ? { headers: { Authorization: `Bearer ${authToken}` } } : {},
  },
  documents: 'src/**/*.graphql',
  generates: {
    'src/lib/graphql/__generated__/graphql.ts': {
      plugins: ['typescript', 'typescript-operations', 'typed-document-node'],
      config: {
        avoidOptionals: { field: true },
        enumsAsConst: true,
        skipTypename: true,
        useTypeImports: true,
      },
    },
  },
};

export default config;
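For context, a minimal sketch of how the typed documents generated by this config might be consumed with `@urql/core` (a dependency in package.json below). This is not part of the diff: `NovelsDocument` is a placeholder for whatever operation actually lives under `src/**/*.graphql`, and the snippet assumes the standard urql `Client` API with the cache and fetch exchanges.

```ts
// Hypothetical client-side module inside src/ (names and paths are assumptions).
import { Client, cacheExchange, fetchExchange } from '@urql/core';
// Generated by the codegen config above; the operation name is a placeholder.
import { NovelsDocument } from './lib/graphql/__generated__/graphql';

const client = new Client({
  url: import.meta.env.PUBLIC_GRAPHQL_URI,
  exchanges: [cacheExchange, fetchExchange],
});

// Execute the typed query once; result.data is typed from the generated document.
const result = await client.query(NovelsDocument, {}).toPromise();
if (result.error) {
  console.error('GraphQL query failed', result.error);
} else {
  console.log(result.data);
}
```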
16 fictionarchive-web-astro/components.json Normal file
@@ -0,0 +1,16 @@
{
  "$schema": "https://shadcn-svelte.com/schema.json",
  "tailwind": {
    "css": "src\\styles\\global.css",
    "baseColor": "gray"
  },
  "aliases": {
    "components": "$lib/components",
    "utils": "$lib/utils",
    "ui": "$lib/components/ui",
    "hooks": "$lib/hooks",
    "lib": "$lib"
  },
  "typescript": true,
  "registry": "https://shadcn-svelte.com/registry"
}
35 fictionarchive-web-astro/eslint.config.js Normal file
@@ -0,0 +1,35 @@
import js from '@eslint/js';
import tseslint from 'typescript-eslint';
import svelte from 'eslint-plugin-svelte';
import astro from 'eslint-plugin-astro';
import globals from 'globals';

export default tseslint.config(
  js.configs.recommended,
  ...tseslint.configs.recommended,
  ...svelte.configs['flat/recommended'],
  ...astro.configs.recommended,
  {
    languageOptions: {
      globals: {
        ...globals.browser,
        ...globals.node
      }
    }
  },
  {
    files: ['**/*.svelte'],
    languageOptions: {
      parserOptions: {
        parser: tseslint.parser
      }
    },
    rules: {
      // Disabled because we sanitize HTML with DOMPurify before rendering
      'svelte/no-at-html-tags': 'off'
    }
  },
  {
    ignores: ['node_modules/', 'dist/', '.astro/', 'src/lib/graphql/__generated__/']
  }
);
12253 fictionarchive-web-astro/package-lock.json generated Normal file
File diff suppressed because it is too large.
51 fictionarchive-web-astro/package.json Normal file
@@ -0,0 +1,51 @@
{
  "name": "fictionarchive-web-astro",
  "type": "module",
  "version": "0.0.1",
  "scripts": {
    "dev": "astro dev",
    "build": "astro build",
    "preview": "astro preview",
    "astro": "astro",
    "codegen": "graphql-codegen --config codegen.ts -r dotenv/config --use-system-ca",
    "lint": "eslint .",
    "lint:fix": "eslint . --fix"
  },
  "dependencies": {
    "@astrojs/node": "^9.5.1",
    "@astrojs/svelte": "^7.2.2",
    "@tailwindcss/vite": "^4.1.17",
    "@urql/core": "^6.0.1",
    "@urql/svelte": "^5.0.0",
    "astro": "^5.16.2",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
    "date-fns": "^4.1.0",
    "dompurify": "^3.3.0",
    "graphql": "^16.12.0",
    "oidc-client-ts": "^3.4.1",
    "svelte": "^5.45.2",
    "tailwind-merge": "^3.4.0",
    "tailwindcss": "^4.1.17",
    "typescript": "^5.9.3"
  },
  "devDependencies": {
    "@eslint/js": "^9.39.1",
    "@graphql-codegen/cli": "^6.1.0",
    "@graphql-codegen/typed-document-node": "^6.1.3",
    "@graphql-codegen/typescript": "^5.0.5",
    "@graphql-codegen/typescript-operations": "^5.0.5",
    "@internationalized/date": "^3.10.0",
    "@lucide/svelte": "^0.544.0",
    "@types/dompurify": "^3.0.5",
    "bits-ui": "^2.14.4",
    "dotenv": "^16.6.1",
    "eslint": "^9.39.1",
    "eslint-plugin-astro": "^1.5.0",
    "eslint-plugin-svelte": "^3.13.0",
    "globals": "^16.5.0",
    "tailwind-variants": "^3.2.2",
    "tw-animate-css": "^1.4.0",
    "typescript-eslint": "^8.48.0"
  }
}
9 fictionarchive-web-astro/public/favicon.svg Normal file
@@ -0,0 +1,9 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 128 128">
  <path d="M50.4 78.5a75.1 75.1 0 0 0-28.5 6.9l24.2-65.7c.7-2 1.9-3.2 3.4-3.2h29c1.5 0 2.7 1.2 3.4 3.2l24.2 65.7s-11.6-7-28.5-7L67 45.5c-.4-1.7-1.6-2.8-2.9-2.8-1.3 0-2.5 1.1-2.9 2.7L50.4 78.5Zm-1.1 28.2Zm-4.2-20.2c-2 6.6-.6 15.8 4.2 20.2a17.5 17.5 0 0 1 .2-.7 5.5 5.5 0 0 1 5.7-4.5c2.8.1 4.3 1.5 4.7 4.7.2 1.1.2 2.3.2 3.5v.4c0 2.7.7 5.2 2.2 7.4a13 13 0 0 0 5.7 4.9v-.3l-.2-.3c-1.8-5.6-.5-9.5 4.4-12.8l1.5-1a73 73 0 0 0 3.2-2.2 16 16 0 0 0 6.8-11.4c.3-2 .1-4-.6-6l-.8.6-1.6 1a37 37 0 0 1-22.4 2.7c-5-.7-9.7-2-13.2-6.2Z" />
  <style>
    path { fill: #000; }
    @media (prefers-color-scheme: dark) {
      path { fill: #FFF; }
    }
  </style>
</svg>
29 fictionarchive-web-astro/src/layouts/AppLayout.astro Normal file
@@ -0,0 +1,29 @@
---
import Navbar from '../lib/components/Navbar.svelte';
import AuthInit from '../lib/components/AuthInit.svelte';
import '../styles/global.css';

interface Props {
  title?: string;
}

const { title = 'FictionArchive' } = Astro.props;
---

<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <link rel="icon" type="image/svg+xml" href="/favicon.svg" />
    <meta name="generator" content={Astro.generator} />
    <title>{title}</title>
  </head>
  <body class="min-h-screen bg-background">
    <AuthInit client:load />
    <Navbar client:load />
    <main class="mx-auto flex max-w-6xl flex-col gap-6 px-4 py-8 sm:px-6 lg:px-8">
      <slot />
    </main>
  </body>
</html>
115 fictionarchive-web-astro/src/lib/auth/authStore.ts Normal file
@@ -0,0 +1,115 @@
import { writable, derived } from 'svelte/store';
import type { User } from 'oidc-client-ts';
import { userManager, isOidcConfigured } from './oidcConfig';

// Stores
export const user = writable<User | null>(null);
export const isLoading = writable(true);
export const isAuthenticated = derived(user, ($user) => $user !== null);
export const isConfigured = isOidcConfigured;

// Cookie management
function setCookieFromUser(u: User) {
  if (!u?.access_token) return;

  const isProduction = window.location.hostname !== 'localhost';
  const domain = isProduction ? '.orfl.xyz' : undefined;
  const secure = isProduction;
  const sameSite = isProduction ? 'None' : 'Lax';

  const cookieValue = `fa_session=${u.access_token}; path=/; ${secure ? 'secure; ' : ''}samesite=${sameSite}${domain ? `; domain=${domain}` : ''}`;
  document.cookie = cookieValue;
}

function clearFaSessionCookie() {
  const isProduction = window.location.hostname !== 'localhost';
  const domain = isProduction ? '.orfl.xyz' : undefined;

  const cookieValue = `fa_session=; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT${domain ? `; domain=${domain}` : ''}`;
  document.cookie = cookieValue;
}

// Track if callback has been handled to prevent double processing
let callbackHandled = false;

export async function initAuth() {
  if (!userManager) {
    isLoading.set(false);
    return;
  }

  // Handle callback if auth params are present
  const url = new URL(window.location.href);
  const hasAuthParams =
    url.searchParams.has('code') ||
    url.searchParams.has('id_token') ||
    url.searchParams.has('error');

  if (hasAuthParams && !callbackHandled) {
    callbackHandled = true;
    try {
      const result = await userManager.signinRedirectCallback();
      user.set(result ?? null);
      if (result) {
        setCookieFromUser(result);
      }
    } catch (e) {
      console.error('Failed to complete sign-in redirect', e);
    } finally {
      const cleanUrl = `${url.origin}${url.pathname}`;
      window.history.replaceState({}, document.title, cleanUrl);
    }
  }

  // Load existing user
  try {
    const loadedUser = await userManager.getUser();
    user.set(loadedUser ?? null);
    if (loadedUser) {
      setCookieFromUser(loadedUser);
    }
  } catch (e) {
    console.error('Failed to load user', e);
  }

  isLoading.set(false);

  // Event listeners
  userManager.events.addUserLoaded((u) => {
    user.set(u);
    setCookieFromUser(u);
  });

  userManager.events.addUserUnloaded(() => {
    user.set(null);
    clearFaSessionCookie();
  });

  userManager.events.addUserSignedOut(() => {
    user.set(null);
    clearFaSessionCookie();
  });
}

export async function login() {
  if (!userManager) {
    console.warn('OIDC is not configured; set PUBLIC_OIDC_* environment variables.');
    return;
  }
  await userManager.signinRedirect();
}

export async function logout() {
  if (!userManager) {
    console.warn('OIDC is not configured; set PUBLIC_OIDC_* environment variables.');
    return;
  }
  try {
    clearFaSessionCookie();
    await userManager.signoutRedirect();
  } catch (error) {
    console.error('Failed to sign out via redirect, clearing local session instead.', error);
    await userManager.removeUser();
    user.set(null);
  }
}
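This store mirrors the access token into an `fa_session` cookie, presumably so that server-rendered code can also see the session. A hypothetical sketch of reading it from an Astro endpoint follows; it is not part of this diff, and the route path and response shape are illustrative only.

```ts
// Hypothetical SSR endpoint, e.g. src/pages/api/session.ts (path is an assumption).
import type { APIRoute } from 'astro';

export const GET: APIRoute = ({ cookies }) => {
  // Read the cookie written by setCookieFromUser() on the client.
  const session = cookies.get('fa_session')?.value;
  return new Response(JSON.stringify({ authenticated: Boolean(session) }), {
    headers: { 'Content-Type': 'application/json' },
  });
};
```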
35 fictionarchive-web-astro/src/lib/auth/oidcConfig.ts Normal file
@@ -0,0 +1,35 @@
import { UserManager, WebStorageStateStore, type UserManagerSettings } from 'oidc-client-ts';

const authority = import.meta.env.PUBLIC_OIDC_AUTHORITY;
const clientId = import.meta.env.PUBLIC_OIDC_CLIENT_ID;
const redirectUri = import.meta.env.PUBLIC_OIDC_REDIRECT_URI;
const postLogoutRedirectUri = import.meta.env.PUBLIC_OIDC_POST_LOGOUT_REDIRECT_URI ?? redirectUri;
const scope = import.meta.env.PUBLIC_OIDC_SCOPE ?? 'openid profile email';

export const isOidcConfigured =
  Boolean(authority) && Boolean(clientId) && Boolean(redirectUri);

function buildSettings(): UserManagerSettings | null {
  if (!isOidcConfigured) return null;

  return {
    authority: authority!,
    client_id: clientId!,
    redirect_uri: redirectUri!,
    post_logout_redirect_uri: postLogoutRedirectUri,
    response_type: 'code',
    scope,
    loadUserInfo: true,
    automaticSilentRenew: true,
    userStore:
      typeof window !== 'undefined'
        ? new WebStorageStateStore({ store: window.localStorage })
        : undefined,
  };
}

export const userManager = (() => {
  const settings = buildSettings();
  if (!settings) return null;
  return new UserManager(settings);
})();
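A hedged sketch, not in this diff, of how the exported `userManager` could be used to attach the current access token to a request against `PUBLIC_GRAPHQL_URI`. It relies only on `getUser()`, `expired`, and `access_token` from oidc-client-ts; the helper name and the example query are assumptions.

```ts
// Hypothetical helper module inside src/ (names are assumptions).
import { userManager } from './lib/auth/oidcConfig';

async function authHeaders(): Promise<Record<string, string>> {
  if (!userManager) return {}; // OIDC not configured
  const current = await userManager.getUser();
  return current?.access_token && !current.expired
    ? { Authorization: `Bearer ${current.access_token}` }
    : {};
}

// Example: a raw GraphQL request with the token attached when one is available.
const response = await fetch(import.meta.env.PUBLIC_GRAPHQL_URI, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', ...(await authHeaders()) },
  body: JSON.stringify({ query: '{ __typename }' }),
});
console.log(await response.json());
```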
@@ -0,0 +1,8 @@
<script lang="ts">
  import { onMount } from 'svelte';
  import { initAuth } from '$lib/auth/authStore';

  onMount(() => {
    initAuth();
  });
</script>
@@ -0,0 +1,55 @@
<script lang="ts">
  import { user, isLoading, isConfigured, login, logout } from '$lib/auth/authStore';
  import { Button } from '$lib/components/ui/button';

  let isOpen = $state(false);

  const email = $derived(
    $user?.profile?.email ??
      $user?.profile?.preferred_username ??
      $user?.profile?.name ??
      $user?.profile?.sub ??
      'User'
  );

  function handleClickOutside(event: MouseEvent) {
    const target = event.target as HTMLElement;
    if (!target.closest('.auth-dropdown')) {
      isOpen = false;
    }
  }

  function toggleDropdown() {
    isOpen = !isOpen;
  }

  async function handleLogout() {
    isOpen = false;
    await logout();
  }
</script>

<svelte:window onclick={handleClickOutside} />

{#if $isLoading}
  <Button variant="outline" disabled>Loading...</Button>
{:else if !isConfigured}
  <span class="text-sm text-yellow-600">Auth not configured</span>
{:else if $user}
  <div class="auth-dropdown relative">
    <Button variant="outline" onclick={toggleDropdown}>
      {email}
    </Button>
    {#if isOpen}
      <div
        class="absolute right-0 z-50 mt-2 w-48 rounded-md bg-white p-2 shadow-lg dark:bg-gray-800"
      >
        <Button variant="ghost" class="w-full justify-start" onclick={handleLogout}>
          Sign out
        </Button>
      </div>
    {/if}
  </div>
{:else}
  <Button onclick={login}>Sign in</Button>
{/if}
@@ -0,0 +1,21 @@
<script lang="ts">
  import { user, isLoading } from '$lib/auth/authStore';

  const greeting = $derived.by(() => {
    if ($isLoading) return 'Welcome to FictionArchive';
    if ($user) {
      const name = $user.profile?.name || $user.profile?.preferred_username;
      return name ? `Welcome back, ${name}` : 'Welcome back';
    }
    return 'Welcome to FictionArchive';
  });
</script>

<section class="py-8 text-center sm:py-12">
  <h1 class="text-3xl font-bold tracking-tight sm:text-4xl">
    {greeting}
  </h1>
  <p class="mt-2 text-lg text-muted-foreground">
    Your personal fiction library
  </p>
</section>
38 fictionarchive-web-astro/src/lib/components/HomePage.svelte Normal file
@@ -0,0 +1,38 @@
<script lang="ts">
  // Direct imports for faster Astro builds
  import BookOpen from '@lucide/svelte/icons/book-open';
  import List from '@lucide/svelte/icons/list';
  import Sparkles from '@lucide/svelte/icons/sparkles';
  import HeroSection from './HeroSection.svelte';
  import NavigationCard from './NavigationCard.svelte';
  import RecentlyUpdatedSection from './RecentlyUpdatedSection.svelte';
</script>

<div class="flex flex-col gap-8">
  <HeroSection />

  <nav class="mx-auto flex w-full max-w-3xl flex-col gap-4">
    <NavigationCard
      href="/novels"
      icon={BookOpen}
      title="Novels"
      description="Explore and read archived novels."
    />
    <NavigationCard
      href="/lists"
      icon={List}
      title="Reading Lists"
      description="Organize stories into custom collections."
      disabled
    />
    <NavigationCard
      href="/recommendations"
      icon={Sparkles}
      title="Recommendations"
      description="Get suggestions based on your reading."
      disabled
    />
  </nav>

  <RecentlyUpdatedSection />
</div>
Some files were not shown because too many files have changed in this diff.