14 Commits

Author SHA1 Message Date
gamer147
15a8185621 [FA-11] Fix react build issues
All checks were successful
CI / build-backend (pull_request) Successful in 1m7s
CI / build-frontend (pull_request) Successful in 26s
2025-11-26 08:48:00 -05:00
gamer147
0180a58084 [FA-11] Hopefully resolves build issues, although I don't know why the build_gateway was necessarily failing in build.yml and trying to access Debug bins
Some checks failed
CI / build-backend (pull_request) Successful in 56s
CI / build-frontend (pull_request) Failing after 23s
2025-11-26 07:26:57 -05:00
gamer147
573f3fc7b0 [FA-11] That causes an error so fingers crossed this time
Some checks failed
CI / build-backend (pull_request) Failing after 52s
CI / build-frontend (pull_request) Failing after 21s
2025-11-26 07:11:40 -05:00
gamer147
cdc2176e35 [FA-11] Try and disable the caching again, forgot a step like an idiot
Some checks failed
CI / build-backend (pull_request) Failing after 1m24s
CI / build-frontend (pull_request) Failing after 20s
2025-11-26 07:08:32 -05:00
gamer147
e9eaf1569b [FA-11] Disable Node caching all together and let backend rebuild if needed
Some checks failed
CI / build-backend (pull_request) Failing after 52s
CI / build-frontend (pull_request) Failing after 4m52s
2025-11-26 00:49:27 -05:00
gamer147
ba99642e97 [FA-11] Fix build errors, try to fix cache miss on node build
Some checks failed
CI / build-backend (pull_request) Failing after 1m11s
CI / build-frontend (pull_request) Has been cancelled
2025-11-26 00:40:07 -05:00
gamer147
c6d794aabc Merge remote-tracking branch 'origin/feature/FA-11_CICD' into feature/FA-11_CICD
Some checks failed
CI / build-backend (pull_request) Failing after 53s
CI / build-frontend (pull_request) Failing after 4m52s
2025-11-26 00:18:40 -05:00
gamer147
62e7e20f94 [FA-11] Fix issue with local package reference 2025-11-26 00:18:33 -05:00
9e1792e4d0 Merge branch 'master' into feature/FA-11_CICD
Some checks failed
CI / build-backend (pull_request) Failing after 1m49s
CI / build-frontend (pull_request) Has been cancelled
2025-11-26 04:50:58 +00:00
gamer147
747a212fb0 Add assistant directly 2025-11-25 23:50:01 -05:00
gamer147
200bdaabed [FA-11] Try to add Claude assistant
Some checks failed
CI / build-frontend (pull_request) Has been cancelled
CI / build-backend (pull_request) Has been cancelled
2025-11-25 23:45:53 -05:00
gamer147
caa36648e2 Haven't checked yet 2025-11-25 23:29:55 -05:00
6f2454329d Merge pull request 'feature/FA-18_BootstrapFrontend' (#32) from feature/FA-18_BootstrapFrontend into master
Reviewed-on: #32
2025-11-24 18:37:29 +00:00
a01250696f Merge pull request 'feature/FA-5_ImageSupport' (#31) from feature/FA-5_ImageSupport into master
Reviewed-on: #31
2025-11-24 02:17:10 +00:00
31 changed files with 2746 additions and 1127 deletions

View File

@@ -0,0 +1,122 @@
# Build Gateway workflow: pulls the per-service Fusion subgraph packages,
# rewrites their URLs for Docker networking, composes the gateway schema,
# then builds and pushes the FictionArchive.API container image.
name: Build Gateway

on:
  # Manual runs, plus automatic rebuild whenever the gateway project
  # changes on master.
  workflow_dispatch:
  push:
    branches:
      - master
    paths:
      - 'FictionArchive.API/**'

env:
  REGISTRY: ${{ gitea.server_url }}
  IMAGE_NAME: ${{ gitea.repository_owner }}/fictionarchive-api

jobs:
  build-gateway:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Create subgraphs directory
        run: mkdir -p subgraphs

      # Download all subgraph packages from latest successful builds.
      # NOTE(review): actions/download-artifact@v4 only resolves artifacts
      # from the CURRENT workflow run unless run-id/github-token inputs are
      # supplied; when this workflow is dispatched on its own these steps
      # may find nothing (hence continue-on-error) — confirm this is intended.
      - name: Download Novel Service subgraph
        uses: actions/download-artifact@v4
        with:
          name: novel-service-subgraph
          path: subgraphs/novel
        continue-on-error: true

      - name: Download Translation Service subgraph
        uses: actions/download-artifact@v4
        with:
          name: translation-service-subgraph
          path: subgraphs/translation
        continue-on-error: true

      - name: Download Scheduler Service subgraph
        uses: actions/download-artifact@v4
        with:
          name: scheduler-service-subgraph
          path: subgraphs/scheduler
        continue-on-error: true

      - name: Download User Service subgraph
        uses: actions/download-artifact@v4
        with:
          name: user-service-subgraph
          path: subgraphs/user
        continue-on-error: true

      - name: Download File Service subgraph
        uses: actions/download-artifact@v4
        with:
          name: file-service-subgraph
          path: subgraphs/file
        continue-on-error: true

      # Point every packaged subgraph at its Docker-internal service URL
      # (http://<name>-service:8080/graphql) before composition.
      - name: Configure subgraph URLs for Docker
        run: |
          for fsp in subgraphs/*/*.fsp; do
            if [ -f "$fsp" ]; then
              dir=$(dirname "$fsp")
              name=$(basename "$dir")
              url="http://${name}-service:8080/graphql"
              echo "Setting $name URL to $url"
              fusion subgraph config set http --url "$url" -c "$fsp"
            fi
          done

      # Merge every available subgraph package into a fresh gateway.fgp.
      - name: Compose gateway
        run: |
          cd FictionArchive.API
          rm -f gateway.fgp
          for fsp in ../subgraphs/*/*.fsp; do
            if [ -f "$fsp" ]; then
              echo "Composing: $fsp"
              fusion compose -p gateway.fgp -s "$fsp"
            fi
          done

      - name: Restore dependencies
        run: dotnet restore FictionArchive.API/FictionArchive.API.csproj

      # SkipFusionBuild=true because the schema was already composed above.
      - name: Build gateway
        run: dotnet build FictionArchive.API/FictionArchive.API.csproj -c Release --no-restore -p:SkipFusionBuild=true

      # NOTE(review): this tests the whole solution with --no-build even
      # though only the API project was built above; continue-on-error
      # masks the resulting failures — verify this is intentional.
      - name: Run tests
        run: dotnet test FictionArchive.sln -c Release --no-build --verbosity normal
        continue-on-error: true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: FictionArchive.API/Dockerfile
          push: true
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
          # NOTE(review): the gha cache backend targets the GitHub Actions
          # cache service — confirm the Gitea act_runner supports it.
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@@ -0,0 +1,77 @@
# Build Subgraphs workflow: builds each GraphQL-exposing service, exports
# its schema, packs it into a Fusion .fsp artifact, then asks Gitea to run
# the gateway composition workflow.
name: Build Subgraphs

on:
  push:
    branches:
      - master
    paths:
      - 'FictionArchive.Service.*/**'
      - 'FictionArchive.Common/**'
      - 'FictionArchive.Service.Shared/**'

jobs:
  build-subgraphs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # One entry per GraphQL subgraph service: `name` feeds the artifact
        # name, `project` is the csproj directory.
        service:
          - name: novel-service
            project: FictionArchive.Service.NovelService
            subgraph: Novel
          - name: translation-service
            project: FictionArchive.Service.TranslationService
            subgraph: Translation
          - name: scheduler-service
            project: FictionArchive.Service.SchedulerService
            subgraph: Scheduler
          - name: user-service
            project: FictionArchive.Service.UserService
            subgraph: User
          - name: file-service
            project: FictionArchive.Service.FileService
            subgraph: File
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Restore dependencies
        run: dotnet restore ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj

      - name: Build
        run: dotnet build ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj -c Release --no-restore

      # Run the service's schema export command to produce schema.graphql.
      - name: Export schema
        run: |
          dotnet run -c Release --no-launch-profile \
            --project ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj \
            -- schema export --output ${{ matrix.service.project }}/schema.graphql

      - name: Pack subgraph
        run: fusion subgraph pack -w ${{ matrix.service.project }}

      - name: Upload subgraph package
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.service.name }}-subgraph
          path: ${{ matrix.service.project }}/*.fsp
          retention-days: 30

  # Trigger gateway build after all subgraphs are built
  trigger-gateway:
    runs-on: ubuntu-latest
    needs: build-subgraphs
    steps:
      - name: Trigger gateway workflow
        # --fail makes curl exit non-zero on an HTTP error response, so a
        # rejected dispatch actually fails this step instead of silently
        # "succeeding"; the Content-Type header is required by the Gitea
        # API when posting a JSON body (curl -d defaults to
        # application/x-www-form-urlencoded).
        run: |
          curl --fail -sS -X POST \
            -H "Authorization: token ${{ secrets.GITEA_TOKEN }}" \
            -H "Content-Type: application/json" \
            "${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/actions/workflows/build-gateway.yml/dispatches" \
            -d '{"ref":"master"}'

View File

@@ -0,0 +1,62 @@
# CI workflow: compile and test the .NET backend, and lint + build the
# React frontend, on every push or pull request targeting master.
name: CI

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build-backend:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      # NOTE(review): presumably Python is needed by the gateway's
      # build_gateway.py tooling — confirm it is still required now that
      # the build passes /p:SkipFusionBuild=true.
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Restore dependencies
        run: dotnet restore FictionArchive.sln

      # SkipFusionBuild avoids composing the gateway schema during CI.
      - name: Build solution
        run: dotnet build FictionArchive.sln --configuration Release --no-restore /p:SkipFusionBuild=true

      - name: Run tests
        run: dotnet test FictionArchive.sln --configuration Release --no-build --verbosity normal

  build-frontend:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: fictionarchive-web
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v6.0.0
        with:
          node-version: '20'
          # Node package-manager caching is deliberately disabled after
          # repeated cache-miss failures (see commit history).
          package-manager-cache: false

      - name: Install dependencies
        run: npm ci

      - name: Lint
        run: npm run lint

      - name: Build
        run: npm run build

View File

@@ -0,0 +1,43 @@
# Claude Assistant workflow for Gitea: responds to @claude mentions in
# issue/PR comments and to issues being opened or assigned.
name: Claude Assistant for Gitea

on:
  # Trigger on issue comments (works on both issues and pull requests in Gitea)
  issue_comment:
    types: [created]
  # Trigger on issues being opened or assigned
  issues:
    types: [opened, assigned]
  # Note: pull_request_review_comment has limited support in Gitea
  # Use issue_comment instead which covers PR comments

jobs:
  claude-assistant:
    # Basic trigger detection - check for @claude in comments or issue body
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || github.event.action == 'assigned'))
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
      issues: write
    # Note: Gitea Actions may not require id-token: write for basic functionality
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Full history so the assistant can inspect and push branches.
          fetch-depth: 0

      # NOTE(review): this third-party action is unpinned — consider
      # pinning to a release tag or commit SHA.
      - name: Run Claude Assistant
        uses: markwylde/claude-code-gitea-action
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          gitea_token: ${{ secrets.CLAUDE_GITEA_TOKEN }}
          timeout_minutes: "60"
          trigger_phrase: "@claude"
          # Optional: Customize for Gitea environment
          custom_instructions: |
            You are working in a Gitea environment. Be aware that:
            - Some GitHub Actions features may behave differently
            - Focus on core functionality and avoid advanced GitHub-specific features
            - Use standard git operations when possible

View File

@@ -0,0 +1,98 @@
# Release workflow: on a v*.*.* tag, builds and pushes versioned Docker
# images for every backend service and for the frontend.
name: Release

on:
  push:
    tags:
      - 'v*.*.*'

env:
  REGISTRY: ${{ gitea.server_url }}
  IMAGE_PREFIX: ${{ gitea.repository_owner }}/fictionarchive

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # One image per backend service.
        service:
          - name: api
            dockerfile: FictionArchive.API/Dockerfile
          - name: novel-service
            dockerfile: FictionArchive.Service.NovelService/Dockerfile
          - name: user-service
            dockerfile: FictionArchive.Service.UserService/Dockerfile
          - name: translation-service
            dockerfile: FictionArchive.Service.TranslationService/Dockerfile
          - name: file-service
            dockerfile: FictionArchive.Service.FileService/Dockerfile
          - name: scheduler-service
            dockerfile: FictionArchive.Service.SchedulerService/Dockerfile
          - name: authentication-service
            dockerfile: FictionArchive.Service.AuthenticationService/Dockerfile
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Strip the leading "v" so a v1.2.3 tag produces image tag 1.2.3.
      - name: Extract version from tag
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ matrix.service.dockerfile }}
          push: true
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:${{ steps.version.outputs.VERSION }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

  build-frontend:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract version from tag
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      # VITE_* values are baked into the static bundle at image build time
      # from repository variables (vars.*), not secrets.
      - name: Build and push frontend Docker image
        uses: docker/build-push-action@v6
        with:
          context: ./fictionarchive-web
          file: fictionarchive-web/Dockerfile
          push: true
          build-args: |
            VITE_GRAPHQL_URI=${{ vars.VITE_GRAPHQL_URI }}
            VITE_OIDC_AUTHORITY=${{ vars.VITE_OIDC_AUTHORITY }}
            VITE_OIDC_CLIENT_ID=${{ vars.VITE_OIDC_CLIENT_ID }}
            VITE_OIDC_REDIRECT_URI=${{ vars.VITE_OIDC_REDIRECT_URI }}
            VITE_OIDC_POST_LOGOUT_REDIRECT_URI=${{ vars.VITE_OIDC_POST_LOGOUT_REDIRECT_URI }}
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-frontend:${{ steps.version.outputs.VERSION }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-frontend:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@@ -0,0 +1,405 @@
# FictionArchive Architecture Overview
## High-Level Architecture
```
┌────────────────────────────────────────────────────────────────┐
│ React 19 Frontend │
│ (Apollo Client, TailwindCSS, OIDC Auth) │
└───────────────────────────┬────────────────────────────────────┘
│ GraphQL
┌────────────────────────────────────────────────────────────────┐
│ Hot Chocolate Fusion Gateway │
│ (FictionArchive.API) │
└──────┬────────┬────────┬────────┬────────┬─────────────────────┘
│ │ │ │ │
▼ ▼ ▼ ▼ ▼
┌──────────┐┌──────────┐┌───────────┐┌──────────┐┌──────────────┐
│ Novel ││ User ││Translation││Scheduler ││ File │
│ Service ││ Service ││ Service ││ Service ││ Service │
└────┬─────┘└────┬─────┘└─────┬─────┘└────┬─────┘└──────┬───────┘
│ │ │ │ │
└───────────┴────────────┴───────────┴─────────────┘
┌────────┴────────┐
│ RabbitMQ │
│ (Event Bus) │
└─────────────────┘
┌────────┴────────┐
│ PostgreSQL │
│ (per service) │
└─────────────────┘
```
## Technology Stack
| Layer | Technology | Version |
|-------|------------|---------|
| Runtime | .NET | 8.0 |
| GraphQL | Hot Chocolate / Fusion | 13+ |
| Database | PostgreSQL | 12+ |
| ORM | Entity Framework Core | 8.0 |
| Message Broker | RabbitMQ | 3.12+ |
| Job Scheduler | Quartz.NET | Latest |
| Object Storage | AWS S3 / Garage | - |
| Date/Time | NodaTime | Latest |
| Frontend | React | 19.2 |
| Frontend Build | Vite | 7.2 |
| GraphQL Client | Apollo Client | 4.0 |
| Auth | OIDC Client TS | 3.4 |
| Styling | TailwindCSS | 3.4 |
| UI Components | Radix UI | Latest |
## Project Structure
```
FictionArchive.sln
├── FictionArchive.Common # Shared enums and extensions
├── FictionArchive.API # GraphQL Fusion Gateway
├── FictionArchive.Service.Shared # Shared infrastructure
├── FictionArchive.Service.NovelService
├── FictionArchive.Service.UserService
├── FictionArchive.Service.TranslationService
├── FictionArchive.Service.FileService
├── FictionArchive.Service.SchedulerService
├── FictionArchive.Service.AuthenticationService
├── FictionArchive.Service.NovelService.Tests
└── fictionarchive-web # React frontend
```
## Services
### FictionArchive.API - GraphQL Fusion Gateway
- **Role**: Single entry point for all GraphQL queries
- **Port**: 5001 (HTTPS)
- **Endpoints**:
- `/graphql` - GraphQL endpoint
- `/healthz` - Health check
- **Responsibilities**:
- Compose GraphQL schemas from all subgraphs
- Route queries to appropriate services
- CORS policy management
### FictionArchive.Service.NovelService
- **Role**: Novel/fiction content management
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_NovelService`
- **GraphQL Operations**:
- `GetNovels` - Paginated, filterable novel listing
- `ImportNovel` - Trigger novel import
- `FetchChapterContents` - Fetch chapter content
- **Models**: Novel, Chapter, Source, NovelTag, Image, LocalizationKey
- **External Integration**: Novelpia adapter
- **Events Published**: `TranslationRequestCreatedEvent`, `FileUploadRequestCreatedEvent`
- **Events Subscribed**: `TranslationRequestCompletedEvent`, `NovelUpdateRequestedEvent`, `ChapterPullRequestedEvent`, `FileUploadRequestStatusUpdateEvent`
### FictionArchive.Service.UserService
- **Role**: User identity and profile management
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_UserService`
- **Models**: User (with OAuth provider linking)
- **Events Subscribed**: `AuthUserAddedEvent`
### FictionArchive.Service.TranslationService
- **Role**: Text translation orchestration
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_TranslationService`
- **External Integration**: DeepL API
- **Models**: TranslationRequest
- **Events Published**: `TranslationRequestCompletedEvent`
- **Events Subscribed**: `TranslationRequestCreatedEvent`
### FictionArchive.Service.FileService
- **Role**: File storage and S3 proxy
- **Port**: 8080 (HTTP)
- **Protocol**: REST only (not GraphQL)
- **Endpoints**: `GET /api/{*path}` - S3 file proxy
- **External Integration**: S3-compatible storage (AWS S3 / Garage)
- **Events Published**: `FileUploadRequestStatusUpdateEvent`
- **Events Subscribed**: `FileUploadRequestCreatedEvent`
### FictionArchive.Service.SchedulerService
- **Role**: Job scheduling and automation
- **Port**: 8081 (HTTPS)
- **Database**: `FictionArchive_SchedulerService`
- **Scheduler**: Quartz.NET with persistent job store
- **GraphQL Operations**: `ScheduleEventJob`, `GetScheduledJobs`
- **Models**: SchedulerJob, EventJobTemplate
### FictionArchive.Service.AuthenticationService
- **Role**: OAuth/OIDC webhook receiver
- **Port**: 8080 (HTTP)
- **Protocol**: REST only
- **Endpoints**: `POST /api/AuthenticationWebhook/UserRegistered`
- **Events Published**: `AuthUserAddedEvent`
- **No Database** - Stateless webhook handler
## Communication Patterns
### GraphQL Federation
- Hot Chocolate Fusion Gateway composes subgraph schemas
- Schema export automated via `build_gateway.py`
- Each service defines its own Query/Mutation types
### Event-Driven Architecture (RabbitMQ)
- Direct exchange: `fiction-archive-event-bus`
- Per-service queues based on `ClientIdentifier`
- Routing key = event class name
- Headers: `X-Created-At`, `X-Event-Id`
- NodaTime JSON serialization
### Event Flow Examples
**Novel Import:**
```
1. Frontend → importNovel mutation
2. NovelService publishes NovelUpdateRequestedEvent
3. NovelUpdateRequestedEventHandler processes
4. Fetches metadata via NovelpiaAdapter
5. Publishes FileUploadRequestCreatedEvent (for cover)
6. FileService uploads to S3
7. FileService publishes FileUploadRequestStatusUpdateEvent
8. NovelService updates image path
```
**Translation:**
```
1. NovelService publishes TranslationRequestCreatedEvent
2. TranslationService translates via DeepL
3. TranslationService publishes TranslationRequestCompletedEvent
4. NovelService updates chapter translation
```
## Data Storage
### Database Pattern
- Database per service (PostgreSQL)
- Connection string format: `Host=localhost;Database=FictionArchive_{ServiceName};...`
- Auto-migration on startup via `dbContext.UpdateDatabase()`
### Audit Trail
- `AuditInterceptor` auto-sets `CreatedTime` and `LastUpdatedTime`
- `IAuditable` interface with NodaTime `Instant` fields
- `BaseEntity<TKey>` abstract base class
### Object Storage
- S3-compatible (AWS S3 or Garage)
- Path-style URLs for Garage compatibility
- Proxied through FileService
## Frontend Architecture
### Structure
```
fictionarchive-web/
├── src/
│ ├── auth/ # OIDC authentication
│ ├── components/ # React components
│ │ └── ui/ # Radix-based primitives
│ ├── pages/ # Route pages
│ ├── layouts/ # Layout components
│ ├── graphql/ # GraphQL queries
│ ├── __generated__/ # Codegen output
│ └── lib/ # Utilities
└── codegen.ts # GraphQL Codegen config
```
### Authentication
- OIDC via `oidc-client-ts`
- Environment variables for configuration
- `useAuth` hook for state access
### State Management
- Apollo Client for GraphQL state
- React Context for auth state
## Infrastructure
### Docker
- Multi-stage builds
- Base: `mcr.microsoft.com/dotnet/aspnet:8.0`
- Non-root user for security
- Ports: 8080 (HTTP) or 8081 (HTTPS)
### Health Checks
- All services expose `/healthz`
### Configuration
- `appsettings.json` - Default settings
- `appsettings.Development.json` - Dev overrides
- `appsettings.Local.json` - Local secrets (not committed)
---
# Improvement Recommendations
## Critical
### 1. Event Bus - No Dead Letter Queue or Retry Logic
**Location**: `FictionArchive.Service.Shared/Services/EventBus/Implementations/RabbitMQEventBus.cs:126-133`
**Issue**: Events are always ACK'd even on failure. No DLQ configuration for poison messages. Failed events are lost forever.
**Recommendation**: Implement retry with exponential backoff, dead-letter exchange, and poison message handling.
```csharp
// Example: Add retry and DLQ
catch (Exception e)
{
_logger.LogError(e, "Error handling event");
if (retryCount < maxRetries)
{
await channel.BasicNackAsync(@event.DeliveryTag, false, true); // requeue
}
else
{
// Send to DLQ
await channel.BasicNackAsync(@event.DeliveryTag, false, false);
}
}
```
### 2. CORS Configuration is Insecure
**Location**: `FictionArchive.API/Program.cs:24-33`
**Issue**: `AllowAnyOrigin()` allows requests from any domain, unsuitable for production.
**Recommendation**: Configure specific allowed origins via appsettings:
```csharp
builder.Services.AddCors(options =>
{
options.AddPolicy("Production", policy =>
{
policy.WithOrigins(builder.Configuration.GetSection("Cors:AllowedOrigins").Get<string[]>())
.AllowAnyMethod()
.AllowAnyHeader();
});
});
```
### 3. Auto-Migration on Startup
**Location**: `FictionArchive.Service.Shared/Services/Database/FictionArchiveDbContext.cs:23-38`
**Issue**: Running migrations at startup can cause race conditions with multiple instances and potential data corruption during rolling deployments.
**Recommendation**: Use a migration job, init container, or CLI tool instead of startup code.
## Important
### 4. No Circuit Breaker Pattern
**Issue**: External service calls (DeepL, Novelpia, S3) lack resilience patterns.
**Recommendation**: Add Polly for circuit breaker, retry, and timeout policies:
```csharp
builder.Services.AddHttpClient<ISourceAdapter, NovelpiaAdapter>()
.AddPolicyHandler(GetRetryPolicy())
.AddPolicyHandler(GetCircuitBreakerPolicy());
```
### 5. Missing Request Validation/Rate Limiting
**Issue**: No visible rate limiting on GraphQL mutations. `ImportNovel` could be abused.
**Recommendation**: Add rate limiting middleware and input validation.
### 6. Hardcoded Exchange Name
**Location**: `RabbitMQEventBus.cs:24`
**Issue**: `fiction-archive-event-bus` is hardcoded.
**Recommendation**: Move to configuration for environment flexibility.
### 7. No Distributed Tracing
**Issue**: Event correlation exists (`X-Event-Id` header) but not integrated with tracing.
**Recommendation**: Add OpenTelemetry for end-to-end request tracing across services.
### 8. Singleton AuditInterceptor
**Location**: `FictionArchiveDbContext.cs:20`
**Issue**: `new AuditInterceptor()` created per DbContext instance.
**Recommendation**: Register as singleton in DI and inject.
## Minor / Code Quality
### 9. Limited Test Coverage
**Issue**: Only `NovelService.Tests` exists. No integration tests for event handlers.
**Recommendation**: Add unit and integration tests for each service, especially event handlers.
### 10. Inconsistent Port Configuration
**Issue**: Some services use 8080 (HTTP), others 8081 (HTTPS).
**Recommendation**: Standardize on HTTPS with proper cert management.
### 11. No API Versioning
**Issue**: GraphQL schemas have no versioning strategy.
**Recommendation**: Consider schema versioning or deprecation annotations for breaking changes.
### 12. Frontend - No Error Boundary
**Issue**: React app lacks error boundaries for graceful failure handling.
**Recommendation**: Add React Error Boundaries around routes.
### 13. Missing Health Check Depth
**Issue**: Health checks only verify service is running, not dependencies.
**Recommendation**: Add database, RabbitMQ, and S3 health checks:
```csharp
builder.Services.AddHealthChecks()
.AddNpgSql(connectionString)
.AddRabbitMQ()
.AddS3(options => { });
```
### 14. Synchronous File Operations in Event Handlers
**Issue**: File uploads may block event handling thread for large files.
**Recommendation**: Consider async streaming for large files.
## Architectural Suggestions
### 15. Consider Outbox Pattern
**Issue**: Publishing events and saving to DB aren't transactional, could lead to inconsistent state.
**Recommendation**: Implement transactional outbox pattern for guaranteed delivery:
```
1. Save entity + outbox message in same transaction
2. Background worker publishes from outbox
3. Delete outbox message after successful publish
```
### 16. Gateway Schema Build Process
**Issue**: Python script (`build_gateway.py`) for schema composition requires manual execution.
**Recommendation**: Integrate into CI/CD pipeline or consider runtime schema polling.
### 17. Secret Management
**Issue**: Credentials in appsettings files.
**Recommendation**: Use Azure Key Vault, AWS Secrets Manager, HashiCorp Vault, or similar secret management solution.
---
## Key Files Reference
| File | Purpose |
|------|---------|
| `FictionArchive.API/Program.cs` | Gateway setup |
| `FictionArchive.API/build_gateway.py` | Schema composition script |
| `FictionArchive.Service.Shared/Services/EventBus/` | Event bus implementation |
| `FictionArchive.Service.Shared/Extensions/` | Service registration helpers |
| `FictionArchive.Service.Shared/Services/Database/` | DB infrastructure |
| `fictionarchive-web/src/auth/AuthContext.tsx` | Frontend auth state |

220
Documentation/CICD.md Normal file
View File

@@ -0,0 +1,220 @@
# CI/CD Configuration
This document describes the CI/CD pipeline configuration for FictionArchive using Gitea Actions.
## Workflows Overview
| Workflow | File | Trigger | Purpose |
|----------|------|---------|---------|
| CI | `build.yml` | Push/PR to master | Build and test all projects |
| Build Subgraphs | `build-subgraphs.yml` | Push to master (service changes) | Build GraphQL subgraph packages |
| Build Gateway | `build-gateway.yml` | Manual or triggered by subgraphs | Compose gateway and build Docker image |
| Release | `release.yml` | Tag `v*.*.*` | Build and push all Docker images |
## Pipeline Architecture
```
┌─────────────────────────────────────────────────────────────────────┐
│ Push to master │
└─────────────────────────────┬───────────────────────────────────────┘
┌───────────────┴───────────────┐
▼ ▼
┌─────────────────────────┐ ┌─────────────────────────┐
│ build.yml │ │ build-subgraphs.yml │
│ (CI checks - always) │ │ (if service changes) │
└─────────────────────────┘ └────────────┬────────────┘
┌─────────────────────────┐
│ build-gateway.yml │
│ (compose & push API) │
└─────────────────────────┘
┌─────────────────────────────────────────────────────────────────────┐
│ Push tag v*.*.* │
└─────────────────────────────┬───────────────────────────────────────┘
┌─────────────────────────┐
│ release.yml │
│ (build & push all) │
└─────────────────────────┘
```
## Required Configuration
### Repository Secrets
Configure these in **Settings → Actions → Secrets**:
| Secret | Description | Required By |
|--------|-------------|-------------|
| `REGISTRY_TOKEN` | Gitea access token with `write:package` scope | `release.yml`, `build-gateway.yml` |
| `GITEA_TOKEN` | Gitea access token for API calls | `build-subgraphs.yml` |
#### Creating Access Tokens
1. Go to **Settings → Applications → Access Tokens**
2. Create a new token with the following scopes:
- `write:package` - Push container images
- `write:repository` - Trigger workflows via API
3. Copy the token and add it as a repository secret
### Repository Variables
Configure these in **Settings → Actions → Variables**:
| Variable | Description | Example | Required By |
|----------|-------------|---------|-------------|
| `VITE_GRAPHQL_URI` | GraphQL API endpoint URL | `https://api.fictionarchive.example.com/graphql/` | `release.yml` |
| `VITE_OIDC_AUTHORITY` | OIDC provider authority URL | `https://auth.example.com/application/o/fiction-archive/` | `release.yml` |
| `VITE_OIDC_CLIENT_ID` | OIDC client identifier | `your-client-id` | `release.yml` |
| `VITE_OIDC_REDIRECT_URI` | Post-login redirect URL | `https://fictionarchive.example.com/` | `release.yml` |
| `VITE_OIDC_POST_LOGOUT_REDIRECT_URI` | Post-logout redirect URL | `https://fictionarchive.example.com/` | `release.yml` |
## Workflow Details
### CI (`build.yml`)
**Trigger:** Push or pull request to `master`
**Jobs:**
1. `build-backend` - Builds .NET solution and runs tests
2. `build-frontend` - Builds React application with linting
**Requirements:**
- .NET 8.0 SDK
- Python 3.12
- Node.js 20
- HotChocolate Fusion CLI
### Build Subgraphs (`build-subgraphs.yml`)
**Trigger:** Push to `master` with changes in:
- `FictionArchive.Service.*/**`
- `FictionArchive.Common/**`
- `FictionArchive.Service.Shared/**`
**Jobs:**
1. `build-subgraphs` - Matrix job building each service's `.fsp` package
2. `trigger-gateway` - Triggers gateway rebuild via API
**Subgraphs Built:**
- Novel Service
- Translation Service
- Scheduler Service
- User Service
- File Service
**Artifacts:** Each subgraph produces a `.fsp` file retained for 30 days.
### Build Gateway (`build-gateway.yml`)
**Trigger:**
- Manual dispatch (`workflow_dispatch`)
- Push to `master` with changes in `FictionArchive.API/**`
- Triggered by `build-subgraphs.yml` completion
**Process:**
1. Downloads all subgraph `.fsp` artifacts
2. Configures Docker-internal URLs for each subgraph
3. Composes gateway schema using Fusion CLI
4. Builds and pushes API Docker image
**Image Tags:**
- `<registry>/<owner>/fictionarchive-api:latest`
- `<registry>/<owner>/fictionarchive-api:<commit-sha>`
### Release (`release.yml`)
**Trigger:** Push tag matching `v*.*.*` (e.g., `v1.0.0`)
**Jobs:**
1. `build-and-push` - Matrix job building all backend service images
2. `build-frontend` - Builds and pushes frontend image
**Services Built:**
- `fictionarchive-api`
- `fictionarchive-novel-service`
- `fictionarchive-user-service`
- `fictionarchive-translation-service`
- `fictionarchive-file-service`
- `fictionarchive-scheduler-service`
- `fictionarchive-authentication-service`
- `fictionarchive-frontend`
**Image Tags:**
- `<registry>/<owner>/fictionarchive-<service>:<version>`
- `<registry>/<owner>/fictionarchive-<service>:latest`
## Container Registry
Images are pushed to the Gitea Container Registry at:
```
<gitea-server-url>/<repository-owner>/fictionarchive-<service>:<tag>
```
### Pulling Images
```bash
# Login to registry
docker login <gitea-server-url> -u <username> -p <token>
# Pull an image
docker pull <gitea-server-url>/<owner>/fictionarchive-api:latest
```
## Creating a Release
1. Ensure all changes are committed and pushed to `master`
2. Create and push a version tag:
```bash
git tag v1.0.0
git push origin v1.0.0
```
3. The release workflow will automatically build and push all images
4. Monitor progress in **Actions** tab
## Troubleshooting
### Build Failures
**"REGISTRY_TOKEN secret not found"**
- Ensure the `REGISTRY_TOKEN` secret is configured in repository settings
- Verify the token has `write:package` scope
**"Failed to trigger gateway workflow"**
- Ensure `GITEA_TOKEN` secret is configured
- Verify the token has `write:repository` scope
**"No subgraph artifacts found"**
- The gateway build requires subgraph artifacts from a previous `build-subgraphs` run
- Trigger `build-subgraphs.yml` manually or push a change to a service
### Frontend Build Failures
**"VITE_* variables are empty"**
- Ensure all required variables are configured in repository settings
- Variables use `vars.*` context, not `secrets.*`
### Docker Push Failures
**"unauthorized: authentication required"**
- Verify `REGISTRY_TOKEN` has correct permissions
- Check that the token hasn't expired
## Local Testing
To test workflows locally before pushing:
```bash
# Install act (GitHub Actions local runner)
# Note: act has partial Gitea Actions compatibility
# Run CI workflow
act push -W .gitea/workflows/build.yml
# Run with specific event
act push --eventpath .gitea/test-event.json
```

187
Documentation/README.md Normal file
View File

@@ -0,0 +1,187 @@
# FictionArchive
A distributed microservices-based web application for managing fiction and novel content. Features include importing from external sources, multi-language translation, file storage, and user management.
## Architecture
FictionArchive uses a GraphQL Fusion gateway pattern to orchestrate multiple domain services with event-driven communication via RabbitMQ.
More information available in [ARCHITECTURE.md](ARCHITECTURE.md)
## Prerequisites
- .NET SDK 8.0+
- Node.js 20+
- Python 3 (for gateway build script)
- Docker & Docker Compose
- PostgreSQL 16+
- RabbitMQ 3+
**Required CLI Tools**
```bash
# Hot Chocolate Fusion CLI
dotnet tool install -g HotChocolate.Fusion.CommandLine
```
## Getting Started
### Local Development
1. **Clone the repository**
```bash
git clone <repository-url>
cd FictionArchive
```
2. **Start infrastructure** (PostgreSQL, RabbitMQ)
```bash
docker compose up -d postgres rabbitmq
```
3. **Build and run backend**
```bash
dotnet restore
dotnet build FictionArchive.sln
# Start services (in separate terminals or use a process manager)
dotnet run --project FictionArchive.Service.NovelService
dotnet run --project FictionArchive.Service.UserService
dotnet run --project FictionArchive.Service.TranslationService
dotnet run --project FictionArchive.Service.FileService
dotnet run --project FictionArchive.Service.SchedulerService
dotnet run --project FictionArchive.Service.AuthenticationService
# Start the gateway (builds fusion schema automatically)
dotnet run --project FictionArchive.API
```
4. **Build and run frontend**
```bash
cd fictionarchive-web
npm install
npm run codegen # Generate GraphQL types
npm run dev # Start dev server at http://localhost:5173
```
### Docker Deployment
1. **Create environment file**
```bash
cp .env.example .env
# Edit .env with your configuration
```
2. **Start all services**
```bash
docker compose up -d
```
## Configuration
### Environment Variables
Create a `.env` file in the project root:
```bash
# PostgreSQL
POSTGRES_USER=postgres
POSTGRES_PASSWORD=your-secure-password
# RabbitMQ
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=your-secure-password
# External Services
NOVELPIA_USERNAME=your-username
NOVELPIA_PASSWORD=your-password
DEEPL_API_KEY=your-api-key
# S3 Storage
S3_ENDPOINT=https://s3.example.com
S3_BUCKET=fictionarchive
S3_ACCESS_KEY=your-access-key
S3_SECRET_KEY=your-secret-key
# OIDC Authentication
OIDC_AUTHORITY=https://auth.example.com/application/o/fiction-archive/
OIDC_CLIENT_ID=your-client-id
```
### Frontend Environment
Create `fictionarchive-web/.env.local`:
```bash
VITE_GRAPHQL_URI=http://localhost:5234/graphql/
VITE_OIDC_AUTHORITY=https://auth.example.com/application/o/fiction-archive/
VITE_OIDC_CLIENT_ID=your-client-id
VITE_OIDC_REDIRECT_URI=http://localhost:5173/
VITE_OIDC_POST_LOGOUT_REDIRECT_URI=http://localhost:5173/
```
## Building the GraphQL Gateway
The API gateway uses Hot Chocolate Fusion to compose schemas from all subgraphs. The gateway schema is rebuilt automatically when building the API project.
**Manual rebuild:**
```bash
cd FictionArchive.API
python build_gateway.py
```
**Skip specific services** by adding them to `FictionArchive.API/gateway_skip.txt`:
```
FictionArchive.Service.NovelService.Tests
```
## CI/CD
The project uses Gitea Actions with the following workflows:
| Workflow | Trigger | Description |
|----------|---------|-------------|
| `build.yml` | Push/PR to master | CI checks - builds and tests |
| `build-subgraphs.yml` | Service changes on master | Builds subgraph `.fsp` packages |
| `build-gateway.yml` | Gateway changes or subgraph builds | Composes gateway and builds Docker image |
| `release.yml` | Tag `v*.*.*` | Builds and pushes all Docker images |
### Release Process
```bash
git tag v1.0.0
git push origin v1.0.0
```
## Project Structure
```
FictionArchive/
├── FictionArchive.sln
├── FictionArchive.Common/ # Shared enums and extensions
├── FictionArchive.Service.Shared/ # Shared infrastructure (EventBus, DB)
├── FictionArchive.API/ # GraphQL Fusion Gateway
├── FictionArchive.Service.NovelService/
├── FictionArchive.Service.UserService/
├── FictionArchive.Service.TranslationService/
├── FictionArchive.Service.FileService/
├── FictionArchive.Service.SchedulerService/
├── FictionArchive.Service.AuthenticationService/
├── FictionArchive.Service.NovelService.Tests/
├── fictionarchive-web/ # React frontend
├── docker-compose.yml
└── .gitea/workflows/ # CI/CD workflows
```
## Testing
```bash
# Run all tests
dotnet test FictionArchive.sln
# Run specific test project
dotnet test FictionArchive.Service.NovelService.Tests
```
## Documentation
- [ARCHITECTURE.md](ARCHITECTURE.md) - Detailed architecture documentation
- [AGENTS.md](AGENTS.md) - Development guidelines and coding standards

View File

@@ -7,15 +7,23 @@ EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["FictionArchive.API/FictionArchive.API.csproj", "FictionArchive.API/"]
COPY ["FictionArchive.Common/FictionArchive.Common.csproj", "FictionArchive.Common/"]
COPY ["FictionArchive.Service.Shared/FictionArchive.Service.Shared.csproj", "FictionArchive.Service.Shared/"]
RUN dotnet restore "FictionArchive.API/FictionArchive.API.csproj"
COPY . .
COPY FictionArchive.API/ FictionArchive.API/
COPY FictionArchive.Common/ FictionArchive.Common/
COPY FictionArchive.Service.Shared/ FictionArchive.Service.Shared/
WORKDIR "/src/FictionArchive.API"
RUN dotnet build "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/build
# Skip fusion build - gateway.fgp should be pre-composed in CI
RUN dotnet build "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/build -p:SkipFusionBuild=true
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
RUN dotnet publish "./FictionArchive.API.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false /p:SkipFusionBuild=true
FROM base AS final
WORKDIR /app

View File

@@ -22,9 +22,9 @@
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2"/>
</ItemGroup>
<!-- Builds the Fusion graph file before building the application itself -->
<Target Name="RunFusionBuild" BeforeTargets="BeforeBuild">
<Exec Command="python build_gateway.py" WorkingDirectory="$(ProjectDir)" />
<!-- Builds the Fusion graph file before building the application itself (skipped in CI) -->
<Target Name="RunFusionBuild" BeforeTargets="BeforeBuild" Condition="'$(SkipFusionBuild)' != 'true'">
<Exec Command="python build_gateway.py $(FusionBuildArgs)" WorkingDirectory="$(ProjectDir)" />
</Target>
<ItemGroup>

View File

@@ -1,12 +1,22 @@
#!/usr/bin/env python3
"""
Local development script for building the Fusion gateway.
This script is used for local development only. In CI/CD, subgraphs are built
separately and the gateway is composed from pre-built .fsp artifacts.
Usage:
python build_gateway.py
Requirements:
- .NET 8.0 SDK
- HotChocolate Fusion CLI (dotnet tool install -g HotChocolate.Fusion.CommandLine)
"""
import subprocess
import sys
import os
from pathlib import Path
# ----------------------------------------
# Helpers
# ----------------------------------------
def run(cmd, cwd=None):
"""Run a command and exit on failure."""
@@ -19,7 +29,7 @@ def run(cmd, cwd=None):
def load_skip_list(skip_file: Path):
if not skip_file.exists():
print(f"WARNING: skip-projects.txt not found at {skip_file}")
print(f"WARNING: gateway_skip.txt not found at {skip_file}")
return set()
lines = skip_file.read_text().splitlines()
@@ -53,7 +63,7 @@ print("----------------------------------------")
service_dirs = [
d for d in services_dir.glob("FictionArchive.Service.*")
if d.is_dir()
if d.is_dir() and (d / "subgraph-config.json").exists()
]
selected_services = []

View File

@@ -9,14 +9,8 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="9.0.11" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="9.0.11" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="9.0.0" />
<PackageReference Include="NodaTime" Version="3.2.2" />
</ItemGroup>
<ItemGroup>
<Reference Include="Microsoft.Extensions.Configuration.Abstractions" />
<Reference Include="Microsoft.Extensions.Hosting.Abstractions">
<HintPath>..\..\..\..\..\..\Program Files\dotnet\shared\Microsoft.AspNetCore.App\8.0.15\Microsoft.Extensions.Hosting.Abstractions.dll</HintPath>
</Reference>
</ItemGroup>
</Project>

10
README.md Normal file
View File

@@ -0,0 +1,10 @@
# FictionArchive
A distributed microservices-based web application for managing fiction and novel content.
## Documentation
- [README](Documentation/README.md) - Getting started and project overview
- [ARCHITECTURE](Documentation/ARCHITECTURE.md) - System architecture and design
- [CICD](Documentation/CICD.md) - CI/CD pipeline configuration
- [AGENTS](Documentation/AGENTS.md) - Development guidelines and coding standards

177
docker-compose.yml Normal file
View File

@@ -0,0 +1,177 @@
services:
  # ===========================================
  # Infrastructure
  # ===========================================
  postgres:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      # NOTE(review): probes the hard-coded "postgres" role even when
      # POSTGRES_USER overrides it. pg_isready only checks that the server
      # accepts connections (it does not authenticate), so this still passes,
      # but consider "pg_isready -U $${POSTGRES_USER}" to avoid
      # role-does-not-exist noise in the server log — confirm.
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3-management-alpine
    environment:
      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-guest}
      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD:-guest}
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "check_running"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  # ===========================================
  # Backend Services
  # Each service gets its own database on the shared Postgres instance and
  # talks to the others via RabbitMQ (connection strings injected below).
  # ===========================================
  novel-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.NovelService/Dockerfile
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_NovelService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      Novelpia__Username: ${NOVELPIA_USERNAME}
      Novelpia__Password: ${NOVELPIA_PASSWORD}
      # Placeholder cover image shown while the real cover upload is pending.
      NovelUpdateService__PendingImageUrl: https://files.fictionarchive.orfl.xyz/api/pendingupload.png
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  translation-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.TranslationService/Dockerfile
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_TranslationService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      DeepL__ApiKey: ${DEEPL_API_KEY}
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  scheduler-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.SchedulerService/Dockerfile
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_SchedulerService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  user-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.UserService/Dockerfile
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_UserService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    depends_on:
      postgres:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  # Stateless: no database connection, only the event bus.
  authentication-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.AuthenticationService/Dockerfile
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    depends_on:
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  file-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.FileService/Dockerfile
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      S3__Endpoint: ${S3_ENDPOINT:-https://s3.orfl.xyz}
      S3__Bucket: ${S3_BUCKET:-fictionarchive}
      S3__AccessKey: ${S3_ACCESS_KEY}
      S3__SecretKey: ${S3_SECRET_KEY}
      Proxy__BaseUrl: https://files.orfl.xyz/api
    # NOTE(review): no traefik service is defined in this compose file; these
    # labels assume an external Traefik instance (e.g. on a shared Docker
    # network) picks them up — confirm the deployment topology.
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.file-service.rule=Host(`files.orfl.xyz`)"
      - "traefik.http.routers.file-service.entrypoints=websecure"
      - "traefik.http.routers.file-service.tls.certresolver=letsencrypt"
      - "traefik.http.services.file-service.loadbalancer.server.port=8080"
    depends_on:
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  # ===========================================
  # API Gateway
  # ===========================================
  api-gateway:
    build:
      context: .
      dockerfile: FictionArchive.API/Dockerfile
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.api-gateway.rule=Host(`api.fictionarchive.orfl.xyz`)"
      - "traefik.http.routers.api-gateway.entrypoints=websecure"
      - "traefik.http.routers.api-gateway.tls.certresolver=letsencrypt"
      - "traefik.http.services.api-gateway.loadbalancer.server.port=8080"
    # Short-form depends_on only orders container *start*, it does not wait
    # for the subgraph services to be ready to serve requests.
    depends_on:
      - novel-service
      - translation-service
      - scheduler-service
      - user-service
      - authentication-service
      - file-service
    restart: unless-stopped

  # ===========================================
  # Frontend
  # ===========================================
  frontend:
    build:
      context: ./fictionarchive-web
      dockerfile: Dockerfile
      # Vite inlines VITE_* values at build time, so they must be passed as
      # build args here rather than as runtime environment variables.
      args:
        VITE_GRAPHQL_URI: https://api.fictionarchive.orfl.xyz/graphql/
        VITE_OIDC_AUTHORITY: ${OIDC_AUTHORITY:-https://auth.orfl.xyz/application/o/fiction-archive/}
        VITE_OIDC_CLIENT_ID: ${OIDC_CLIENT_ID}
        VITE_OIDC_REDIRECT_URI: https://fictionarchive.orfl.xyz/
        VITE_OIDC_POST_LOGOUT_REDIRECT_URI: https://fictionarchive.orfl.xyz/
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.frontend.rule=Host(`fictionarchive.orfl.xyz`)"
      - "traefik.http.routers.frontend.entrypoints=websecure"
      - "traefik.http.routers.frontend.tls.certresolver=letsencrypt"
      - "traefik.http.services.frontend.loadbalancer.server.port=80"
    restart: unless-stopped

volumes:
  postgres_data:
  rabbitmq_data:
  # NOTE(review): not mounted by any service in this file — presumably used by
  # an external Traefik for ACME storage; remove if unused.
  letsencrypt:

View File

@@ -12,9 +12,6 @@ dist
dist-ssr
*.local
# Generated GraphQL artifacts
src/__generated__/
# Editor directories and files
.vscode/*
!.vscode/extensions.json

View File

@@ -0,0 +1,32 @@
# --- Build stage: compile the Vite/React app to static assets ---
FROM node:20-alpine AS build
WORKDIR /app
# Build arguments for Vite environment variables.
# Vite inlines VITE_* values into the bundle at build time, so they must be
# provided here as build args rather than at container runtime.
ARG VITE_GRAPHQL_URI
ARG VITE_OIDC_AUTHORITY
ARG VITE_OIDC_CLIENT_ID
ARG VITE_OIDC_REDIRECT_URI
ARG VITE_OIDC_POST_LOGOUT_REDIRECT_URI
# Set environment variables for build (picked up by `npm run build` / Vite)
ENV VITE_GRAPHQL_URI=$VITE_GRAPHQL_URI
ENV VITE_OIDC_AUTHORITY=$VITE_OIDC_AUTHORITY
ENV VITE_OIDC_CLIENT_ID=$VITE_OIDC_CLIENT_ID
ENV VITE_OIDC_REDIRECT_URI=$VITE_OIDC_REDIRECT_URI
ENV VITE_OIDC_POST_LOGOUT_REDIRECT_URI=$VITE_OIDC_POST_LOGOUT_REDIRECT_URI
# Copy lockfiles first so the npm ci layer is cached across source-only changes.
COPY package*.json ./
RUN npm ci
COPY . .
# NOTE(review): assumes the generated GraphQL types (src/__generated__/) are
# committed, since the prebuild codegen step was removed — confirm.
RUN npm run build
# --- Runtime stage: serve the static bundle with nginx ---
FROM nginx:alpine
COPY --from=build /app/dist /usr/share/nginx/html
COPY nginx.conf /etc/nginx/conf.d/default.conf
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]

View File

@@ -25,8 +25,8 @@ VITE_CODEGEN_TOKEN=your_api_token
## Scripts
- `npm run dev`: start Vite dev server.
- `npm run build`: type-check + build (runs codegen first via `prebuild`).
- `npm run codegen`: generate typed hooks from `src/**/*.graphql` into `src/__generated__/graphql.ts`.
- `npm run build`: type-check + production build.
- `npm run codegen`: generate typed hooks from `src/**/*.graphql` into `src/__generated__/graphql.ts`. **Run this manually after changing GraphQL operations or when the gateway schema changes.**
## Project notes
@@ -39,4 +39,4 @@ VITE_CODEGEN_TOKEN=your_api_token
- Default schema URL: `CODEGEN_SCHEMA_URL` (falls back to `VITE_GRAPHQL_URI`, then `https://localhost:5001/graphql`).
- Add `VITE_CODEGEN_TOKEN` (or `CODEGEN_TOKEN`) if your gateway requires a bearer token during introspection.
- Generated outputs land in `src/__generated__/graphql.ts` (git-ignored). Run `npm run codegen` after schema/operation changes or rely on `npm run build` (runs `prebuild`).
- Generated outputs land in `src/__generated__/graphql.ts` (committed to git). Run `npm run codegen` after schema/operation changes.

View File

@@ -22,15 +22,17 @@ const config: CodegenConfig = {
plugins: [
'typescript',
'typescript-operations',
'typescript-react-apollo',
'typed-document-node',
],
config: {
withHooks: true,
avoidOptionals: true,
dedupeFragments: true,
avoidOptionals: {
field: true,
inputValue: false,
},
enumsAsConst: true,
maybeValue: 'T | null',
skipTypename: true,
apolloReactHooksImportFrom: '@apollo/client/react',
useTypeImports: true,
},
},
},

View File

@@ -6,7 +6,7 @@ import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'
export default defineConfig([
globalIgnores(['dist']),
globalIgnores(['dist', 'src/__generated__']),
{
files: ['**/*.{ts,tsx}'],
extends: [

View File

@@ -0,0 +1,21 @@
# Serves the built SPA bundle copied into /usr/share/nginx/html by the
# frontend Dockerfile.
server {
    listen 80;
    server_name localhost;

    root /usr/share/nginx/html;
    index index.html;

    # Gzip compression for text-based assets
    gzip on;
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;

    # Handle SPA routing - serve index.html for all routes so client-side
    # (react-router) paths work on hard refresh / deep links.
    location / {
        try_files $uri $uri/ /index.html;
    }

    # Cache static assets aggressively. "immutable" is only safe because this
    # location matches asset extensions, never index.html — presumably the
    # bundler emits content-hashed filenames for these; verify before reuse.
    location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
        expires 1y;
        add_header Cache-Control "public, immutable";
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -6,7 +6,6 @@
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"prebuild": "npm run codegen",
"codegen": "graphql-codegen --config codegen.ts -r dotenv/config --use-system-ca",
"lint": "eslint .",
"preview": "vite preview"
@@ -17,18 +16,18 @@
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"graphql": "^16.12.0",
"react-router-dom": "^6.27.0",
"oidc-client-ts": "^3.4.1",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-router-dom": "^6.27.0",
"tailwind-merge": "^2.5.4"
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@graphql-codegen/cli": "^5.0.3",
"@graphql-codegen/typed-document-node": "^6.1.1",
"@graphql-codegen/typescript": "^4.0.9",
"@graphql-codegen/typescript-operations": "^4.0.9",
"@graphql-codegen/typescript-react-apollo": "^4.0.9",
"@eslint/js": "^9.39.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.5",
"@types/react-dom": "^19.2.3",

View File

@@ -0,0 +1,774 @@
import type { TypedDocumentNode as DocumentNode } from '@graphql-typed-document-node/core';
export type Maybe<T> = T | null;
export type InputMaybe<T> = T | null;
export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
export type MakeEmpty<T extends { [key: string]: unknown }, K extends keyof T> = { [_ in K]?: never };
export type Incremental<T> = T | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? T[P] : never };
/** All built-in and custom scalars, mapped to their actual values */
export type Scalars = {
ID: { input: string; output: string; }
String: { input: string; output: string; }
Boolean: { input: boolean; output: boolean; }
Int: { input: number; output: number; }
Float: { input: number; output: number; }
Instant: { input: any; output: any; }
UUID: { input: any; output: any; }
UnsignedInt: { input: any; output: any; }
};
export type Chapter = {
body: LocalizationKey;
createdTime: Scalars['Instant']['output'];
id: Scalars['UnsignedInt']['output'];
images: Array<Image>;
lastUpdatedTime: Scalars['Instant']['output'];
name: LocalizationKey;
order: Scalars['UnsignedInt']['output'];
revision: Scalars['UnsignedInt']['output'];
url: Maybe<Scalars['String']['output']>;
};
export type ChapterFilterInput = {
and?: InputMaybe<Array<ChapterFilterInput>>;
body?: InputMaybe<LocalizationKeyFilterInput>;
createdTime?: InputMaybe<InstantFilterInput>;
id?: InputMaybe<UnsignedIntOperationFilterInputType>;
images?: InputMaybe<ListFilterInputTypeOfImageFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
name?: InputMaybe<LocalizationKeyFilterInput>;
or?: InputMaybe<Array<ChapterFilterInput>>;
order?: InputMaybe<UnsignedIntOperationFilterInputType>;
revision?: InputMaybe<UnsignedIntOperationFilterInputType>;
url?: InputMaybe<StringOperationFilterInput>;
};
export type ChapterPullRequestedEvent = {
chapterNumber: Scalars['UnsignedInt']['output'];
novelId: Scalars['UnsignedInt']['output'];
};
export type ChapterSortInput = {
body?: InputMaybe<LocalizationKeySortInput>;
createdTime?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
name?: InputMaybe<LocalizationKeySortInput>;
order?: InputMaybe<SortEnumType>;
revision?: InputMaybe<SortEnumType>;
url?: InputMaybe<SortEnumType>;
};
export type DeleteJobError = KeyNotFoundError;
export type DeleteJobInput = {
jobKey: Scalars['String']['input'];
};
export type DeleteJobPayload = {
boolean: Maybe<Scalars['Boolean']['output']>;
errors: Maybe<Array<DeleteJobError>>;
};
export type DuplicateNameError = Error & {
message: Scalars['String']['output'];
};
export type Error = {
message: Scalars['String']['output'];
};
export type FetchChapterContentsInput = {
chapterNumber: Scalars['UnsignedInt']['input'];
novelId: Scalars['UnsignedInt']['input'];
};
export type FetchChapterContentsPayload = {
chapterPullRequestedEvent: Maybe<ChapterPullRequestedEvent>;
};
export type FormatError = Error & {
message: Scalars['String']['output'];
};
export type Image = {
chapter: Maybe<Chapter>;
createdTime: Scalars['Instant']['output'];
id: Scalars['UUID']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
newPath: Maybe<Scalars['String']['output']>;
originalPath: Scalars['String']['output'];
};
export type ImageFilterInput = {
and?: InputMaybe<Array<ImageFilterInput>>;
chapter?: InputMaybe<ChapterFilterInput>;
createdTime?: InputMaybe<InstantFilterInput>;
id?: InputMaybe<UuidOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
newPath?: InputMaybe<StringOperationFilterInput>;
or?: InputMaybe<Array<ImageFilterInput>>;
originalPath?: InputMaybe<StringOperationFilterInput>;
};
export type ImageSortInput = {
chapter?: InputMaybe<ChapterSortInput>;
createdTime?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
newPath?: InputMaybe<SortEnumType>;
originalPath?: InputMaybe<SortEnumType>;
};
export type ImportNovelInput = {
novelUrl: Scalars['String']['input'];
};
export type ImportNovelPayload = {
novelUpdateRequestedEvent: Maybe<NovelUpdateRequestedEvent>;
};
export type InstantFilterInput = {
and?: InputMaybe<Array<InstantFilterInput>>;
or?: InputMaybe<Array<InstantFilterInput>>;
};
export type JobKey = {
group: Scalars['String']['output'];
name: Scalars['String']['output'];
};
export type JobPersistenceError = Error & {
message: Scalars['String']['output'];
};
export type KeyNotFoundError = Error & {
message: Scalars['String']['output'];
};
export type KeyValuePairOfStringAndString = {
key: Scalars['String']['output'];
value: Scalars['String']['output'];
};
export const Language = {
Ch: 'CH',
En: 'EN',
Ja: 'JA',
Kr: 'KR'
} as const;
export type Language = typeof Language[keyof typeof Language];
export type LanguageOperationFilterInput = {
eq?: InputMaybe<Language>;
in?: InputMaybe<Array<Language>>;
neq?: InputMaybe<Language>;
nin?: InputMaybe<Array<Language>>;
};
export type ListFilterInputTypeOfChapterFilterInput = {
all?: InputMaybe<ChapterFilterInput>;
any?: InputMaybe<Scalars['Boolean']['input']>;
none?: InputMaybe<ChapterFilterInput>;
some?: InputMaybe<ChapterFilterInput>;
};
export type ListFilterInputTypeOfImageFilterInput = {
all?: InputMaybe<ImageFilterInput>;
any?: InputMaybe<Scalars['Boolean']['input']>;
none?: InputMaybe<ImageFilterInput>;
some?: InputMaybe<ImageFilterInput>;
};
export type ListFilterInputTypeOfLocalizationTextFilterInput = {
all?: InputMaybe<LocalizationTextFilterInput>;
any?: InputMaybe<Scalars['Boolean']['input']>;
none?: InputMaybe<LocalizationTextFilterInput>;
some?: InputMaybe<LocalizationTextFilterInput>;
};
export type ListFilterInputTypeOfNovelFilterInput = {
all?: InputMaybe<NovelFilterInput>;
any?: InputMaybe<Scalars['Boolean']['input']>;
none?: InputMaybe<NovelFilterInput>;
some?: InputMaybe<NovelFilterInput>;
};
export type ListFilterInputTypeOfNovelTagFilterInput = {
all?: InputMaybe<NovelTagFilterInput>;
any?: InputMaybe<Scalars['Boolean']['input']>;
none?: InputMaybe<NovelTagFilterInput>;
some?: InputMaybe<NovelTagFilterInput>;
};
export type LocalizationKey = {
createdTime: Scalars['Instant']['output'];
id: Scalars['UUID']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
texts: Array<LocalizationText>;
};
export type LocalizationKeyFilterInput = {
and?: InputMaybe<Array<LocalizationKeyFilterInput>>;
createdTime?: InputMaybe<InstantFilterInput>;
id?: InputMaybe<UuidOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
or?: InputMaybe<Array<LocalizationKeyFilterInput>>;
texts?: InputMaybe<ListFilterInputTypeOfLocalizationTextFilterInput>;
};
export type LocalizationKeySortInput = {
createdTime?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
};
export type LocalizationText = {
createdTime: Scalars['Instant']['output'];
id: Scalars['UUID']['output'];
language: Language;
lastUpdatedTime: Scalars['Instant']['output'];
text: Scalars['String']['output'];
translationEngine: Maybe<TranslationEngine>;
};
export type LocalizationTextFilterInput = {
and?: InputMaybe<Array<LocalizationTextFilterInput>>;
createdTime?: InputMaybe<InstantFilterInput>;
id?: InputMaybe<UuidOperationFilterInput>;
language?: InputMaybe<LanguageOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
or?: InputMaybe<Array<LocalizationTextFilterInput>>;
text?: InputMaybe<StringOperationFilterInput>;
translationEngine?: InputMaybe<TranslationEngineFilterInput>;
};
export type Mutation = {
deleteJob: DeleteJobPayload;
fetchChapterContents: FetchChapterContentsPayload;
importNovel: ImportNovelPayload;
registerUser: RegisterUserPayload;
runJob: RunJobPayload;
scheduleEventJob: ScheduleEventJobPayload;
translateText: TranslateTextPayload;
};
export type MutationDeleteJobArgs = {
input: DeleteJobInput;
};
export type MutationFetchChapterContentsArgs = {
input: FetchChapterContentsInput;
};
export type MutationImportNovelArgs = {
input: ImportNovelInput;
};
export type MutationRegisterUserArgs = {
input: RegisterUserInput;
};
export type MutationRunJobArgs = {
input: RunJobInput;
};
export type MutationScheduleEventJobArgs = {
input: ScheduleEventJobInput;
};
export type MutationTranslateTextArgs = {
input: TranslateTextInput;
};
export type Novel = {
author: Person;
chapters: Array<Chapter>;
coverImage: Maybe<Image>;
createdTime: Scalars['Instant']['output'];
description: LocalizationKey;
externalId: Scalars['String']['output'];
id: Scalars['UnsignedInt']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
name: LocalizationKey;
rawLanguage: Language;
rawStatus: NovelStatus;
source: Source;
statusOverride: Maybe<NovelStatus>;
tags: Array<NovelTag>;
url: Scalars['String']['output'];
};
export type NovelFilterInput = {
and?: InputMaybe<Array<NovelFilterInput>>;
author?: InputMaybe<PersonFilterInput>;
chapters?: InputMaybe<ListFilterInputTypeOfChapterFilterInput>;
coverImage?: InputMaybe<ImageFilterInput>;
createdTime?: InputMaybe<InstantFilterInput>;
description?: InputMaybe<LocalizationKeyFilterInput>;
externalId?: InputMaybe<StringOperationFilterInput>;
id?: InputMaybe<UnsignedIntOperationFilterInputType>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
name?: InputMaybe<LocalizationKeyFilterInput>;
or?: InputMaybe<Array<NovelFilterInput>>;
rawLanguage?: InputMaybe<LanguageOperationFilterInput>;
rawStatus?: InputMaybe<NovelStatusOperationFilterInput>;
source?: InputMaybe<SourceFilterInput>;
statusOverride?: InputMaybe<NullableOfNovelStatusOperationFilterInput>;
tags?: InputMaybe<ListFilterInputTypeOfNovelTagFilterInput>;
url?: InputMaybe<StringOperationFilterInput>;
};
export type NovelSortInput = {
author?: InputMaybe<PersonSortInput>;
coverImage?: InputMaybe<ImageSortInput>;
createdTime?: InputMaybe<SortEnumType>;
description?: InputMaybe<LocalizationKeySortInput>;
externalId?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
name?: InputMaybe<LocalizationKeySortInput>;
rawLanguage?: InputMaybe<SortEnumType>;
rawStatus?: InputMaybe<SortEnumType>;
source?: InputMaybe<SourceSortInput>;
statusOverride?: InputMaybe<SortEnumType>;
url?: InputMaybe<SortEnumType>;
};
export const NovelStatus = {
Abandoned: 'ABANDONED',
Completed: 'COMPLETED',
Hiatus: 'HIATUS',
InProgress: 'IN_PROGRESS',
Unknown: 'UNKNOWN'
} as const;
export type NovelStatus = typeof NovelStatus[keyof typeof NovelStatus];
export type NovelStatusOperationFilterInput = {
eq?: InputMaybe<NovelStatus>;
in?: InputMaybe<Array<NovelStatus>>;
neq?: InputMaybe<NovelStatus>;
nin?: InputMaybe<Array<NovelStatus>>;
};
export type NovelTag = {
createdTime: Scalars['Instant']['output'];
displayName: LocalizationKey;
id: Scalars['UnsignedInt']['output'];
key: Scalars['String']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
novels: Array<Novel>;
source: Maybe<Source>;
tagType: TagType;
};
export type NovelTagFilterInput = {
and?: InputMaybe<Array<NovelTagFilterInput>>;
createdTime?: InputMaybe<InstantFilterInput>;
displayName?: InputMaybe<LocalizationKeyFilterInput>;
id?: InputMaybe<UnsignedIntOperationFilterInputType>;
key?: InputMaybe<StringOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
novels?: InputMaybe<ListFilterInputTypeOfNovelFilterInput>;
or?: InputMaybe<Array<NovelTagFilterInput>>;
source?: InputMaybe<SourceFilterInput>;
tagType?: InputMaybe<TagTypeOperationFilterInput>;
};
export type NovelUpdateRequestedEvent = {
novelUrl: Scalars['String']['output'];
};
/** A connection to a list of items. */
export type NovelsConnection = {
/** A list of edges. */
edges: Maybe<Array<NovelsEdge>>;
/** A flattened list of the nodes. */
nodes: Maybe<Array<Novel>>;
/** Information to aid in pagination. */
pageInfo: PageInfo;
};
/** An edge in a connection. */
export type NovelsEdge = {
/** A cursor for use in pagination. */
cursor: Scalars['String']['output'];
/** The item at the end of the edge. */
node: Novel;
};
export type NullableOfNovelStatusOperationFilterInput = {
eq?: InputMaybe<NovelStatus>;
in?: InputMaybe<Array<InputMaybe<NovelStatus>>>;
neq?: InputMaybe<NovelStatus>;
nin?: InputMaybe<Array<InputMaybe<NovelStatus>>>;
};
/** Information about pagination in a connection. */
export type PageInfo = {
/** When paginating forwards, the cursor to continue. */
endCursor: Maybe<Scalars['String']['output']>;
/** Indicates whether more edges exist following the set defined by the clients arguments. */
hasNextPage: Scalars['Boolean']['output'];
/** Indicates whether more edges exist prior the set defined by the clients arguments. */
hasPreviousPage: Scalars['Boolean']['output'];
/** When paginating backwards, the cursor to continue. */
startCursor: Maybe<Scalars['String']['output']>;
};
/** A person entity (e.g. author/contributor) with a localized name and optional external URL. */
export type Person = {
createdTime: Scalars['Instant']['output'];
externalUrl: Maybe<Scalars['String']['output']>;
id: Scalars['UnsignedInt']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
name: LocalizationKey;
};
/** Boolean filter over Person fields; clauses compose via `and` / `or`. */
export type PersonFilterInput = {
and?: InputMaybe<Array<PersonFilterInput>>;
createdTime?: InputMaybe<InstantFilterInput>;
externalUrl?: InputMaybe<StringOperationFilterInput>;
id?: InputMaybe<UnsignedIntOperationFilterInputType>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
name?: InputMaybe<LocalizationKeyFilterInput>;
or?: InputMaybe<Array<PersonFilterInput>>;
};
/** Per-field sort directions for Person queries. */
export type PersonSortInput = {
createdTime?: InputMaybe<SortEnumType>;
externalUrl?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
name?: InputMaybe<LocalizationKeySortInput>;
};
/** Root query type: scheduler jobs, paginated novels, translation engines/requests, users. */
export type Query = {
jobs: Array<SchedulerJob>;
novels: Maybe<NovelsConnection>;
translationEngines: Array<TranslationEngineDescriptor>;
translationRequests: Maybe<TranslationRequestsConnection>;
users: Array<User>;
};
/** Arguments for Query.novels: Relay forward/backward pagination plus ordering and filtering. */
export type QueryNovelsArgs = {
after?: InputMaybe<Scalars['String']['input']>;
before?: InputMaybe<Scalars['String']['input']>;
first?: InputMaybe<Scalars['Int']['input']>;
last?: InputMaybe<Scalars['Int']['input']>;
order?: InputMaybe<Array<NovelSortInput>>;
where?: InputMaybe<NovelFilterInput>;
};
/** Arguments for Query.translationEngines: ordering and filtering only (no pagination). */
export type QueryTranslationEnginesArgs = {
order?: InputMaybe<Array<TranslationEngineDescriptorSortInput>>;
where?: InputMaybe<TranslationEngineDescriptorFilterInput>;
};
/** Arguments for Query.translationRequests: Relay pagination plus ordering and filtering. */
export type QueryTranslationRequestsArgs = {
after?: InputMaybe<Scalars['String']['input']>;
before?: InputMaybe<Scalars['String']['input']>;
first?: InputMaybe<Scalars['Int']['input']>;
last?: InputMaybe<Scalars['Int']['input']>;
order?: InputMaybe<Array<TranslationRequestSortInput>>;
where?: InputMaybe<TranslationRequestFilterInput>;
};
/** Input for the registerUser mutation; inviter's OAuth provider id is optional. */
export type RegisterUserInput = {
email: Scalars['String']['input'];
inviterOAuthProviderId?: InputMaybe<Scalars['String']['input']>;
oAuthProviderId: Scalars['String']['input'];
username: Scalars['String']['input'];
};
/** Payload of registerUser; `user` is null when registration did not produce one. */
export type RegisterUserPayload = {
user: Maybe<User>;
};
/** Union of errors the runJob mutation can return. */
export type RunJobError = JobPersistenceError;
/** Input for runJob: identifies the scheduler job to run by key. */
export type RunJobInput = {
jobKey: Scalars['String']['input'];
};
/** Payload of runJob: a success flag and/or a list of errors. */
export type RunJobPayload = {
boolean: Maybe<Scalars['Boolean']['output']>;
errors: Maybe<Array<RunJobError>>;
};
/** Union of errors the scheduleEventJob mutation can return. */
export type ScheduleEventJobError = DuplicateNameError | FormatError;
/** Input for scheduleEventJob: cron schedule plus the event type/data to emit. */
export type ScheduleEventJobInput = {
cronSchedule: Scalars['String']['input'];
description: Scalars['String']['input'];
eventData: Scalars['String']['input'];
eventType: Scalars['String']['input'];
key: Scalars['String']['input'];
};
/** Payload of scheduleEventJob: the created job or a list of errors. */
export type ScheduleEventJobPayload = {
errors: Maybe<Array<ScheduleEventJobError>>;
schedulerJob: Maybe<SchedulerJob>;
};
/** A scheduled job: cron schedule(s), description, key/value job data, key, and job type name. */
export type SchedulerJob = {
cronSchedule: Array<Scalars['String']['output']>;
description: Scalars['String']['output'];
jobData: Array<KeyValuePairOfStringAndString>;
jobKey: JobKey;
jobTypeName: Scalars['String']['output'];
};
/** Sort direction as a const object + literal-union type (avoids TS `enum`). */
export const SortEnumType = {
Asc: 'ASC',
Desc: 'DESC'
} as const;
/** Union of the SortEnumType values: 'ASC' | 'DESC'. */
export type SortEnumType = typeof SortEnumType[keyof typeof SortEnumType];
/** A content source (site) novels are scraped/imported from. */
export type Source = {
createdTime: Scalars['Instant']['output'];
id: Scalars['UnsignedInt']['output'];
key: Scalars['String']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
name: Scalars['String']['output'];
url: Scalars['String']['output'];
};
/** Boolean filter over Source fields; clauses compose via `and` / `or`. */
export type SourceFilterInput = {
and?: InputMaybe<Array<SourceFilterInput>>;
createdTime?: InputMaybe<InstantFilterInput>;
id?: InputMaybe<UnsignedIntOperationFilterInputType>;
key?: InputMaybe<StringOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
name?: InputMaybe<StringOperationFilterInput>;
or?: InputMaybe<Array<SourceFilterInput>>;
url?: InputMaybe<StringOperationFilterInput>;
};
/** Per-field sort directions for Source queries. */
export type SourceSortInput = {
createdTime?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
key?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
name?: InputMaybe<SortEnumType>;
url?: InputMaybe<SortEnumType>;
};
/** String comparison filter: equality, membership, substring/prefix/suffix and their negations. */
export type StringOperationFilterInput = {
and?: InputMaybe<Array<StringOperationFilterInput>>;
contains?: InputMaybe<Scalars['String']['input']>;
endsWith?: InputMaybe<Scalars['String']['input']>;
eq?: InputMaybe<Scalars['String']['input']>;
in?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
ncontains?: InputMaybe<Scalars['String']['input']>;
nendsWith?: InputMaybe<Scalars['String']['input']>;
neq?: InputMaybe<Scalars['String']['input']>;
nin?: InputMaybe<Array<InputMaybe<Scalars['String']['input']>>>;
nstartsWith?: InputMaybe<Scalars['String']['input']>;
or?: InputMaybe<Array<StringOperationFilterInput>>;
startsWith?: InputMaybe<Scalars['String']['input']>;
};
/** Tag category as a const object + literal-union type (avoids TS `enum`). */
export const TagType = {
External: 'EXTERNAL',
Genre: 'GENRE',
System: 'SYSTEM',
UserDefined: 'USER_DEFINED'
} as const;
/** Union of the TagType values. */
export type TagType = typeof TagType[keyof typeof TagType];
/** Equality/membership filter over TagType values. */
export type TagTypeOperationFilterInput = {
eq?: InputMaybe<TagType>;
in?: InputMaybe<Array<TagType>>;
neq?: InputMaybe<TagType>;
nin?: InputMaybe<Array<TagType>>;
};
/** Input for translateText: source/target languages, the text, and which engine to use. */
export type TranslateTextInput = {
from: Language;
text: Scalars['String']['input'];
to: Language;
translationEngineKey: Scalars['String']['input'];
};
/** Payload of translateText; null when no result was produced. */
export type TranslateTextPayload = {
translationResult: Maybe<TranslationResult>;
};
/** A persisted translation engine record (audited entity keyed by `key`). */
export type TranslationEngine = {
createdTime: Scalars['Instant']['output'];
id: Scalars['UnsignedInt']['output'];
key: Scalars['String']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
};
/** Lightweight descriptor of an available engine, as listed by Query.translationEngines. */
export type TranslationEngineDescriptor = {
displayName: Scalars['String']['output'];
key: Scalars['String']['output'];
};
/** Boolean filter over TranslationEngineDescriptor fields. */
export type TranslationEngineDescriptorFilterInput = {
and?: InputMaybe<Array<TranslationEngineDescriptorFilterInput>>;
displayName?: InputMaybe<StringOperationFilterInput>;
key?: InputMaybe<StringOperationFilterInput>;
or?: InputMaybe<Array<TranslationEngineDescriptorFilterInput>>;
};
/** Per-field sort directions for TranslationEngineDescriptor queries. */
export type TranslationEngineDescriptorSortInput = {
displayName?: InputMaybe<SortEnumType>;
key?: InputMaybe<SortEnumType>;
};
/** Boolean filter over TranslationEngine fields. */
export type TranslationEngineFilterInput = {
and?: InputMaybe<Array<TranslationEngineFilterInput>>;
createdTime?: InputMaybe<InstantFilterInput>;
id?: InputMaybe<UnsignedIntOperationFilterInputType>;
key?: InputMaybe<StringOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
or?: InputMaybe<Array<TranslationEngineFilterInput>>;
};
/** A translation request record: original/translated text, languages, engine, status, billing count. */
export type TranslationRequest = {
billedCharacterCount: Scalars['UnsignedInt']['output'];
createdTime: Scalars['Instant']['output'];
from: Language;
id: Scalars['UUID']['output'];
lastUpdatedTime: Scalars['Instant']['output'];
originalText: Scalars['String']['output'];
status: TranslationRequestStatus;
to: Language;
translatedText: Maybe<Scalars['String']['output']>;
translationEngineKey: Scalars['String']['output'];
};
/** Boolean filter over TranslationRequest fields; clauses compose via `and` / `or`. */
export type TranslationRequestFilterInput = {
and?: InputMaybe<Array<TranslationRequestFilterInput>>;
billedCharacterCount?: InputMaybe<UnsignedIntOperationFilterInputType>;
createdTime?: InputMaybe<InstantFilterInput>;
from?: InputMaybe<LanguageOperationFilterInput>;
id?: InputMaybe<UuidOperationFilterInput>;
lastUpdatedTime?: InputMaybe<InstantFilterInput>;
or?: InputMaybe<Array<TranslationRequestFilterInput>>;
originalText?: InputMaybe<StringOperationFilterInput>;
status?: InputMaybe<TranslationRequestStatusOperationFilterInput>;
to?: InputMaybe<LanguageOperationFilterInput>;
translatedText?: InputMaybe<StringOperationFilterInput>;
translationEngineKey?: InputMaybe<StringOperationFilterInput>;
};
/** Per-field sort directions for TranslationRequest queries. */
export type TranslationRequestSortInput = {
billedCharacterCount?: InputMaybe<SortEnumType>;
createdTime?: InputMaybe<SortEnumType>;
from?: InputMaybe<SortEnumType>;
id?: InputMaybe<SortEnumType>;
lastUpdatedTime?: InputMaybe<SortEnumType>;
originalText?: InputMaybe<SortEnumType>;
status?: InputMaybe<SortEnumType>;
to?: InputMaybe<SortEnumType>;
translatedText?: InputMaybe<SortEnumType>;
translationEngineKey?: InputMaybe<SortEnumType>;
};
/** Request lifecycle status as a const object + literal-union type (avoids TS `enum`). */
export const TranslationRequestStatus = {
Failed: 'FAILED',
Pending: 'PENDING',
Success: 'SUCCESS'
} as const;
/** Union of the TranslationRequestStatus values. */
export type TranslationRequestStatus = typeof TranslationRequestStatus[keyof typeof TranslationRequestStatus];
/** Equality/membership filter over TranslationRequestStatus values. */
export type TranslationRequestStatusOperationFilterInput = {
eq?: InputMaybe<TranslationRequestStatus>;
in?: InputMaybe<Array<TranslationRequestStatus>>;
neq?: InputMaybe<TranslationRequestStatus>;
nin?: InputMaybe<Array<TranslationRequestStatus>>;
};
/** A connection to a list of items. (Relay-style pagination over TranslationRequest.) */
export type TranslationRequestsConnection = {
/** A list of edges. */
edges: Maybe<Array<TranslationRequestsEdge>>;
/** A flattened list of the nodes. */
nodes: Maybe<Array<TranslationRequest>>;
/** Information to aid in pagination. */
pageInfo: PageInfo;
};
/** An edge in a connection. (Pairs a TranslationRequest with its pagination cursor.) */
export type TranslationRequestsEdge = {
/** A cursor for use in pagination. */
cursor: Scalars['String']['output'];
/** The item at the end of the edge. */
node: TranslationRequest;
};
/** Immediate result of a translation (unlike TranslationRequest, carries no id/timestamps). */
export type TranslationResult = {
billedCharacterCount: Scalars['UnsignedInt']['output'];
from: Language;
originalText: Scalars['String']['output'];
status: TranslationRequestStatus;
to: Language;
translatedText: Maybe<Scalars['String']['output']>;
translationEngineKey: Scalars['String']['output'];
};
/** Comparison filter over UnsignedInt values: equality, ordering, membership, and negations. */
export type UnsignedIntOperationFilterInputType = {
eq?: InputMaybe<Scalars['UnsignedInt']['input']>;
gt?: InputMaybe<Scalars['UnsignedInt']['input']>;
gte?: InputMaybe<Scalars['UnsignedInt']['input']>;
in?: InputMaybe<Array<InputMaybe<Scalars['UnsignedInt']['input']>>>;
lt?: InputMaybe<Scalars['UnsignedInt']['input']>;
lte?: InputMaybe<Scalars['UnsignedInt']['input']>;
neq?: InputMaybe<Scalars['UnsignedInt']['input']>;
ngt?: InputMaybe<Scalars['UnsignedInt']['input']>;
ngte?: InputMaybe<Scalars['UnsignedInt']['input']>;
nin?: InputMaybe<Array<InputMaybe<Scalars['UnsignedInt']['input']>>>;
nlt?: InputMaybe<Scalars['UnsignedInt']['input']>;
nlte?: InputMaybe<Scalars['UnsignedInt']['input']>;
};
/** An application user; `inviter` links to the referring User when present. */
export type User = {
createdTime: Scalars['Instant']['output'];
disabled: Scalars['Boolean']['output'];
email: Scalars['String']['output'];
id: Scalars['UUID']['output'];
inviter: Maybe<User>;
lastUpdatedTime: Scalars['Instant']['output'];
oAuthProviderId: Scalars['String']['output'];
username: Scalars['String']['output'];
};
/** Comparison filter over UUID values: equality, ordering, membership, and negations. */
export type UuidOperationFilterInput = {
eq?: InputMaybe<Scalars['UUID']['input']>;
gt?: InputMaybe<Scalars['UUID']['input']>;
gte?: InputMaybe<Scalars['UUID']['input']>;
in?: InputMaybe<Array<InputMaybe<Scalars['UUID']['input']>>>;
lt?: InputMaybe<Scalars['UUID']['input']>;
lte?: InputMaybe<Scalars['UUID']['input']>;
neq?: InputMaybe<Scalars['UUID']['input']>;
ngt?: InputMaybe<Scalars['UUID']['input']>;
ngte?: InputMaybe<Scalars['UUID']['input']>;
nin?: InputMaybe<Array<InputMaybe<Scalars['UUID']['input']>>>;
nlt?: InputMaybe<Scalars['UUID']['input']>;
nlte?: InputMaybe<Scalars['UUID']['input']>;
};
/** Variables of the Novels operation: forward pagination only (`first` + `after`). */
export type NovelsQueryVariables = Exact<{
first?: InputMaybe<Scalars['Int']['input']>;
after?: InputMaybe<Scalars['String']['input']>;
}>;
/** Result shape of the Novels operation, narrowed to the fields the document selects. */
export type NovelsQuery = { novels: { edges: Array<{ cursor: string, node: { id: any, url: string, name: { texts: Array<{ language: Language, text: string }> }, description: { texts: Array<{ language: Language, text: string }> }, coverImage: { originalPath: string, newPath: string | null } | null } }> | null, pageInfo: { hasNextPage: boolean, endCursor: string | null } } | null };
export const NovelsDocument = {"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"Novels"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"after"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"novels"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"after"},"value":{"kind":"Variable","name":{"kind":"Name","value":"after"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"cursor"}},{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"url"}},{"kind":"Field","name":{"kind":"Name","value":"name"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"texts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"language"}},{"kind":"Field","name":{"kind":"Name","value":"text"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"description"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"texts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"language"}},{"kind":"Field","name":{"kind":"Name","value":"text"}}]}}]}},{"kind":"Fiel
d","name":{"kind":"Name","value":"coverImage"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"originalPath"}},{"kind":"Field","name":{"kind":"Name","value":"newPath"}}]}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"pageInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"hasNextPage"}},{"kind":"Field","name":{"kind":"Name","value":"endCursor"}}]}}]}}]}}]} as unknown as DocumentNode<NovelsQuery, NovelsQueryVariables>;

View File

@@ -6,7 +6,7 @@ const uri = import.meta.env.VITE_GRAPHQL_URI ?? 'https://localhost:5001/graphql'
const httpLink = new HttpLink({ uri })
const authLink = new SetContextLink(async ({ headers }, _) => {
const authLink = new SetContextLink(async ({ headers }) => {
if (!userManager) return { headers }
try {
const user = await userManager.getUser()

View File

@@ -14,12 +14,11 @@ const AuthContext = createContext<AuthContextValue | undefined>(undefined)
export function AuthProvider({ children }: { children: ReactNode }) {
const [user, setUser] = useState<User | null>(null)
const [isLoading, setIsLoading] = useState(true)
const [isLoading, setIsLoading] = useState(!!userManager)
const callbackHandledRef = useRef(false)
useEffect(() => {
if (!userManager) {
setIsLoading(false)
return
}
@@ -121,6 +120,7 @@ export function AuthProvider({ children }: { children: ReactNode }) {
return <AuthContext.Provider value={value}>{children}</AuthContext.Provider>
}
// eslint-disable-next-line react-refresh/only-export-components
export function useAuth() {
const context = useContext(AuthContext)
if (!context) {

View File

@@ -1,11 +1,13 @@
import type { Novel } from '../__generated__/graphql'
import type { NovelsQuery } from '../__generated__/graphql'
import { Card, CardContent, CardHeader, CardTitle } from './ui/card'
type NovelNode = NonNullable<NonNullable<NovelsQuery['novels']>['edges']>[number]['node']
type NovelCardProps = {
novel: Novel
novel: NovelNode
}
function pickText(novelText?: Novel['name'] | Novel['description']) {
function pickText(novelText?: NovelNode['name'] | NovelNode['description']) {
const texts = novelText?.texts ?? []
const english = texts.find((t) => t.language === 'EN')
return (english ?? texts[0])?.text ?? 'No description available.'

View File

@@ -31,4 +31,5 @@ function Badge({ className, variant, ...props }: BadgeProps) {
)
}
// eslint-disable-next-line react-refresh/only-export-components
export { Badge, badgeVariants }

View File

@@ -51,4 +51,5 @@ const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
)
Button.displayName = 'Button'
// eslint-disable-next-line react-refresh/only-export-components
export { Button, buttonVariants }

View File

@@ -2,8 +2,7 @@ import * as React from 'react'
import { cn } from '../../lib/utils'
export interface InputProps
extends React.InputHTMLAttributes<HTMLInputElement> {}
export type InputProps = React.InputHTMLAttributes<HTMLInputElement>
const Input = React.forwardRef<HTMLInputElement, InputProps>(
({ className, type, ...props }, ref) => {

View File

@@ -1,6 +1,7 @@
import { useMemo } from 'react'
import { useNovelsQuery } from '../__generated__/graphql'
import { useQuery } from '@apollo/client/react'
import { NovelsDocument } from '../__generated__/graphql'
import { NovelCard } from '../components/NovelCard'
import { Button } from '../components/ui/button'
import { Card, CardContent, CardHeader, CardTitle } from '../components/ui/card'
@@ -8,19 +9,18 @@ import { Card, CardContent, CardHeader, CardTitle } from '../components/ui/card'
const PAGE_SIZE = 12
export function NovelsPage() {
const { data, loading, error, fetchMore } = useNovelsQuery({
const { data, loading, error, fetchMore } = useQuery(NovelsDocument, {
variables: { first: PAGE_SIZE, after: null },
notifyOnNetworkStatusChange: true,
})
const edges = data?.novels?.edges ?? []
const pageInfo = data?.novels?.pageInfo
const hasNextPage = pageInfo?.hasNextPage ?? false
const endCursor = pageInfo?.endCursor ?? null
const novels = useMemo(
() => edges.map((edge) => edge?.node).filter(Boolean),
[edges]
() => (data?.novels?.edges ?? []).map((edge) => edge?.node).filter(Boolean),
[data?.novels?.edges]
)
async function handleLoadMore() {