Compare commits: 15a8185621...feature/FA (23 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 12e3c5dfdd | |
| | b71d9031e1 | |
| | 09ebdb1b2a | |
| | 43d5ada7fb | |
| | 4635ed1b4e | |
| | 920fd00910 | |
| | 0d9f788678 | |
| | 0938c16a76 | |
| | f25cbc1a04 | |
| | 078eaf5237 | |
| | b9115d78a9 | |
| | 7e94f06853 | |
| | 50263109ab | |
| | 6ebfe81ae3 | |
| | 80aac63f7d | |
| | adc99c7000 | |
| | 87075be61e | |
| | 259dc08aea | |
| | 2203d2ee54 | |
| | 30cc89242d | |
| | 84294455f9 | |
| | be62af98d3 | |
| | aff1396c6a | |
@@ -3,18 +3,31 @@ name: Build Gateway
on:
  workflow_dispatch:
  push:
    branches:
      - master
    paths:
      - 'FictionArchive.API/**'
    tags:
      - 'v*.*.*'

env:
  REGISTRY: ${{ gitea.server_url }}
  IMAGE_NAME: ${{ gitea.repository_owner }}/fictionarchive-api

jobs:
  build-gateway:
  build-subgraphs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service:
          - name: novel-service
            project: FictionArchive.Service.NovelService
            subgraph: Novel
          - name: translation-service
            project: FictionArchive.Service.TranslationService
            subgraph: Translation
          - name: scheduler-service
            project: FictionArchive.Service.SchedulerService
            subgraph: Scheduler
          - name: user-service
            project: FictionArchive.Service.UserService
            subgraph: User
    steps:
      - name: Checkout
        uses: actions/checkout@v4
@@ -27,44 +40,75 @@ jobs:
      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Add .NET tools to PATH
        run: echo "$HOME/.dotnet/tools" >> $GITHUB_PATH

      - name: Restore dependencies
        run: dotnet restore ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj

      - name: Build
        run: dotnet build ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj -c Release --no-restore

      - name: Export schema
        run: |
          dotnet run -c Release --no-launch-profile \
            --project ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj \
            -- schema export --output schema.graphql

      - name: Pack subgraph
        run: fusion subgraph pack -w ${{ matrix.service.project }}

      - name: Upload subgraph package
        uses: christopherhx/gitea-upload-artifact@v4
        with:
          name: ${{ matrix.service.name }}-subgraph
          path: ${{ matrix.service.project }}/*.fsp
          retention-days: 30

  build-gateway:
    runs-on: ubuntu-latest
    needs: build-subgraphs
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Add .NET tools to PATH
        run: echo "$HOME/.dotnet/tools" >> $GITHUB_PATH

      - name: Create subgraphs directory
        run: mkdir -p subgraphs

      # Download all subgraph packages from latest successful builds
      - name: Download Novel Service subgraph
        uses: actions/download-artifact@v4
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: novel-service-subgraph
          path: subgraphs/novel
        continue-on-error: true

      - name: Download Translation Service subgraph
        uses: actions/download-artifact@v4
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: translation-service-subgraph
          path: subgraphs/translation
        continue-on-error: true

      - name: Download Scheduler Service subgraph
        uses: actions/download-artifact@v4
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: scheduler-service-subgraph
          path: subgraphs/scheduler
        continue-on-error: true

      - name: Download User Service subgraph
        uses: actions/download-artifact@v4
        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: user-service-subgraph
          path: subgraphs/user
        continue-on-error: true

      - name: Download File Service subgraph
        uses: actions/download-artifact@v4
        with:
          name: file-service-subgraph
          path: subgraphs/file
        continue-on-error: true

      - name: Configure subgraph URLs for Docker
        run: |
@@ -95,13 +139,13 @@ jobs:
      - name: Build gateway
        run: dotnet build FictionArchive.API/FictionArchive.API.csproj -c Release --no-restore -p:SkipFusionBuild=true

      - name: Run tests
        run: dotnet test FictionArchive.sln -c Release --no-build --verbosity normal
        continue-on-error: true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Extract registry hostname
        id: registry
        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
@@ -116,7 +160,7 @@ jobs:
          file: FictionArchive.API/Dockerfile
          push: true
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_NAME }}:latest
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -1,77 +0,0 @@
name: Build Subgraphs

on:
  push:
    branches:
      - master
    paths:
      - 'FictionArchive.Service.*/**'
      - 'FictionArchive.Common/**'
      - 'FictionArchive.Service.Shared/**'

jobs:
  build-subgraphs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service:
          - name: novel-service
            project: FictionArchive.Service.NovelService
            subgraph: Novel
          - name: translation-service
            project: FictionArchive.Service.TranslationService
            subgraph: Translation
          - name: scheduler-service
            project: FictionArchive.Service.SchedulerService
            subgraph: Scheduler
          - name: user-service
            project: FictionArchive.Service.UserService
            subgraph: User
          - name: file-service
            project: FictionArchive.Service.FileService
            subgraph: File
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Restore dependencies
        run: dotnet restore ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj

      - name: Build
        run: dotnet build ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj -c Release --no-restore

      - name: Export schema
        run: |
          dotnet run -c Release --no-launch-profile \
            --project ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj \
            -- schema export --output ${{ matrix.service.project }}/schema.graphql

      - name: Pack subgraph
        run: fusion subgraph pack -w ${{ matrix.service.project }}

      - name: Upload subgraph package
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.service.name }}-subgraph
          path: ${{ matrix.service.project }}/*.fsp
          retention-days: 30

  # Trigger gateway build after all subgraphs are built
  trigger-gateway:
    runs-on: ubuntu-latest
    needs: build-subgraphs
    steps:
      - name: Trigger gateway workflow
        run: |
          curl -X POST \
            -H "Authorization: token ${{ secrets.GITEA_TOKEN }}" \
            "${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/actions/workflows/build-gateway.yml/dispatches" \
            -d '{"ref":"master"}'
@@ -20,14 +20,6 @@ jobs:
        with:
          dotnet-version: '8.0.x'

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Fusion CLI
        run: dotnet tool install -g HotChocolate.Fusion.CommandLine

      - name: Restore dependencies
        run: dotnet restore FictionArchive.sln

@@ -35,7 +27,27 @@ jobs:
        run: dotnet build FictionArchive.sln --configuration Release --no-restore /p:SkipFusionBuild=true

      - name: Run tests
        run: dotnet test FictionArchive.sln --configuration Release --no-build --verbosity normal
        run: |
          dotnet test FictionArchive.sln --configuration Release --no-build --verbosity normal \
            --logger "trx;LogFileName=test-results.trx" \
            --collect:"XPlat Code Coverage" \
            --results-directory ./TestResults

      - name: Upload test results
        uses: christopherhx/gitea-upload-artifact@v4
        if: always()
        with:
          name: test-results
          path: ./TestResults/**/*.trx
          retention-days: 30

      - name: Upload coverage results
        uses: christopherhx/gitea-upload-artifact@v4
        if: always()
        with:
          name: coverage-results
          path: ./TestResults/**/coverage.cobertura.xml
          retention-days: 30

  build-frontend:
    runs-on: ubuntu-latest
@@ -1,43 +1,49 @@
name: Claude Assistant for Gitea
name: Claude PR Assistant

on:
  # Trigger on issue comments (works on both issues and pull requests in Gitea)
  issue_comment:
    types: [created]
  # Trigger on issues being opened or assigned
  pull_request_review_comment:
    types: [created]
  issues:
    types: [opened, assigned]
  # Note: pull_request_review_comment has limited support in Gitea
  # Use issue_comment instead which covers PR comments
  pull_request_review:
    types: [submitted]

jobs:
  claude-assistant:
    # Basic trigger detection - check for @claude in comments or issue body
  claude-code-action:
    if: |
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || github.event.action == 'assigned'))
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && contains(github.event.issue.body, '@claude'))
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
      issues: write
      # Note: Gitea Actions may not require id-token: write for basic functionality
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Run Claude Assistant
        uses: markwylde/claude-code-gitea-action
      - name: Run Claude PR Action
        uses: markwylde/claude-code-gitea-action@v1.0.20
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          gitea_token: ${{ secrets.CLAUDE_GITEA_TOKEN }}
          # Or use OAuth token instead:
          # claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          timeout_minutes: "60"
          trigger_phrase: "@claude"
          # Optional: Customize for Gitea environment
          custom_instructions: |
            You are working in a Gitea environment. Be aware that:
            - Some GitHub Actions features may behave differently
            - Focus on core functionality and avoid advanced GitHub-specific features
            - Use standard git operations when possible
          # mode: tag # Default: responds to @claude mentions
          # Optional: Restrict network access to specific domains only
          # experimental_allowed_domains: |
          #   .anthropic.com
          #   .github.com
          #   api.github.com
          #   .githubusercontent.com
          #   bun.sh
          #   registry.npmjs.org
          #   .blob.core.windows.net
@@ -15,8 +15,6 @@ jobs:
    strategy:
      matrix:
        service:
          - name: api
            dockerfile: FictionArchive.API/Dockerfile
          - name: novel-service
            dockerfile: FictionArchive.Service.NovelService/Dockerfile
          - name: user-service
@@ -40,6 +38,10 @@ jobs:
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

      - name: Extract registry hostname
        id: registry
        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
@@ -54,8 +56,8 @@ jobs:
          file: ${{ matrix.service.dockerfile }}
          push: true
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:${{ steps.version.outputs.VERSION }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:latest
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:${{ steps.version.outputs.VERSION }}
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

@@ -72,6 +74,10 @@ jobs:
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

      - name: Extract registry hostname
        id: registry
        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
@@ -92,7 +98,7 @@ jobs:
            VITE_OIDC_REDIRECT_URI=${{ vars.VITE_OIDC_REDIRECT_URI }}
            VITE_OIDC_POST_LOGOUT_REDIRECT_URI=${{ vars.VITE_OIDC_POST_LOGOUT_REDIRECT_URI }}
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-frontend:${{ steps.version.outputs.VERSION }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-frontend:latest
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-frontend:${{ steps.version.outputs.VERSION }}
            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-frontend:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -7,9 +7,9 @@ This document describes the CI/CD pipeline configuration for FictionArchive usin
| Workflow | File | Trigger | Purpose |
|----------|------|---------|---------|
| CI | `build.yml` | Push/PR to master | Build and test all projects |
| Build Subgraphs | `build-subgraphs.yml` | Push to master (service changes) | Build GraphQL subgraph packages |
| Build Gateway | `build-gateway.yml` | Manual or triggered by subgraphs | Compose gateway and build Docker image |
| Build Gateway | `build-gateway.yml` | Tag `v*.*.*` or manual | Build subgraphs, compose gateway, push API image |
| Release | `release.yml` | Tag `v*.*.*` | Build and push all Docker images |
| Claude PR Assistant | `claude_assistant.yml` | Issue/PR comments with @claude | AI-assisted code review and issue handling |

## Pipeline Architecture
@@ -18,27 +18,32 @@ This document describes the CI/CD pipeline configuration for FictionArchive usin
│                           Push to master                            │
└─────────────────────────────┬───────────────────────────────────────┘
                              │
              ┌───────────────┴───────────────┐
              ▼                               ▼
 ┌─────────────────────────┐     ┌─────────────────────────┐
 │        build.yml        │     │   build-subgraphs.yml   │
 │  (CI checks - always)   │     │  (if service changes)   │
 └─────────────────────────┘     └────────────┬────────────┘
                                              │
                                              ▼
                                 ┌─────────────────────────┐
                                 │    build-gateway.yml    │
                                 │  (compose & push API)   │
                                 │        build.yml        │
                                 │       (CI checks)       │
                                 └─────────────────────────┘

┌─────────────────────────────────────────────────────────────────────┐
│                          Push tag v*.*.*                            │
└─────────────────────────────┬───────────────────────────────────────┘
                              │
              ┌───────────────┴───────────────┐
              ▼                               ▼
 ┌─────────────────────────┐     ┌─────────────────────────┐
 │       release.yml       │     │    build-gateway.yml    │
 │   (build & push all     │     │   (build subgraphs &    │
 │  backend + frontend)    │     │   push API gateway)     │
 └─────────────────────────┘     └─────────────────────────┘

┌─────────────────────────────────────────────────────────────────────┐
│                Issue/PR comment containing @claude                  │
└─────────────────────────────┬───────────────────────────────────────┘
                              │
                              ▼
                 ┌─────────────────────────┐
                 │       release.yml       │
                 │   (build & push all)    │
                 │  claude_assistant.yml   │
                 │  (AI code assistance)   │
                 └─────────────────────────┘
```
@@ -51,14 +56,15 @@ Configure these in **Settings → Actions → Secrets**:
| Secret | Description | Required By |
|--------|-------------|-------------|
| `REGISTRY_TOKEN` | Gitea access token with `write:package` scope | `release.yml`, `build-gateway.yml` |
| `GITEA_TOKEN` | Gitea access token for API calls | `build-subgraphs.yml` |
| `CLAUDE_CODE_OAUTH_TOKEN` | Claude Code OAuth token | `claude_assistant.yml` |
| `CLAUDE_GITEA_TOKEN` | Gitea token for Claude assistant | `claude_assistant.yml` |

#### Creating Access Tokens

1. Go to **Settings → Applications → Access Tokens**
2. Create a new token with the following scopes:
   - `write:package` - Push container images
   - `write:repository` - Trigger workflows via API
   - `write:repository` - For Claude assistant to push commits
3. Copy the token and add it as a repository secret

### Repository Variables
@@ -85,42 +91,62 @@ Configure these in **Settings → Actions → Variables**:

**Requirements:**
- .NET 8.0 SDK
- Python 3.12
- Node.js 20
- HotChocolate Fusion CLI

### Build Subgraphs (`build-subgraphs.yml`)
**Steps (Backend):**
1. Checkout repository
2. Setup .NET 8.0
3. Restore dependencies
4. Build solution (Release, with `SkipFusionBuild=true`)
5. Run tests

**Trigger:** Push to `master` with changes in:
- `FictionArchive.Service.*/**`
- `FictionArchive.Common/**`
- `FictionArchive.Service.Shared/**`

**Jobs:**
1. `build-subgraphs` - Matrix job building each service's `.fsp` package
2. `trigger-gateway` - Triggers gateway rebuild via API

**Subgraphs Built:**
- Novel Service
- Translation Service
- Scheduler Service
- User Service
- File Service

**Artifacts:** Each subgraph produces a `.fsp` file retained for 30 days.
**Steps (Frontend):**
1. Checkout repository
2. Setup Node.js 20
3. Install dependencies (`npm ci`)
4. Run linter (`npm run lint`)
5. Build application (`npm run build`)

### Build Gateway (`build-gateway.yml`)

**Trigger:**
- Manual dispatch (`workflow_dispatch`)
- Push to `master` with changes in `FictionArchive.API/**`
- Triggered by `build-subgraphs.yml` completion
- Push tag matching `v*.*.*`

**Process:**
1. Downloads all subgraph `.fsp` artifacts
2. Configures Docker-internal URLs for each subgraph
3. Composes gateway schema using Fusion CLI
4. Builds and pushes API Docker image
**Jobs:**

#### 1. `build-subgraphs` (Matrix Job)
Builds GraphQL subgraph packages for each service:

| Service | Project | Subgraph Name |
|---------|---------|---------------|
| novel-service | FictionArchive.Service.NovelService | Novel |
| translation-service | FictionArchive.Service.TranslationService | Translation |
| scheduler-service | FictionArchive.Service.SchedulerService | Scheduler |
| user-service | FictionArchive.Service.UserService | User |

**Note:** File Service and Authentication Service are not subgraphs (no GraphQL schema).

**Steps:**
1. Checkout repository
2. Setup .NET 8.0
3. Install HotChocolate Fusion CLI
4. Restore and build service project
5. Export GraphQL schema (`schema export`)
6. Pack subgraph into `.fsp` file
7. Upload artifact (retained 30 days)

#### 2. `build-gateway` (Depends on `build-subgraphs`)
Composes the API gateway from subgraph packages.

**Steps:**
1. Checkout repository
2. Setup .NET 8.0 and Fusion CLI
3. Download all subgraph artifacts
4. Configure Docker-internal URLs (`http://{service}-service:8080/graphql`)
5. Compose gateway schema using Fusion CLI
6. Build gateway project
7. Build and push Docker image

**Image Tags:**
- `<registry>/<owner>/fictionarchive-api:latest`
@@ -131,23 +157,54 @@ Configure these in **Settings → Actions → Variables**:
**Trigger:** Push tag matching `v*.*.*` (e.g., `v1.0.0`)

**Jobs:**
1. `build-and-push` - Matrix job building all backend service images
2. `build-frontend` - Builds and pushes frontend image

**Services Built:**
- `fictionarchive-api`
- `fictionarchive-novel-service`
- `fictionarchive-user-service`
- `fictionarchive-translation-service`
- `fictionarchive-file-service`
- `fictionarchive-scheduler-service`
- `fictionarchive-authentication-service`
- `fictionarchive-frontend`
#### 1. `build-and-push` (Matrix Job)
Builds and pushes all backend service images:

| Service | Dockerfile |
|---------|------------|
| novel-service | FictionArchive.Service.NovelService/Dockerfile |
| user-service | FictionArchive.Service.UserService/Dockerfile |
| translation-service | FictionArchive.Service.TranslationService/Dockerfile |
| file-service | FictionArchive.Service.FileService/Dockerfile |
| scheduler-service | FictionArchive.Service.SchedulerService/Dockerfile |
| authentication-service | FictionArchive.Service.AuthenticationService/Dockerfile |

#### 2. `build-frontend`
Builds and pushes the frontend image with environment-specific build arguments.

**Build Args:**
- `VITE_GRAPHQL_URI`
- `VITE_OIDC_AUTHORITY`
- `VITE_OIDC_CLIENT_ID`
- `VITE_OIDC_REDIRECT_URI`
- `VITE_OIDC_POST_LOGOUT_REDIRECT_URI`

**Image Tags:**
- `<registry>/<owner>/fictionarchive-<service>:<version>`
- `<registry>/<owner>/fictionarchive-<service>:latest`

### Claude PR Assistant (`claude_assistant.yml`)

**Trigger:** Comments or issues containing `@claude`:
- Issue comments
- Pull request review comments
- Pull request reviews
- New issues (opened or assigned)

**Permissions Required:**
- `contents: write`
- `pull-requests: write`
- `issues: write`
- `id-token: write`

**Usage:**
Mention `@claude` in any issue or PR comment to invoke the AI assistant for:
- Code review assistance
- Bug analysis
- Implementation suggestions
- Documentation help

## Container Registry

Images are pushed to the Gitea Container Registry at:
@@ -155,6 +212,19 @@ Images are pushed to the Gitea Container Registry at:
<gitea-server-url>/<repository-owner>/fictionarchive-<service>:<tag>
```

### Image Naming Convention

| Image | Description |
|-------|-------------|
| `fictionarchive-api` | API Gateway (GraphQL Federation) |
| `fictionarchive-novel-service` | Novel Service |
| `fictionarchive-user-service` | User Service |
| `fictionarchive-translation-service` | Translation Service |
| `fictionarchive-file-service` | File Service |
| `fictionarchive-scheduler-service` | Scheduler Service |
| `fictionarchive-authentication-service` | Authentication Service |
| `fictionarchive-frontend` | Web Frontend |

### Pulling Images

```bash
@@ -184,13 +254,13 @@ docker pull <gitea-server-url>/<owner>/fictionarchive-api:latest
- Ensure the `REGISTRY_TOKEN` secret is configured in repository settings
- Verify the token has `write:package` scope

**"Failed to trigger gateway workflow"**
- Ensure `GITEA_TOKEN` secret is configured
- Verify the token has `write:repository` scope

**"No subgraph artifacts found"**
- The gateway build requires subgraph artifacts from a previous `build-subgraphs` run
- Trigger `build-subgraphs.yml` manually or push a change to a service
- The gateway build requires subgraph artifacts from the `build-subgraphs` job
- If subgraph builds failed, check the matrix job logs for errors

**"Schema export failed"**
- Ensure the service project has a valid `subgraph-config.json`
- Check that the service starts correctly for schema export

### Frontend Build Failures

@@ -204,6 +274,13 @@ docker pull <gitea-server-url>/<owner>/fictionarchive-api:latest
- Verify `REGISTRY_TOKEN` has correct permissions
- Check that the token hasn't expired

### Claude Assistant Failures

**"Claude assistant not responding"**
- Verify `CLAUDE_CODE_OAUTH_TOKEN` is configured
- Verify `CLAUDE_GITEA_TOKEN` is configured and has write permissions
- Check that the comment contains `@claude` mention

## Local Testing

To test workflows locally before pushing:
@@ -7,17 +7,17 @@ EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["FictionArchive.Service.ImageService/FictionArchive.Service.ImageService.csproj", "FictionArchive.Service.ImageService/"]
RUN dotnet restore "FictionArchive.Service.ImageService/FictionArchive.Service.ImageService.csproj"
COPY ["FictionArchive.Service.FileService/FictionArchive.Service.FileService.csproj", "FictionArchive.Service.FileService/"]
RUN dotnet restore "FictionArchive.Service.FileService/FictionArchive.Service.FileService.csproj"
COPY . .
WORKDIR "/src/FictionArchive.Service.ImageService"
RUN dotnet build "./FictionArchive.Service.ImageService.csproj" -c $BUILD_CONFIGURATION -o /app/build
WORKDIR "/src/FictionArchive.Service.FileService"
RUN dotnet build "./FictionArchive.Service.FileService.csproj" -c $BUILD_CONFIGURATION -o /app/build

FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./FictionArchive.Service.ImageService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
RUN dotnet publish "./FictionArchive.Service.FileService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "FictionArchive.Service.ImageService.dll"]
ENTRYPOINT ["dotnet", "FictionArchive.Service.FileService.dll"]
@@ -9,7 +9,7 @@
    "BaseUrl": "https://localhost:7247/api"
  },
  "RabbitMQ": {
    "ConnectionString": "amqp://localhost",
    "ConnectionString": "amqp://localhost2",
    "ClientIdentifier": "FileService"
  },
  "S3": {
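Each service binds this `RabbitMQ` section onto an options object in its `Program.cs` (`builder.Configuration.GetSection("RabbitMQ").Bind(opt)`, as seen in the Program.cs diffs further down). A minimal sketch of the shape such an options class would need in order to bind these keys; the class name here is hypothetical, not taken from the repository:

```csharp
// Hypothetical options class; the real type lives in FictionArchive.Service.Shared
// and may carry additional settings.
public class RabbitMqOptions
{
    // Maps to RabbitMQ:ConnectionString, e.g. "amqp://localhost"
    public string ConnectionString { get; set; } = string.Empty;

    // Maps to RabbitMQ:ClientIdentifier, e.g. "FileService"
    public string ClientIdentifier { get; set; } = string.Empty;
}
```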
@@ -12,26 +12,15 @@ namespace FictionArchive.Service.NovelService.GraphQL;

public class Mutation
{
    public async Task<NovelUpdateRequestedEvent> ImportNovel(string novelUrl, IEventBus eventBus)
    public async Task<NovelUpdateRequestedEvent> ImportNovel(string novelUrl, NovelUpdateService service)
    {
        var importNovelRequestEvent = new NovelUpdateRequestedEvent()
        {
            NovelUrl = novelUrl
        };
        await eventBus.Publish(importNovelRequestEvent);
        return importNovelRequestEvent;
        return await service.QueueNovelImport(novelUrl);
    }

    public async Task<ChapterPullRequestedEvent> FetchChapterContents(uint novelId,
        uint chapterNumber,
        IEventBus eventBus)
        NovelUpdateService service)
    {
        var chapterPullEvent = new ChapterPullRequestedEvent()
        {
            NovelId = novelId,
            ChapterNumber = chapterNumber
        };
        await eventBus.Publish(chapterPullEvent);
        return chapterPullEvent;
        return await service.QueueChapterPull(novelId, chapterNumber);
    }
}
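For orientation, the two mutations above are served over GraphQL; the frontend's `VITE_GRAPHQL_URI` and the gateway docs elsewhere in this diff both point at a `/graphql` endpoint. A hedged sketch of calling `importNovel` over HTTP — the base address, the example novel URL, and the camelCased field names follow HotChocolate defaults and are assumptions, not taken from the diff:

```csharp
// Sketch only: posts the ImportNovel mutation to an assumed local gateway endpoint.
using System.Net.Http.Json;

var client = new HttpClient { BaseAddress = new Uri("http://localhost:8080") };

var request = new
{
    query = "mutation($url: String!) { importNovel(novelUrl: $url) { novelUrl } }",
    variables = new { url = "https://example.com/novel/12345" } // hypothetical source URL
};

var response = await client.PostAsJsonAsync("/graphql", request);
Console.WriteLine(await response.Content.ReadAsStringAsync());
```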
@@ -6,6 +6,7 @@ using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.NovelService.Services.EventHandlers;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.NovelService.Services.SourceAdapters.Novelpia;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.Shared.Services.GraphQL;
@@ -17,6 +18,8 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);
        builder.AddLocalAppsettings();

@@ -24,6 +27,8 @@ public class Program

        #region Event Bus

        if (!isSchemaExport)
        {
            builder.Services.AddRabbitMQ(opt =>
            {
                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
@@ -32,6 +37,7 @@ public class Program
                .Subscribe<NovelUpdateRequestedEvent, NovelUpdateRequestedEventHandler>()
                .Subscribe<ChapterPullRequestedEvent, ChapterPullRequestedEventHandler>()
                .Subscribe<FileUploadRequestStatusUpdateEvent, FileUploadRequestStatusUpdateEventHandler>();
        }

        #endregion

@@ -43,7 +49,9 @@ public class Program

        #region Database

        builder.Services.RegisterDbContext<NovelServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<NovelServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);

        #endregion

@@ -69,9 +77,10 @@ public class Program

        var app = builder.Build();

        // Update database
        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<NovelServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -2,6 +2,7 @@ using FictionArchive.Service.FileService.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Configuration;
using FictionArchive.Service.NovelService.Models.Enums;
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.NovelService.Models.SourceAdapters;
@@ -201,4 +202,25 @@ public class NovelUpdateService

        await _dbContext.SaveChangesAsync();
    }

    public async Task<NovelUpdateRequestedEvent> QueueNovelImport(string novelUrl)
    {
        var importNovelRequestEvent = new NovelUpdateRequestedEvent()
        {
            NovelUrl = novelUrl
        };
        await _eventBus.Publish(importNovelRequestEvent);
        return importNovelRequestEvent;
    }

    public async Task<ChapterPullRequestedEvent> QueueChapterPull(uint novelId, uint chapterNumber)
    {
        var chapterPullEvent = new ChapterPullRequestedEvent()
        {
            NovelId = novelId,
            ChapterNumber = chapterNumber
        };
        await _eventBus.Publish(chapterPullEvent);
        return chapterPullEvent;
    }
}
@@ -1,5 +1,6 @@
using FictionArchive.Service.SchedulerService.GraphQL;
using FictionArchive.Service.SchedulerService.Services;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using Quartz;
@@ -11,6 +12,8 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);

        // Services
@@ -20,21 +23,36 @@ public class Program

        #region Database

        builder.Services.RegisterDbContext<SchedulerServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<SchedulerServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);

        #endregion

        #region Event Bus

        if (!isSchemaExport)
        {
            builder.Services.AddRabbitMQ(opt =>
            {
                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
            });
        }

        #endregion

        #region Quartz

        if (isSchemaExport)
        {
            // Schema export mode: use in-memory store (no DB connection needed)
            builder.Services.AddQuartz(opt =>
            {
                opt.UseInMemoryStore();
            });
        }
        else
        {
            builder.Services.AddQuartz(opt =>
            {
                opt.UsePersistentStore(pso =>
@@ -52,13 +70,16 @@ public class Program
        {
            opt.WaitForJobsToComplete = true;
        });
        }

        #endregion

        var app = builder.Build();

        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<SchedulerServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -6,8 +6,20 @@ namespace FictionArchive.Service.Shared.Extensions;

public static class DatabaseExtensions
{
    public static IServiceCollection RegisterDbContext<TContext>(this IServiceCollection services,
        string connectionString) where TContext : FictionArchiveDbContext
    public static IServiceCollection RegisterDbContext<TContext>(
        this IServiceCollection services,
        string connectionString,
        bool skipInfrastructure = false) where TContext : FictionArchiveDbContext
    {
        if (skipInfrastructure)
        {
            // For schema export: use in-memory provider to allow EF Core entity discovery
            services.AddDbContext<TContext>(options =>
            {
                options.UseInMemoryDatabase($"SchemaExport_{typeof(TContext).Name}");
            });
        }
        else
        {
            services.AddDbContext<TContext>(options =>
            {
@@ -16,6 +28,7 @@ public static class DatabaseExtensions
                o.UseNodaTime();
            });
        });
        }
        return services;
    }
}
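Taken together with `SchemaExportDetector` (added later in this diff), the registration pattern each service now follows is roughly the sketch below, condensed from the `Program.cs` changes above and below; `MyServiceDbContext` is a placeholder name:

```csharp
// Condensed sketch of the per-service wiring, assuming a placeholder MyServiceDbContext.
var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);
var builder = WebApplication.CreateBuilder(args);

// In-memory EF Core provider during schema export; the regular relational provider otherwise.
builder.Services.RegisterDbContext<MyServiceDbContext>(
    builder.Configuration.GetConnectionString("DefaultConnection"),
    skipInfrastructure: isSchemaExport);

var app = builder.Build();

// Apply database updates only on a real run, never during schema export.
if (!isSchemaExport)
{
    using var scope = app.Services.CreateScope();
    scope.ServiceProvider.GetRequiredService<MyServiceDbContext>().UpdateDatabase();
}

app.Run();
```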
@@ -18,6 +18,7 @@
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.11" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.Relational" Version="9.0.11" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="9.0.11">
      <PrivateAssets>all</PrivateAssets>

FictionArchive.Service.Shared/SchemaExportDetector.cs (new file, 22 lines)
@@ -0,0 +1,22 @@
namespace FictionArchive.Service.Shared;

/// <summary>
/// Detects if the application is running in schema export mode (for HotChocolate CLI commands).
/// In this mode, infrastructure like RabbitMQ and databases should not be initialized.
/// </summary>
public static class SchemaExportDetector
{
    /// <summary>
    /// Checks if the current run is a schema export command.
    /// </summary>
    /// <param name="args">Command line arguments passed to Main()</param>
    /// <returns>True if running schema export, false otherwise</returns>
    public static bool IsSchemaExportMode(string[] args)
    {
        // HotChocolate CLI pattern: "schema export" after "--" delimiter
        // Handles: dotnet run -- schema export --output schema.graphql
        var normalizedArgs = args.SkipWhile(a => a == "--").ToArray();
        return normalizedArgs.Length > 0 &&
               normalizedArgs[0].Equals("schema", StringComparison.OrdinalIgnoreCase);
    }
}
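Since detection keys off the first argument after any leading `--`, the expected behavior can be sanity-checked with a quick sketch (the argument values here are illustrative):

```csharp
// Illustrative check of SchemaExportDetector behavior (not part of the diff).
using FictionArchive.Service.Shared;

// `dotnet run -- schema export --output schema.graphql` hands the app arguments
// whose first non-"--" entry is "schema", so export mode is detected.
string[] exportArgs = { "schema", "export", "--output", "schema.graphql" };

// A normal service start has no "schema" verb, so infrastructure is wired up as usual.
string[] normalArgs = { "--urls", "http://localhost:8080" };

Console.WriteLine(SchemaExportDetector.IsSchemaExportMode(exportArgs)); // True
Console.WriteLine(SchemaExportDetector.IsSchemaExportMode(normalArgs)); // False
```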
@@ -1,5 +1,6 @@
using DeepL;
using FictionArchive.Common.Extensions;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.Shared.Services.GraphQL;
@@ -18,6 +19,8 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);
        builder.AddLocalAppsettings();

@@ -25,18 +28,23 @@ public class Program

        #region Event Bus

        if (!isSchemaExport)
        {
            builder.Services.AddRabbitMQ(opt =>
            {
                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
            })
            .Subscribe<TranslationRequestCreatedEvent, TranslationRequestCreatedEventHandler>();
        }

        #endregion


        #region Database

        builder.Services.RegisterDbContext<TranslationServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<TranslationServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);

        #endregion

@@ -60,9 +68,10 @@ public class Program

        var app = builder.Build();

        // Update database
        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<TranslationServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -1,3 +1,4 @@
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.UserService.GraphQL;
@@ -11,15 +12,20 @@ public class Program
{
    public static void Main(string[] args)
    {
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);

        var builder = WebApplication.CreateBuilder(args);

        #region Event Bus

        if (!isSchemaExport)
        {
            builder.Services.AddRabbitMQ(opt =>
            {
                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
            })
            .Subscribe<AuthUserAddedEvent, AuthUserAddedEventHandler>();
        }

        #endregion

@@ -29,16 +35,19 @@ public class Program

        #endregion

        builder.Services.RegisterDbContext<UserServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
        builder.Services.RegisterDbContext<UserServiceDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);
        builder.Services.AddTransient<UserManagementService>();

        builder.Services.AddHealthChecks();

        var app = builder.Build();

        // Update database
        using (var scope = app.Services.CreateScope())
        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<UserServiceDbContext>();
            dbContext.UpdateDatabase();
        }
@@ -34,15 +34,18 @@ services:
  # Backend Services
  # ===========================================
  novel-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.NovelService/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-novel-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_NovelService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      Novelpia__Username: ${NOVELPIA_USERNAME}
      Novelpia__Password: ${NOVELPIA_PASSWORD}
      NovelUpdateService__PendingImageUrl: https://files.fictionarchive.orfl.xyz/api/pendingupload.png
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
@@ -51,13 +54,16 @@ services:
    restart: unless-stopped

  translation-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.TranslationService/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-translation-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_TranslationService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      DeepL__ApiKey: ${DEEPL_API_KEY}
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
@@ -66,12 +72,15 @@ services:
    restart: unless-stopped

  scheduler-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.SchedulerService/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-scheduler-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_SchedulerService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
@@ -80,12 +89,15 @@ services:
    restart: unless-stopped

  user-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.UserService/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-user-service:latest
    environment:
      ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_UserService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      postgres:
        condition: service_healthy
@@ -94,20 +106,21 @@ services:
    restart: unless-stopped

  authentication-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.AuthenticationService/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-authentication-service:latest
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    depends_on:
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  file-service:
    build:
      context: .
      dockerfile: FictionArchive.Service.FileService/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-file-service:latest
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
      S3__Endpoint: ${S3_ENDPOINT:-https://s3.orfl.xyz}
@@ -115,6 +128,11 @@ services:
      S3__AccessKey: ${S3_ACCESS_KEY}
      S3__SecretKey: ${S3_SECRET_KEY}
      Proxy__BaseUrl: https://files.orfl.xyz/api
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.file-service.rule=Host(`files.orfl.xyz`)"
@@ -130,11 +148,14 @@ services:
  # API Gateway
  # ===========================================
  api-gateway:
    build:
      context: .
      dockerfile: FictionArchive.API/Dockerfile
    image: git.orfl.xyz/conco/fictionarchive-api:latest
    environment:
      ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.api-gateway.rule=Host(`api.fictionarchive.orfl.xyz`)"
@@ -154,15 +175,12 @@ services:
  # Frontend
  # ===========================================
  frontend:
    build:
      context: ./fictionarchive-web
      dockerfile: Dockerfile
      args:
        VITE_GRAPHQL_URI: https://api.fictionarchive.orfl.xyz/graphql/
        VITE_OIDC_AUTHORITY: ${OIDC_AUTHORITY:-https://auth.orfl.xyz/application/o/fiction-archive/}
        VITE_OIDC_CLIENT_ID: ${OIDC_CLIENT_ID}
        VITE_OIDC_REDIRECT_URI: https://fictionarchive.orfl.xyz/
        VITE_OIDC_POST_LOGOUT_REDIRECT_URI: https://fictionarchive.orfl.xyz/
    image: git.orfl.xyz/conco/fictionarchive-frontend:latest
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost/"]
      interval: 30s
      timeout: 10s
      retries: 3
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.frontend.rule=Host(`fictionarchive.orfl.xyz`)"
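All of the `healthcheck` blocks added above probe `http://localhost:8080/healthz` inside the containers. That only succeeds if each service registers and maps health checks; the diff shows `builder.Services.AddHealthChecks()` for the User Service, and a minimal sketch of the corresponding endpoint mapping (its exact placement in the services is not shown in this diff) looks like:

```csharp
// Minimal ASP.NET Core health-check wiring, assuming the /healthz route probed by compose.
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddHealthChecks();

var app = builder.Build();
app.MapHealthChecks("/healthz"); // the endpoint wget hits in the compose healthchecks

app.Run();
```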

fictionarchive-web/.dockerignore (new file, 40 lines)
@@ -0,0 +1,40 @@
# Dependencies
node_modules

# Build output
dist

# Environment files
.env
.env.local
.env.*.local

# IDE and editor
.vscode
.idea
*.swp
*.swo

# Git
.git
.gitignore

# Logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Test coverage
coverage

# Docker
Dockerfile
.dockerignore
docker-compose*

# Documentation
README.md
*.md

# TypeScript build info
*.tsbuildinfo