Compare commits

23 Commits: 15a8185621...feature/FA

| SHA1 |
|---|
| 12e3c5dfdd |
| b71d9031e1 |
| 09ebdb1b2a |
| 43d5ada7fb |
| 4635ed1b4e |
| 920fd00910 |
| 0d9f788678 |
| 0938c16a76 |
| f25cbc1a04 |
| 078eaf5237 |
| b9115d78a9 |
| 7e94f06853 |
| 50263109ab |
| 6ebfe81ae3 |
| 80aac63f7d |
| adc99c7000 |
| 87075be61e |
| 259dc08aea |
| 2203d2ee54 |
| 30cc89242d |
| 84294455f9 |
| be62af98d3 |
| aff1396c6a |
@@ -3,18 +3,71 @@ name: Build Gateway
 on:
   workflow_dispatch:
   push:
-    branches:
-      - master
-    paths:
-      - 'FictionArchive.API/**'
+    tags:
+      - 'v*.*.*'

 env:
   REGISTRY: ${{ gitea.server_url }}
   IMAGE_NAME: ${{ gitea.repository_owner }}/fictionarchive-api

 jobs:
+  build-subgraphs:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        service:
+          - name: novel-service
+            project: FictionArchive.Service.NovelService
+            subgraph: Novel
+          - name: translation-service
+            project: FictionArchive.Service.TranslationService
+            subgraph: Translation
+          - name: scheduler-service
+            project: FictionArchive.Service.SchedulerService
+            subgraph: Scheduler
+          - name: user-service
+            project: FictionArchive.Service.UserService
+            subgraph: User
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: '8.0.x'
+
+      - name: Install Fusion CLI
+        run: dotnet tool install -g HotChocolate.Fusion.CommandLine
+
+      - name: Add .NET tools to PATH
+        run: echo "$HOME/.dotnet/tools" >> $GITHUB_PATH
+
+      - name: Restore dependencies
+        run: dotnet restore ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj
+
+      - name: Build
+        run: dotnet build ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj -c Release --no-restore
+
+      - name: Export schema
+        run: |
+          dotnet run -c Release --no-launch-profile \
+            --project ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj \
+            -- schema export --output schema.graphql
+
+      - name: Pack subgraph
+        run: fusion subgraph pack -w ${{ matrix.service.project }}
+
+      - name: Upload subgraph package
+        uses: christopherhx/gitea-upload-artifact@v4
+        with:
+          name: ${{ matrix.service.name }}-subgraph
+          path: ${{ matrix.service.project }}/*.fsp
+          retention-days: 30
+
   build-gateway:
     runs-on: ubuntu-latest
+    needs: build-subgraphs
     steps:
       - name: Checkout
         uses: actions/checkout@v4

@@ -27,44 +80,35 @@ jobs:
       - name: Install Fusion CLI
         run: dotnet tool install -g HotChocolate.Fusion.CommandLine

+      - name: Add .NET tools to PATH
+        run: echo "$HOME/.dotnet/tools" >> $GITHUB_PATH
+
       - name: Create subgraphs directory
         run: mkdir -p subgraphs

-      # Download all subgraph packages from latest successful builds
       - name: Download Novel Service subgraph
-        uses: actions/download-artifact@v4
+        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: novel-service-subgraph
          path: subgraphs/novel
-        continue-on-error: true

       - name: Download Translation Service subgraph
-        uses: actions/download-artifact@v4
+        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: translation-service-subgraph
          path: subgraphs/translation
-        continue-on-error: true

       - name: Download Scheduler Service subgraph
-        uses: actions/download-artifact@v4
+        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: scheduler-service-subgraph
          path: subgraphs/scheduler
-        continue-on-error: true

       - name: Download User Service subgraph
-        uses: actions/download-artifact@v4
+        uses: christopherhx/gitea-download-artifact@v4
        with:
          name: user-service-subgraph
          path: subgraphs/user
-        continue-on-error: true

-      - name: Download File Service subgraph
-        uses: actions/download-artifact@v4
-        with:
-          name: file-service-subgraph
-          path: subgraphs/file
-        continue-on-error: true
-
       - name: Configure subgraph URLs for Docker
         run: |

@@ -95,13 +139,13 @@ jobs:
       - name: Build gateway
         run: dotnet build FictionArchive.API/FictionArchive.API.csproj -c Release --no-restore -p:SkipFusionBuild=true

-      - name: Run tests
-        run: dotnet test FictionArchive.sln -c Release --no-build --verbosity normal
-        continue-on-error: true
-
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

+      - name: Extract registry hostname
+        id: registry
+        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT
+
       - name: Log in to Gitea Container Registry
         uses: docker/login-action@v3
         with:

@@ -116,7 +160,7 @@ jobs:
          file: FictionArchive.API/Dockerfile
          push: true
          tags: |
-            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
-            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
+            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_NAME }}:latest
+            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_NAME }}:${{ gitea.sha }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -1,77 +0,0 @@
-name: Build Subgraphs
-
-on:
-  push:
-    branches:
-      - master
-    paths:
-      - 'FictionArchive.Service.*/**'
-      - 'FictionArchive.Common/**'
-      - 'FictionArchive.Service.Shared/**'
-
-jobs:
-  build-subgraphs:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        service:
-          - name: novel-service
-            project: FictionArchive.Service.NovelService
-            subgraph: Novel
-          - name: translation-service
-            project: FictionArchive.Service.TranslationService
-            subgraph: Translation
-          - name: scheduler-service
-            project: FictionArchive.Service.SchedulerService
-            subgraph: Scheduler
-          - name: user-service
-            project: FictionArchive.Service.UserService
-            subgraph: User
-          - name: file-service
-            project: FictionArchive.Service.FileService
-            subgraph: File
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Setup .NET
-        uses: actions/setup-dotnet@v4
-        with:
-          dotnet-version: '8.0.x'
-
-      - name: Install Fusion CLI
-        run: dotnet tool install -g HotChocolate.Fusion.CommandLine
-
-      - name: Restore dependencies
-        run: dotnet restore ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj
-
-      - name: Build
-        run: dotnet build ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj -c Release --no-restore
-
-      - name: Export schema
-        run: |
-          dotnet run -c Release --no-launch-profile \
-            --project ${{ matrix.service.project }}/${{ matrix.service.project }}.csproj \
-            -- schema export --output ${{ matrix.service.project }}/schema.graphql
-
-      - name: Pack subgraph
-        run: fusion subgraph pack -w ${{ matrix.service.project }}
-
-      - name: Upload subgraph package
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ matrix.service.name }}-subgraph
-          path: ${{ matrix.service.project }}/*.fsp
-          retention-days: 30
-
-  # Trigger gateway build after all subgraphs are built
-  trigger-gateway:
-    runs-on: ubuntu-latest
-    needs: build-subgraphs
-    steps:
-      - name: Trigger gateway workflow
-        run: |
-          curl -X POST \
-            -H "Authorization: token ${{ secrets.GITEA_TOKEN }}" \
-            "${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/actions/workflows/build-gateway.yml/dispatches" \
-            -d '{"ref":"master"}'
@@ -20,14 +20,6 @@ jobs:
        with:
          dotnet-version: '8.0.x'

-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.12'
-
-      - name: Install Fusion CLI
-        run: dotnet tool install -g HotChocolate.Fusion.CommandLine
-
       - name: Restore dependencies
         run: dotnet restore FictionArchive.sln

@@ -35,7 +27,27 @@
         run: dotnet build FictionArchive.sln --configuration Release --no-restore /p:SkipFusionBuild=true

       - name: Run tests
-        run: dotnet test FictionArchive.sln --configuration Release --no-build --verbosity normal
+        run: |
+          dotnet test FictionArchive.sln --configuration Release --no-build --verbosity normal \
+            --logger "trx;LogFileName=test-results.trx" \
+            --collect:"XPlat Code Coverage" \
+            --results-directory ./TestResults
+
+      - name: Upload test results
+        uses: christopherhx/gitea-upload-artifact@v4
+        if: always()
+        with:
+          name: test-results
+          path: ./TestResults/**/*.trx
+          retention-days: 30
+
+      - name: Upload coverage results
+        uses: christopherhx/gitea-upload-artifact@v4
+        if: always()
+        with:
+          name: coverage-results
+          path: ./TestResults/**/coverage.cobertura.xml
+          retention-days: 30

   build-frontend:
     runs-on: ubuntu-latest
@@ -1,43 +1,49 @@
-name: Claude Assistant for Gitea
+name: Claude PR Assistant

 on:
-  # Trigger on issue comments (works on both issues and pull requests in Gitea)
   issue_comment:
     types: [created]
-  # Trigger on issues being opened or assigned
+  pull_request_review_comment:
+    types: [created]
   issues:
     types: [opened, assigned]
-  # Note: pull_request_review_comment has limited support in Gitea
-  # Use issue_comment instead which covers PR comments
+  pull_request_review:
+    types: [submitted]

 jobs:
-  claude-assistant:
-    # Basic trigger detection - check for @claude in comments or issue body
+  claude-code-action:
     if: |
       (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
-      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || github.event.action == 'assigned'))
+      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
+      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
+      (github.event_name == 'issues' && contains(github.event.issue.body, '@claude'))
     runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
      issues: write
-      # Note: Gitea Actions may not require id-token: write for basic functionality
+      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

-      - name: Run Claude Assistant
-        uses: markwylde/claude-code-gitea-action
+      - name: Run Claude PR Action
+        uses: markwylde/claude-code-gitea-action@v1.0.20
        with:
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          gitea_token: ${{ secrets.CLAUDE_GITEA_TOKEN }}
+          # Or use OAuth token instead:
+          # claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          timeout_minutes: "60"
-          trigger_phrase: "@claude"
-          # Optional: Customize for Gitea environment
-          custom_instructions: |
-            You are working in a Gitea environment. Be aware that:
-            - Some GitHub Actions features may behave differently
-            - Focus on core functionality and avoid advanced GitHub-specific features
-            - Use standard git operations when possible
+          # mode: tag # Default: responds to @claude mentions
+          # Optional: Restrict network access to specific domains only
+          # experimental_allowed_domains: |
+          #   .anthropic.com
+          #   .github.com
+          #   api.github.com
+          #   .githubusercontent.com
+          #   bun.sh
+          #   registry.npmjs.org
+          #   .blob.core.windows.net
@@ -15,8 +15,6 @@ jobs:
    strategy:
      matrix:
        service:
-          - name: api
-            dockerfile: FictionArchive.API/Dockerfile
          - name: novel-service
            dockerfile: FictionArchive.Service.NovelService/Dockerfile
          - name: user-service

@@ -40,6 +38,10 @@ jobs:
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

+      - name: Extract registry hostname
+        id: registry
+        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT
+
       - name: Log in to Gitea Container Registry
         uses: docker/login-action@v3
         with:

@@ -54,8 +56,8 @@ jobs:
          file: ${{ matrix.service.dockerfile }}
          push: true
          tags: |
-            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:${{ steps.version.outputs.VERSION }}
-            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:latest
+            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:${{ steps.version.outputs.VERSION }}
+            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-${{ matrix.service.name }}:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

@@ -72,6 +74,10 @@ jobs:
        id: version
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT

+      - name: Extract registry hostname
+        id: registry
+        run: echo "HOST=$(echo '${{ gitea.server_url }}' | sed 's|https\?://||')" >> $GITHUB_OUTPUT
+
       - name: Log in to Gitea Container Registry
         uses: docker/login-action@v3
         with:

@@ -92,7 +98,7 @@ jobs:
            VITE_OIDC_REDIRECT_URI=${{ vars.VITE_OIDC_REDIRECT_URI }}
            VITE_OIDC_POST_LOGOUT_REDIRECT_URI=${{ vars.VITE_OIDC_POST_LOGOUT_REDIRECT_URI }}
          tags: |
-            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-frontend:${{ steps.version.outputs.VERSION }}
-            ${{ env.REGISTRY }}/${{ env.IMAGE_PREFIX }}-frontend:latest
+            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-frontend:${{ steps.version.outputs.VERSION }}
+            ${{ steps.registry.outputs.HOST }}/${{ env.IMAGE_PREFIX }}-frontend:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
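The `Extract registry hostname` step added in both jobs exists because a Docker image reference must begin with a bare registry host, while `gitea.server_url` carries an `http://` or `https://` scheme; the `sed` expression simply strips that scheme. A minimal C# sketch of the same transformation (the server URL below is only an illustrative value):

```csharp
using System.Text.RegularExpressions;

// Equivalent of: echo "$SERVER_URL" | sed 's|https\?://||'
static string RegistryHost(string serverUrl) =>
    Regex.Replace(serverUrl, "^https?://", "");

// Example with a hypothetical server URL:
// RegistryHost("https://git.example.com") returns "git.example.com",
// which is what ends up in tags such as <host>/<owner>/fictionarchive-api:latest.
```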
@@ -7,9 +7,9 @@ This document describes the CI/CD pipeline configuration for FictionArchive usin
 | Workflow | File | Trigger | Purpose |
 |----------|------|---------|---------|
 | CI | `build.yml` | Push/PR to master | Build and test all projects |
-| Build Subgraphs | `build-subgraphs.yml` | Push to master (service changes) | Build GraphQL subgraph packages |
-| Build Gateway | `build-gateway.yml` | Manual or triggered by subgraphs | Compose gateway and build Docker image |
+| Build Gateway | `build-gateway.yml` | Tag `v*.*.*` or manual | Build subgraphs, compose gateway, push API image |
 | Release | `release.yml` | Tag `v*.*.*` | Build and push all Docker images |
+| Claude PR Assistant | `claude_assistant.yml` | Issue/PR comments with @claude | AI-assisted code review and issue handling |

 ## Pipeline Architecture

@@ -18,27 +18,32 @@ This document describes the CI/CD pipeline configuration for FictionArchive usin
 │ Push to master                                                      │
 └─────────────────────────────┬───────────────────────────────────────┘
                               │
-              ┌───────────────┴───────────────┐
-              ▼                               ▼
-┌─────────────────────────┐     ┌─────────────────────────┐
-│ build.yml               │     │ build-subgraphs.yml     │
-│ (CI checks - always)    │     │ (if service changes)    │
-└─────────────────────────┘     └────────────┬────────────┘
-                                             │
-                                             ▼
-                              ┌─────────────────────────┐
-                              │ build-gateway.yml       │
-                              │ (compose & push API)    │
-                              └─────────────────────────┘
+                              ▼
+                ┌─────────────────────────┐
+                │ build.yml               │
+                │ (CI checks)             │
+                └─────────────────────────┘

 ┌─────────────────────────────────────────────────────────────────────┐
 │ Push tag v*.*.*                                                     │
+└─────────────────────────────┬───────────────────────────────────────┘
+                              │
+              ┌───────────────┴───────────────┐
+              ▼                               ▼
+┌─────────────────────────┐     ┌─────────────────────────┐
+│ release.yml             │     │ build-gateway.yml       │
+│ (build & push all       │     │ (build subgraphs &      │
+│  backend + frontend)    │     │  push API gateway)      │
+└─────────────────────────┘     └─────────────────────────┘
+
+┌─────────────────────────────────────────────────────────────────────┐
+│ Issue/PR comment containing @claude                                 │
 └─────────────────────────────┬───────────────────────────────────────┘
                               │
                               ▼
               ┌─────────────────────────┐
-              │ release.yml             │
-              │ (build & push all)      │
+              │ claude_assistant.yml    │
+              │ (AI code assistance)    │
               └─────────────────────────┘
 ```

@@ -51,14 +56,15 @@ Configure these in **Settings → Actions → Secrets**:
 | Secret | Description | Required By |
 |--------|-------------|-------------|
 | `REGISTRY_TOKEN` | Gitea access token with `write:package` scope | `release.yml`, `build-gateway.yml` |
-| `GITEA_TOKEN` | Gitea access token for API calls | `build-subgraphs.yml` |
+| `CLAUDE_CODE_OAUTH_TOKEN` | Claude Code OAuth token | `claude_assistant.yml` |
+| `CLAUDE_GITEA_TOKEN` | Gitea token for Claude assistant | `claude_assistant.yml` |

 #### Creating Access Tokens

 1. Go to **Settings → Applications → Access Tokens**
 2. Create a new token with the following scopes:
    - `write:package` - Push container images
-   - `write:repository` - Trigger workflows via API
+   - `write:repository` - For Claude assistant to push commits
 3. Copy the token and add it as a repository secret

 ### Repository Variables

@@ -85,42 +91,62 @@ Configure these in **Settings → Actions → Variables**:
 **Requirements:**
 - .NET 8.0 SDK
-- Python 3.12
 - Node.js 20
-- HotChocolate Fusion CLI

-### Build Subgraphs (`build-subgraphs.yml`)
+**Steps (Backend):**
+1. Checkout repository
+2. Setup .NET 8.0
+3. Restore dependencies
+4. Build solution (Release, with `SkipFusionBuild=true`)
+5. Run tests

-**Trigger:** Push to `master` with changes in:
-- `FictionArchive.Service.*/**`
-- `FictionArchive.Common/**`
-- `FictionArchive.Service.Shared/**`
-
-**Jobs:**
-1. `build-subgraphs` - Matrix job building each service's `.fsp` package
-2. `trigger-gateway` - Triggers gateway rebuild via API
-
-**Subgraphs Built:**
-- Novel Service
-- Translation Service
-- Scheduler Service
-- User Service
-- File Service
-
-**Artifacts:** Each subgraph produces a `.fsp` file retained for 30 days.
+**Steps (Frontend):**
+1. Checkout repository
+2. Setup Node.js 20
+3. Install dependencies (`npm ci`)
+4. Run linter (`npm run lint`)
+5. Build application (`npm run build`)

 ### Build Gateway (`build-gateway.yml`)

 **Trigger:**
 - Manual dispatch (`workflow_dispatch`)
-- Push to `master` with changes in `FictionArchive.API/**`
-- Triggered by `build-subgraphs.yml` completion
+- Push tag matching `v*.*.*`

-**Process:**
-1. Downloads all subgraph `.fsp` artifacts
-2. Configures Docker-internal URLs for each subgraph
-3. Composes gateway schema using Fusion CLI
-4. Builds and pushes API Docker image
+**Jobs:**
+
+#### 1. `build-subgraphs` (Matrix Job)
+Builds GraphQL subgraph packages for each service:
+
+| Service | Project | Subgraph Name |
+|---------|---------|---------------|
+| novel-service | FictionArchive.Service.NovelService | Novel |
+| translation-service | FictionArchive.Service.TranslationService | Translation |
+| scheduler-service | FictionArchive.Service.SchedulerService | Scheduler |
+| user-service | FictionArchive.Service.UserService | User |
+
+**Note:** File Service and Authentication Service are not subgraphs (no GraphQL schema).
+
+**Steps:**
+1. Checkout repository
+2. Setup .NET 8.0
+3. Install HotChocolate Fusion CLI
+4. Restore and build service project
+5. Export GraphQL schema (`schema export`)
+6. Pack subgraph into `.fsp` file
+7. Upload artifact (retained 30 days)
+
+#### 2. `build-gateway` (Depends on `build-subgraphs`)
+Composes the API gateway from subgraph packages.
+
+**Steps:**
+1. Checkout repository
+2. Setup .NET 8.0 and Fusion CLI
+3. Download all subgraph artifacts
+4. Configure Docker-internal URLs (`http://{service}-service:8080/graphql`)
+5. Compose gateway schema using Fusion CLI
+6. Build gateway project
+7. Build and push Docker image

 **Image Tags:**
 - `<registry>/<owner>/fictionarchive-api:latest`

@@ -131,23 +157,54 @@ Configure these in **Settings → Actions → Variables**:
 **Trigger:** Push tag matching `v*.*.*` (e.g., `v1.0.0`)

 **Jobs:**
-1. `build-and-push` - Matrix job building all backend service images
-2. `build-frontend` - Builds and pushes frontend image

-**Services Built:**
-- `fictionarchive-api`
-- `fictionarchive-novel-service`
-- `fictionarchive-user-service`
-- `fictionarchive-translation-service`
-- `fictionarchive-file-service`
-- `fictionarchive-scheduler-service`
-- `fictionarchive-authentication-service`
-- `fictionarchive-frontend`
+#### 1. `build-and-push` (Matrix Job)
+Builds and pushes all backend service images:
+
+| Service | Dockerfile |
+|---------|------------|
+| novel-service | FictionArchive.Service.NovelService/Dockerfile |
+| user-service | FictionArchive.Service.UserService/Dockerfile |
+| translation-service | FictionArchive.Service.TranslationService/Dockerfile |
+| file-service | FictionArchive.Service.FileService/Dockerfile |
+| scheduler-service | FictionArchive.Service.SchedulerService/Dockerfile |
+| authentication-service | FictionArchive.Service.AuthenticationService/Dockerfile |
+
+#### 2. `build-frontend`
+Builds and pushes the frontend image with environment-specific build arguments.
+
+**Build Args:**
+- `VITE_GRAPHQL_URI`
+- `VITE_OIDC_AUTHORITY`
+- `VITE_OIDC_CLIENT_ID`
+- `VITE_OIDC_REDIRECT_URI`
+- `VITE_OIDC_POST_LOGOUT_REDIRECT_URI`

 **Image Tags:**
 - `<registry>/<owner>/fictionarchive-<service>:<version>`
 - `<registry>/<owner>/fictionarchive-<service>:latest`
+
+### Claude PR Assistant (`claude_assistant.yml`)
+
+**Trigger:** Comments or issues containing `@claude`:
+- Issue comments
+- Pull request review comments
+- Pull request reviews
+- New issues (opened or assigned)
+
+**Permissions Required:**
+- `contents: write`
+- `pull-requests: write`
+- `issues: write`
+- `id-token: write`
+
+**Usage:**
+Mention `@claude` in any issue or PR comment to invoke the AI assistant for:
+- Code review assistance
+- Bug analysis
+- Implementation suggestions
+- Documentation help

 ## Container Registry

 Images are pushed to the Gitea Container Registry at:

@@ -155,6 +212,19 @@ Images are pushed to the Gitea Container Registry at:
 <gitea-server-url>/<repository-owner>/fictionarchive-<service>:<tag>
 ```

+### Image Naming Convention
+
+| Image | Description |
+|-------|-------------|
+| `fictionarchive-api` | API Gateway (GraphQL Federation) |
+| `fictionarchive-novel-service` | Novel Service |
+| `fictionarchive-user-service` | User Service |
+| `fictionarchive-translation-service` | Translation Service |
+| `fictionarchive-file-service` | File Service |
+| `fictionarchive-scheduler-service` | Scheduler Service |
+| `fictionarchive-authentication-service` | Authentication Service |
+| `fictionarchive-frontend` | Web Frontend |
+
 ### Pulling Images

 ```bash

@@ -184,13 +254,13 @@ docker pull <gitea-server-url>/<owner>/fictionarchive-api:latest
 - Ensure the `REGISTRY_TOKEN` secret is configured in repository settings
 - Verify the token has `write:package` scope

-**"Failed to trigger gateway workflow"**
-- Ensure `GITEA_TOKEN` secret is configured
-- Verify the token has `write:repository` scope
-
 **"No subgraph artifacts found"**
-- The gateway build requires subgraph artifacts from a previous `build-subgraphs` run
-- Trigger `build-subgraphs.yml` manually or push a change to a service
+- The gateway build requires subgraph artifacts from the `build-subgraphs` job
+- If subgraph builds failed, check the matrix job logs for errors
+
+**"Schema export failed"**
+- Ensure the service project has a valid `subgraph-config.json`
+- Check that the service starts correctly for schema export

 ### Frontend Build Failures

@@ -204,6 +274,13 @@ docker pull <gitea-server-url>/<owner>/fictionarchive-api:latest
 - Verify `REGISTRY_TOKEN` has correct permissions
 - Check that the token hasn't expired

+### Claude Assistant Failures
+
+**"Claude assistant not responding"**
+- Verify `CLAUDE_CODE_OAUTH_TOKEN` is configured
+- Verify `CLAUDE_GITEA_TOKEN` is configured and has write permissions
+- Check that the comment contains `@claude` mention
+
 ## Local Testing

 To test workflows locally before pushing:
@@ -7,17 +7,17 @@ EXPOSE 8081
 FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
 ARG BUILD_CONFIGURATION=Release
 WORKDIR /src
-COPY ["FictionArchive.Service.ImageService/FictionArchive.Service.ImageService.csproj", "FictionArchive.Service.ImageService/"]
-RUN dotnet restore "FictionArchive.Service.ImageService/FictionArchive.Service.ImageService.csproj"
+COPY ["FictionArchive.Service.FileService/FictionArchive.Service.FileService.csproj", "FictionArchive.Service.FileService/"]
+RUN dotnet restore "FictionArchive.Service.FileService/FictionArchive.Service.FileService.csproj"
 COPY . .
-WORKDIR "/src/FictionArchive.Service.ImageService"
-RUN dotnet build "./FictionArchive.Service.ImageService.csproj" -c $BUILD_CONFIGURATION -o /app/build
+WORKDIR "/src/FictionArchive.Service.FileService"
+RUN dotnet build "./FictionArchive.Service.FileService.csproj" -c $BUILD_CONFIGURATION -o /app/build

 FROM build AS publish
 ARG BUILD_CONFIGURATION=Release
-RUN dotnet publish "./FictionArchive.Service.ImageService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
+RUN dotnet publish "./FictionArchive.Service.FileService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

 FROM base AS final
 WORKDIR /app
 COPY --from=publish /app/publish .
-ENTRYPOINT ["dotnet", "FictionArchive.Service.ImageService.dll"]
+ENTRYPOINT ["dotnet", "FictionArchive.Service.FileService.dll"]
@@ -9,7 +9,7 @@
     "BaseUrl": "https://localhost:7247/api"
   },
   "RabbitMQ": {
-    "ConnectionString": "amqp://localhost",
+    "ConnectionString": "amqp://localhost2",
     "ClientIdentifier": "FileService"
   },
   "S3": {
@@ -12,26 +12,15 @@ namespace FictionArchive.Service.NovelService.GraphQL;

 public class Mutation
 {
-    public async Task<NovelUpdateRequestedEvent> ImportNovel(string novelUrl, IEventBus eventBus)
+    public async Task<NovelUpdateRequestedEvent> ImportNovel(string novelUrl, NovelUpdateService service)
     {
-        var importNovelRequestEvent = new NovelUpdateRequestedEvent()
-        {
-            NovelUrl = novelUrl
-        };
-        await eventBus.Publish(importNovelRequestEvent);
-        return importNovelRequestEvent;
+        return await service.QueueNovelImport(novelUrl);
     }

     public async Task<ChapterPullRequestedEvent> FetchChapterContents(uint novelId,
         uint chapterNumber,
-        IEventBus eventBus)
+        NovelUpdateService service)
     {
-        var chapterPullEvent = new ChapterPullRequestedEvent()
-        {
-            NovelId = novelId,
-            ChapterNumber = chapterNumber
-        };
-        await eventBus.Publish(chapterPullEvent);
-        return chapterPullEvent;
+        return await service.QueueChapterPull(novelId, chapterNumber);
     }
 }
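With this change the resolvers take `NovelUpdateService` as a parameter, which HotChocolate resolves from dependency injection rather than from GraphQL arguments, so the service must be registered with the container. A hedged sketch of that registration; the exact lifetime, and whether an explicit `RegisterService` call is needed, depend on the HotChocolate version in use and are not shown in this diff:

```csharp
// Assumed registration in the Novel Service's Program.cs (not part of this diff):
builder.Services.AddScoped<NovelUpdateService>();

// On some HotChocolate versions the type must also be announced to the executor:
// builder.Services.AddGraphQLServer().RegisterService<NovelUpdateService>();
```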
@@ -6,6 +6,7 @@ using FictionArchive.Service.NovelService.Services;
 using FictionArchive.Service.NovelService.Services.EventHandlers;
 using FictionArchive.Service.NovelService.Services.SourceAdapters;
 using FictionArchive.Service.NovelService.Services.SourceAdapters.Novelpia;
+using FictionArchive.Service.Shared;
 using FictionArchive.Service.Shared.Extensions;
 using FictionArchive.Service.Shared.Services.EventBus.Implementations;
 using FictionArchive.Service.Shared.Services.GraphQL;

@@ -17,6 +18,8 @@ public class Program
 {
     public static void Main(string[] args)
     {
+        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);
+
         var builder = WebApplication.CreateBuilder(args);
         builder.AddLocalAppsettings();

@@ -24,15 +27,18 @@ public class Program

         #region Event Bus

-        builder.Services.AddRabbitMQ(opt =>
+        if (!isSchemaExport)
         {
-            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
-        })
-            .Subscribe<TranslationRequestCompletedEvent, TranslationRequestCompletedEventHandler>()
-            .Subscribe<NovelUpdateRequestedEvent, NovelUpdateRequestedEventHandler>()
-            .Subscribe<ChapterPullRequestedEvent, ChapterPullRequestedEventHandler>()
-            .Subscribe<FileUploadRequestStatusUpdateEvent, FileUploadRequestStatusUpdateEventHandler>();
+            builder.Services.AddRabbitMQ(opt =>
+            {
+                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
+            })
+            .Subscribe<TranslationRequestCompletedEvent, TranslationRequestCompletedEventHandler>()
+            .Subscribe<NovelUpdateRequestedEvent, NovelUpdateRequestedEventHandler>()
+            .Subscribe<ChapterPullRequestedEvent, ChapterPullRequestedEventHandler>()
+            .Subscribe<FileUploadRequestStatusUpdateEvent, FileUploadRequestStatusUpdateEventHandler>();
+        }

         #endregion

         #region GraphQL

@@ -43,7 +49,9 @@ public class Program

         #region Database

-        builder.Services.RegisterDbContext<NovelServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
+        builder.Services.RegisterDbContext<NovelServiceDbContext>(
+            builder.Configuration.GetConnectionString("DefaultConnection"),
+            skipInfrastructure: isSchemaExport);

         #endregion

@@ -69,9 +77,10 @@ public class Program

         var app = builder.Build();

-        // Update database
-        using (var scope = app.Services.CreateScope())
+        // Update database (skip in schema export mode)
+        if (!isSchemaExport)
         {
+            using var scope = app.Services.CreateScope();
             var dbContext = scope.ServiceProvider.GetRequiredService<NovelServiceDbContext>();
             dbContext.UpdateDatabase();
         }
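The `schema export` arguments that `SchemaExportDetector` looks for are ultimately handled by HotChocolate's command-line integration, which this hunk does not show. A sketch of how the end of `Main` presumably looks for `dotnet run -- schema export` to work; the exact call is an assumption (`RunWithGraphQLCommands` ships with HotChocolate's command-line package):

```csharp
// Assumption: the app is started through HotChocolate's command runner so that
// "schema export --output schema.graphql" is interpreted as a CLI command
// instead of booting the regular web host.
app.MapGraphQL();
app.RunWithGraphQLCommands(args);
```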
@@ -2,6 +2,7 @@ using FictionArchive.Service.FileService.IntegrationEvents;
 using FictionArchive.Service.NovelService.Models.Configuration;
 using FictionArchive.Service.NovelService.Models.Enums;
 using FictionArchive.Service.NovelService.Models.Images;
+using FictionArchive.Service.NovelService.Models.IntegrationEvents;
 using FictionArchive.Service.NovelService.Models.Localization;
 using FictionArchive.Service.NovelService.Models.Novels;
 using FictionArchive.Service.NovelService.Models.SourceAdapters;

@@ -201,4 +202,25 @@ public class NovelUpdateService

         await _dbContext.SaveChangesAsync();
     }
+
+    public async Task<NovelUpdateRequestedEvent> QueueNovelImport(string novelUrl)
+    {
+        var importNovelRequestEvent = new NovelUpdateRequestedEvent()
+        {
+            NovelUrl = novelUrl
+        };
+        await _eventBus.Publish(importNovelRequestEvent);
+        return importNovelRequestEvent;
+    }
+
+    public async Task<ChapterPullRequestedEvent> QueueChapterPull(uint novelId, uint chapterNumber)
+    {
+        var chapterPullEvent = new ChapterPullRequestedEvent()
+        {
+            NovelId = novelId,
+            ChapterNumber = chapterNumber
+        };
+        await _eventBus.Publish(chapterPullEvent);
+        return chapterPullEvent;
+    }
 }
@@ -1,5 +1,6 @@
 using FictionArchive.Service.SchedulerService.GraphQL;
 using FictionArchive.Service.SchedulerService.Services;
+using FictionArchive.Service.Shared;
 using FictionArchive.Service.Shared.Extensions;
 using FictionArchive.Service.Shared.Services.EventBus.Implementations;
 using Quartz;

@@ -11,6 +12,8 @@ public class Program
 {
     public static void Main(string[] args)
     {
+        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);
+
         var builder = WebApplication.CreateBuilder(args);

         // Services

@@ -20,45 +23,63 @@ public class Program

         #region Database

-        builder.Services.RegisterDbContext<SchedulerServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
+        builder.Services.RegisterDbContext<SchedulerServiceDbContext>(
+            builder.Configuration.GetConnectionString("DefaultConnection"),
+            skipInfrastructure: isSchemaExport);

         #endregion

         #region Event Bus

-        builder.Services.AddRabbitMQ(opt =>
+        if (!isSchemaExport)
         {
-            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
-        });
+            builder.Services.AddRabbitMQ(opt =>
+            {
+                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
+            });
+        }

         #endregion

         #region Quartz

-        builder.Services.AddQuartz(opt =>
+        if (isSchemaExport)
         {
-            opt.UsePersistentStore(pso =>
+            // Schema export mode: use in-memory store (no DB connection needed)
+            builder.Services.AddQuartz(opt =>
             {
-                pso.UsePostgres(pgsql =>
-                {
-                    pgsql.ConnectionString = builder.Configuration.GetConnectionString("DefaultConnection");
-                    pgsql.UseDriverDelegate<PostgreSQLDelegate>();
-                    pgsql.TablePrefix = "quartz.qrtz_"; // Needed for Postgres due to the differing schema used
-                });
-                pso.UseNewtonsoftJsonSerializer();
+                opt.UseInMemoryStore();
             });
-        });
-        builder.Services.AddQuartzHostedService(opt =>
-        {
-            opt.WaitForJobsToComplete = true;
-        });
+        }
+        else
+        {
+            builder.Services.AddQuartz(opt =>
+            {
+                opt.UsePersistentStore(pso =>
+                {
+                    pso.UsePostgres(pgsql =>
+                    {
+                        pgsql.ConnectionString = builder.Configuration.GetConnectionString("DefaultConnection");
+                        pgsql.UseDriverDelegate<PostgreSQLDelegate>();
+                        pgsql.TablePrefix = "quartz.qrtz_"; // Needed for Postgres due to the differing schema used
+                    });
+                    pso.UseNewtonsoftJsonSerializer();
+                });
+            });
+            builder.Services.AddQuartzHostedService(opt =>
+            {
+                opt.WaitForJobsToComplete = true;
+            });
+        }

         #endregion

         var app = builder.Build();

-        using (var scope = app.Services.CreateScope())
+        // Update database (skip in schema export mode)
+        if (!isSchemaExport)
         {
+            using var scope = app.Services.CreateScope();
             var dbContext = scope.ServiceProvider.GetRequiredService<SchedulerServiceDbContext>();
             dbContext.UpdateDatabase();
         }
@@ -6,16 +6,29 @@ namespace FictionArchive.Service.Shared.Extensions;

 public static class DatabaseExtensions
 {
-    public static IServiceCollection RegisterDbContext<TContext>(this IServiceCollection services,
-        string connectionString) where TContext : FictionArchiveDbContext
+    public static IServiceCollection RegisterDbContext<TContext>(
+        this IServiceCollection services,
+        string connectionString,
+        bool skipInfrastructure = false) where TContext : FictionArchiveDbContext
     {
-        services.AddDbContext<TContext>(options =>
+        if (skipInfrastructure)
         {
-            options.UseNpgsql(connectionString, o =>
+            // For schema export: use in-memory provider to allow EF Core entity discovery
+            services.AddDbContext<TContext>(options =>
             {
-                o.UseNodaTime();
+                options.UseInMemoryDatabase($"SchemaExport_{typeof(TContext).Name}");
             });
-        });
+        }
+        else
+        {
+            services.AddDbContext<TContext>(options =>
+            {
+                options.UseNpgsql(connectionString, o =>
+                {
+                    o.UseNodaTime();
+                });
+            });
+        }
         return services;
     }
 }
@@ -18,6 +18,7 @@
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
+    <PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.11" />
     <PackageReference Include="Microsoft.EntityFrameworkCore.Relational" Version="9.0.11" />
     <PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="9.0.11">
       <PrivateAssets>all</PrivateAssets>
FictionArchive.Service.Shared/SchemaExportDetector.cs (new file, 22 lines)
@@ -0,0 +1,22 @@
+namespace FictionArchive.Service.Shared;
+
+/// <summary>
+/// Detects if the application is running in schema export mode (for HotChocolate CLI commands).
+/// In this mode, infrastructure like RabbitMQ and databases should not be initialized.
+/// </summary>
+public static class SchemaExportDetector
+{
+    /// <summary>
+    /// Checks if the current run is a schema export command.
+    /// </summary>
+    /// <param name="args">Command line arguments passed to Main()</param>
+    /// <returns>True if running schema export, false otherwise</returns>
+    public static bool IsSchemaExportMode(string[] args)
+    {
+        // HotChocolate CLI pattern: "schema export" after "--" delimiter
+        // Handles: dotnet run -- schema export --output schema.graphql
+        var normalizedArgs = args.SkipWhile(a => a == "--").ToArray();
+        return normalizedArgs.Length > 0 &&
+               normalizedArgs[0].Equals("schema", StringComparison.OrdinalIgnoreCase);
+    }
+}
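For reference, the argument shapes the detector accepts, based only on the implementation above (values are illustrative):

```csharp
// `dotnet run -- schema export --output schema.graphql` hands these args to Main
// (dotnet strips the first "--" before forwarding):
SchemaExportDetector.IsSchemaExportMode(new[] { "schema", "export", "--output", "schema.graphql" }); // true

// If the "--" delimiter is passed through verbatim it is skipped first:
SchemaExportDetector.IsSchemaExportMode(new[] { "--", "schema", "export" }); // true

// A normal service start has no such arguments:
SchemaExportDetector.IsSchemaExportMode(Array.Empty<string>()); // false
```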
@@ -1,5 +1,6 @@
 using DeepL;
 using FictionArchive.Common.Extensions;
+using FictionArchive.Service.Shared;
 using FictionArchive.Service.Shared.Extensions;
 using FictionArchive.Service.Shared.Services.EventBus.Implementations;
 using FictionArchive.Service.Shared.Services.GraphQL;

@@ -18,6 +19,8 @@ public class Program
 {
     public static void Main(string[] args)
     {
+        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);
+
         var builder = WebApplication.CreateBuilder(args);
         builder.AddLocalAppsettings();

@@ -25,18 +28,23 @@ public class Program

         #region Event Bus

-        builder.Services.AddRabbitMQ(opt =>
+        if (!isSchemaExport)
         {
-            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
-        })
-            .Subscribe<TranslationRequestCreatedEvent, TranslationRequestCreatedEventHandler>();
+            builder.Services.AddRabbitMQ(opt =>
+            {
+                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
+            })
+            .Subscribe<TranslationRequestCreatedEvent, TranslationRequestCreatedEventHandler>();
+        }

         #endregion


         #region Database

-        builder.Services.RegisterDbContext<TranslationServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
+        builder.Services.RegisterDbContext<TranslationServiceDbContext>(
+            builder.Configuration.GetConnectionString("DefaultConnection"),
+            skipInfrastructure: isSchemaExport);

         #endregion

@@ -60,9 +68,10 @@ public class Program

         var app = builder.Build();

-        // Update database
-        using (var scope = app.Services.CreateScope())
+        // Update database (skip in schema export mode)
+        if (!isSchemaExport)
         {
+            using var scope = app.Services.CreateScope();
             var dbContext = scope.ServiceProvider.GetRequiredService<TranslationServiceDbContext>();
             dbContext.UpdateDatabase();
         }
@@ -1,3 +1,4 @@
+using FictionArchive.Service.Shared;
 using FictionArchive.Service.Shared.Extensions;
 using FictionArchive.Service.Shared.Services.EventBus.Implementations;
 using FictionArchive.Service.UserService.GraphQL;

@@ -11,16 +12,21 @@ public class Program
 {
     public static void Main(string[] args)
     {
+        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);
+
         var builder = WebApplication.CreateBuilder(args);

         #region Event Bus

-        builder.Services.AddRabbitMQ(opt =>
+        if (!isSchemaExport)
         {
-            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
-        })
-            .Subscribe<AuthUserAddedEvent, AuthUserAddedEventHandler>();
+            builder.Services.AddRabbitMQ(opt =>
+            {
+                builder.Configuration.GetSection("RabbitMQ").Bind(opt);
+            })
+            .Subscribe<AuthUserAddedEvent, AuthUserAddedEventHandler>();
+        }

         #endregion

         #region GraphQL

@@ -29,16 +35,19 @@ public class Program

         #endregion

-        builder.Services.RegisterDbContext<UserServiceDbContext>(builder.Configuration.GetConnectionString("DefaultConnection"));
+        builder.Services.RegisterDbContext<UserServiceDbContext>(
+            builder.Configuration.GetConnectionString("DefaultConnection"),
+            skipInfrastructure: isSchemaExport);
         builder.Services.AddTransient<UserManagementService>();

         builder.Services.AddHealthChecks();

         var app = builder.Build();

-        // Update database
-        using (var scope = app.Services.CreateScope())
+        // Update database (skip in schema export mode)
+        if (!isSchemaExport)
         {
+            using var scope = app.Services.CreateScope();
             var dbContext = scope.ServiceProvider.GetRequiredService<UserServiceDbContext>();
             dbContext.UpdateDatabase();
         }
@@ -34,15 +34,18 @@ services:
   # Backend Services
   # ===========================================
   novel-service:
-    build:
-      context: .
-      dockerfile: FictionArchive.Service.NovelService/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-novel-service:latest
     environment:
       ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_NovelService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
       Novelpia__Username: ${NOVELPIA_USERNAME}
       Novelpia__Password: ${NOVELPIA_PASSWORD}
       NovelUpdateService__PendingImageUrl: https://files.fictionarchive.orfl.xyz/api/pendingupload.png
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     depends_on:
       postgres:
         condition: service_healthy
@@ -51,13 +54,16 @@ services:
     restart: unless-stopped

   translation-service:
-    build:
-      context: .
-      dockerfile: FictionArchive.Service.TranslationService/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-translation-service:latest
     environment:
       ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_TranslationService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
       DeepL__ApiKey: ${DEEPL_API_KEY}
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     depends_on:
       postgres:
         condition: service_healthy
@@ -66,12 +72,15 @@ services:
     restart: unless-stopped

   scheduler-service:
-    build:
-      context: .
-      dockerfile: FictionArchive.Service.SchedulerService/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-scheduler-service:latest
     environment:
       ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_SchedulerService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     depends_on:
       postgres:
         condition: service_healthy
@@ -80,12 +89,15 @@ services:
     restart: unless-stopped

   user-service:
-    build:
-      context: .
-      dockerfile: FictionArchive.Service.UserService/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-user-service:latest
     environment:
       ConnectionStrings__DefaultConnection: Host=postgres;Database=FictionArchive_UserService;Username=${POSTGRES_USER:-postgres};Password=${POSTGRES_PASSWORD:-postgres}
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     depends_on:
       postgres:
         condition: service_healthy
@@ -94,20 +106,21 @@ services:
     restart: unless-stopped

   authentication-service:
-    build:
-      context: .
-      dockerfile: FictionArchive.Service.AuthenticationService/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-authentication-service:latest
     environment:
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     depends_on:
       rabbitmq:
         condition: service_healthy
     restart: unless-stopped

   file-service:
-    build:
-      context: .
-      dockerfile: FictionArchive.Service.FileService/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-file-service:latest
     environment:
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
       S3__Endpoint: ${S3_ENDPOINT:-https://s3.orfl.xyz}
@@ -115,6 +128,11 @@ services:
       S3__AccessKey: ${S3_ACCESS_KEY}
       S3__SecretKey: ${S3_SECRET_KEY}
       Proxy__BaseUrl: https://files.orfl.xyz/api
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     labels:
       - "traefik.enable=true"
       - "traefik.http.routers.file-service.rule=Host(`files.orfl.xyz`)"
@@ -130,11 +148,14 @@ services:
   # API Gateway
   # ===========================================
   api-gateway:
-    build:
-      context: .
-      dockerfile: FictionArchive.API/Dockerfile
+    image: git.orfl.xyz/conco/fictionarchive-api:latest
     environment:
       ConnectionStrings__RabbitMQ: amqp://${RABBITMQ_USER:-guest}:${RABBITMQ_PASSWORD:-guest}@rabbitmq
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     labels:
       - "traefik.enable=true"
       - "traefik.http.routers.api-gateway.rule=Host(`api.fictionarchive.orfl.xyz`)"
@@ -154,15 +175,12 @@ services:
   # Frontend
   # ===========================================
   frontend:
-    build:
-      context: ./fictionarchive-web
-      dockerfile: Dockerfile
-      args:
-        VITE_GRAPHQL_URI: https://api.fictionarchive.orfl.xyz/graphql/
-        VITE_OIDC_AUTHORITY: ${OIDC_AUTHORITY:-https://auth.orfl.xyz/application/o/fiction-archive/}
-        VITE_OIDC_CLIENT_ID: ${OIDC_CLIENT_ID}
-        VITE_OIDC_REDIRECT_URI: https://fictionarchive.orfl.xyz/
-        VITE_OIDC_POST_LOGOUT_REDIRECT_URI: https://fictionarchive.orfl.xyz/
+    image: git.orfl.xyz/conco/fictionarchive-frontend:latest
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost/"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     labels:
       - "traefik.enable=true"
       - "traefik.http.routers.frontend.rule=Host(`fictionarchive.orfl.xyz`)"
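Note: every healthcheck added above shells out to wget against http://localhost:8080/healthz (the frontend probes http://localhost/ instead, since it serves the built site on port 80). For the backend probes to pass, each service must map a health endpoint alongside the AddHealthChecks() registration shown in the Program.cs hunk earlier; the exact mapping is not part of this compare, but a minimal sketch looks like:

// Minimal sketch, assuming the standard ASP.NET Core health checks; the actual
// FictionArchive endpoint mapping is not shown in this diff.
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddHealthChecks();

var app = builder.Build();
app.MapHealthChecks("/healthz"); // returns 200 "Healthy" while all registered checks pass
app.Run();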
fictionarchive-web/.dockerignore (new file, 40 lines)
@@ -0,0 +1,40 @@
+# Dependencies
+node_modules
+
+# Build output
+dist
+
+# Environment files
+.env
+.env.local
+.env.*.local
+
+# IDE and editor
+.vscode
+.idea
+*.swp
+*.swo
+
+# Git
+.git
+.gitignore
+
+# Logs
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Test coverage
+coverage
+
+# Docker
+Dockerfile
+.dockerignore
+docker-compose*
+
+# Documentation
+README.md
+*.md
+
+# TypeScript build info
+*.tsbuildinfo