Compare commits

..

53 Commits

Author SHA1 Message Date
gamer147
9bc39c3abf [FA-misc] Reporting service seems to be working 2026-02-01 10:19:52 -05:00
gamer147
bdb863a032 [FA-misc] CICD Updates 2026-01-31 10:48:14 -05:00
gamer147
7c3df7ab11 [FA-misc] Add ReportingService consumer unit tests 2026-01-30 16:47:26 -05:00
gamer147
2e4e2c26aa [FA-misc] Add ReportingService Dockerfile and docker-compose entry
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-30 16:45:16 -05:00
gamer147
1057e1bcd4 [FA-misc] Add initial ReportingService migration
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-30 16:43:48 -05:00
gamer147
1fda5ad440 [FA-misc] Wire up ReportingService Program.cs
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-30 16:43:26 -05:00
gamer147
2c14ab4936 [FA-misc] Add GraphQL job queries with filtering and pagination 2026-01-30 16:40:16 -05:00
gamer147
433f038051 [FA-misc] Add JobStatusUpdateConsumer with upsert logic 2026-01-30 16:39:11 -05:00
gamer147
3c835d9cc3 [FA-misc] Add Job entity and ReportingDbContext 2026-01-30 16:38:46 -05:00
gamer147
9577aa996a [FA-misc] Scaffold ReportingService project
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-30 16:33:43 -05:00
gamer147
c25f59a4b4 [FA-misc] Add IJobStatusUpdate event contract and publishing helper 2026-01-30 16:32:01 -05:00
gamer147
be1ebbea39 [FA-misc] Add JobStatus enum 2026-01-30 16:31:42 -05:00
67521d6530 Merge pull request '[FA-misc] Fix issues with novel imports' (#63) from epic/FA-misc_MassTransit into master
All checks were successful
CI / build-backend (push) Successful in 1m6s
CI / build-frontend (push) Successful in 43s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 47s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 51s
Build Gateway / build-subgraphs (map[name:usernoveldata-service project:FictionArchive.Service.UserNovelDataService subgraph:UserNovelData]) (push) Successful in 47s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m3s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m52s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m44s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m46s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserNovelDataService/Dockerfile name:usernoveldata-service]) (push) Successful in 1m38s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m40s
Release / build-frontend (push) Successful in 1m44s
Build Gateway / build-gateway (push) Successful in 3m29s
Reviewed-on: #63
2026-01-30 17:55:40 +00:00
gamer147
a6242fdb2a [FA-misc] Fix release.yml
All checks were successful
CI / build-backend (pull_request) Successful in 1m7s
CI / build-frontend (pull_request) Successful in 44s
2026-01-30 12:53:10 -05:00
gamer147
3c8c8c8707 [FA-misc] Fix issues with novel imports
All checks were successful
CI / build-backend (pull_request) Successful in 1m46s
CI / build-frontend (pull_request) Successful in 44s
2026-01-30 12:48:40 -05:00
3820cb3af9 Merge pull request 'epic/FA-misc_MassTransit' (#62) from epic/FA-misc_MassTransit into master
Some checks failed
CI / build-backend (push) Successful in 1m10s
CI / build-frontend (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 48s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 44s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 56s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:usernoveldata-service project:FictionArchive.Service.UserNovelDataService subgraph:UserNovelData]) (push) Successful in 51s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Failing after 29s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m5s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m59s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m47s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 2m15s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserNovelDataService/Dockerfile name:usernoveldata-service]) (push) Successful in 1m46s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m46s
Release / build-frontend (push) Successful in 2m22s
Build Gateway / build-gateway (push) Successful in 3m39s
Reviewed-on: #62
2026-01-29 14:59:19 +00:00
gamer147
ec967770d3 [FA-misc] Saga seems to work, fixed a UserNovelDataService bug
All checks were successful
CI / build-backend (pull_request) Successful in 2m26s
CI / build-frontend (pull_request) Successful in 1m7s
2026-01-28 12:11:06 -05:00
gamer147
579e05b853 [FA-misc] Initial MassTransit implementation seems to work 2026-01-26 17:08:13 -05:00
e7435435c1 Merge pull request 'feature/FA-misc_ServersidedChapterReader' (#61) from feature/FA-misc_ServersidedChapterReader into master
All checks were successful
CI / build-backend (push) Successful in 1m5s
CI / build-frontend (push) Successful in 43s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 50s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 43s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 43s
Build Gateway / build-subgraphs (map[name:usernoveldata-service project:FictionArchive.Service.UserNovelDataService subgraph:UserNovelData]) (push) Successful in 45s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 1m59s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 1m53s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m47s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m41s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m43s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserNovelDataService/Dockerfile name:usernoveldata-service]) (push) Successful in 1m41s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m34s
Release / build-frontend (push) Successful in 1m43s
Build Gateway / build-gateway (push) Successful in 3m10s
Reviewed-on: #61
2026-01-26 16:32:52 +00:00
gamer147
dd7aa4b044 [FA-misc] Resolve lint issues
All checks were successful
CI / build-backend (pull_request) Successful in 1m45s
CI / build-frontend (pull_request) Successful in 45s
2026-01-26 11:29:51 -05:00
gamer147
1b9da7441c [FA-misc] Chapter reader is now serverside loaded with a clientside fallback, fixed a warning related to reading lists 2026-01-26 11:27:05 -05:00
055ef33666 Merge pull request '[FA-24] Reading lists' (#60) from feature/FA-24_ReadingLists into master
All checks were successful
CI / build-backend (push) Successful in 1m4s
CI / build-frontend (push) Successful in 41s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 46s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 43s
Build Gateway / build-subgraphs (map[name:usernoveldata-service project:FictionArchive.Service.UserNovelDataService subgraph:UserNovelData]) (push) Successful in 44s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 2m3s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 1m53s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m43s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m39s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m42s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserNovelDataService/Dockerfile name:usernoveldata-service]) (push) Successful in 1m38s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m32s
Release / build-frontend (push) Successful in 1m38s
Build Gateway / build-gateway (push) Successful in 3m18s
Reviewed-on: #60
2026-01-20 03:09:45 +00:00
gamer147
48ee43c4f6 [FA-24] Reading lists
All checks were successful
CI / build-backend (pull_request) Successful in 1m32s
CI / build-frontend (pull_request) Successful in 42s
2026-01-19 22:06:34 -05:00
98ae4ea4f2 Merge pull request 'feature/FA-27_Bookmarks' (#59) from feature/FA-27_Bookmarks into master
All checks were successful
CI / build-backend (push) Successful in 1m16s
CI / build-frontend (push) Successful in 40s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 47s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 42s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 43s
Build Gateway / build-subgraphs (map[name:usernoveldata-service project:FictionArchive.Service.UserNovelDataService subgraph:UserNovelData]) (push) Successful in 43s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 2m19s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m3s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m41s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m37s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m48s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserNovelDataService/Dockerfile name:usernoveldata-service]) (push) Successful in 1m34s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m33s
Release / build-frontend (push) Successful in 1m39s
Build Gateway / build-gateway (push) Successful in 3m11s
Reviewed-on: #59
2026-01-19 22:28:03 +00:00
gamer147
15e1a84f55 [FA-27] Update CICD
All checks were successful
CI / build-backend (pull_request) Successful in 1m6s
CI / build-frontend (pull_request) Successful in 41s
2026-01-19 17:03:44 -05:00
gamer147
70d4ba201a [FA-27] Fix unit test based on changes
All checks were successful
CI / build-backend (pull_request) Successful in 1m10s
CI / build-frontend (pull_request) Successful in 43s
2026-01-19 16:47:55 -05:00
gamer147
b69bcd6bf4 [FA-27] Fix user adding not using correct id
Some checks failed
CI / build-backend (pull_request) Failing after 1m2s
CI / build-frontend (pull_request) Successful in 41s
2026-01-19 16:14:49 -05:00
gamer147
c97654631b [FA-27] Still need to test events 2026-01-19 15:40:21 -05:00
gamer147
1ecfd9cc99 [FA-27] Need to test events but seems to mostly work 2026-01-19 15:13:14 -05:00
gamer147
19ae4a8089 Add .worktrees/ to .gitignore 2026-01-19 01:36:10 -05:00
gamer147
f8a45ad891 [FA-27] Bookmark implementation 2026-01-19 00:01:16 -05:00
gamer147
f67c5c610c Merge branch 'refs/heads/master' into feature/FA-27_Bookmarks 2025-12-30 11:07:36 -05:00
b5d4694f12 Merge pull request '[FA-misc] Update docker-compose.yml' (#58) from feature/FA-misc_AddDockerComposeUserService into master
All checks were successful
CI / build-backend (push) Successful in 1m7s
CI / build-frontend (push) Successful in 40s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 55s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 50s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 48s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 2m25s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m28s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 2m14s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 2m8s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 2m15s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m43s
Release / build-frontend (push) Successful in 1m43s
Build Gateway / build-gateway (push) Successful in 4m1s
Reviewed-on: #58
2025-12-30 03:26:06 +00:00
gamer147
6d47153a42 [FA-misc] Update docker-compose.yml
All checks were successful
CI / build-backend (pull_request) Successful in 1m26s
CI / build-frontend (pull_request) Successful in 50s
2025-12-29 22:23:29 -05:00
dbbc2fd8dc Merge pull request 'feature/FA-6_AuthorsPosts' (#57) from feature/FA-6_AuthorsPosts into master
All checks were successful
CI / build-backend (push) Successful in 1m16s
CI / build-frontend (push) Successful in 51s
Reviewed-on: #57
2025-12-30 03:14:53 +00:00
gamer147
176c94297b [FA-6] Author's posts seem to work
All checks were successful
CI / build-backend (pull_request) Successful in 2m4s
CI / build-frontend (pull_request) Successful in 46s
2025-12-29 22:06:12 -05:00
gamer147
8b3faa8f6c [FA-6] Good spot 2025-12-29 21:40:44 -05:00
gamer147
d87bd81190 [FA-6] Volumes work probably? 2025-12-29 21:28:07 -05:00
gamer147
bee805c441 [FA-6] Need to test Novelpia import 2025-12-29 20:27:04 -05:00
gamer147
5013da69c2 [FA-27] UserNovelDataService bootstrapped, going to do author's posts first i think 2025-12-29 14:54:01 -05:00
d8e3ec7ec9 Merge pull request 'feature/FA-55_UserServiceSetup' (#56) from feature/FA-55_UserServiceSetup into master
Some checks failed
CI / build-backend (push) Successful in 1m9s
CI / build-frontend (push) Failing after 44s
Reviewed-on: #56
2025-12-29 19:38:43 +00:00
gamer147
3612c89b99 [FA-55] Resolve linter error
All checks were successful
CI / build-backend (pull_request) Successful in 1m7s
CI / build-frontend (pull_request) Successful in 42s
2025-12-29 14:35:17 -05:00
gamer147
ebb2e6e7fc [FA-55] User service should be done
Some checks failed
CI / build-backend (pull_request) Successful in 2m2s
CI / build-frontend (pull_request) Failing after 30s
2025-12-29 14:33:08 -05:00
gamer147
01d3b94050 [FA-55] Finished aside from deactivation/integration events 2025-12-29 14:09:41 -05:00
gamer147
c0290cc5af [FA-55] User Service backend initial setup 2025-12-29 11:20:23 -05:00
1d950b7721 Merge pull request '[FA-misc] Whoops' (#53) from hotfix/FA-misc_LintFix into master
All checks were successful
CI / build-backend (push) Successful in 56s
CI / build-frontend (push) Successful in 39s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 46s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 3m54s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m10s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m56s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m58s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m58s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 3m28s
Release / build-frontend (push) Successful in 1m38s
Build Gateway / build-gateway (push) Successful in 4m7s
Reviewed-on: #53
2025-12-11 20:21:15 +00:00
gamer147
7738bcf438 [FA-misc] Whoops
Some checks failed
CI / build-frontend (pull_request) Has been cancelled
CI / build-backend (pull_request) Has been cancelled
2025-12-11 15:21:04 -05:00
61e0cb69d8 Merge pull request '[FA-misc] Add delete button' (#52) from hotfix/FA-misc_FixNovelRedownloads into master
Some checks failed
CI / build-backend (push) Successful in 1m3s
CI / build-frontend (push) Failing after 26s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 46s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 52s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 46s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 43s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 2m3s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m54s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m45s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m42s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m50s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m53s
Build Gateway / build-gateway (push) Has been cancelled
Release / build-frontend (push) Has been cancelled
Reviewed-on: #52
2025-12-11 20:01:43 +00:00
gamer147
02525d611a [FA-misc] Add delete button
Some checks failed
CI / build-backend (pull_request) Successful in 2m35s
CI / build-frontend (pull_request) Failing after 27s
2025-12-11 15:00:55 -05:00
c21fe0fbd5 Merge pull request '[FA-misc] Fix an oversight in the update process' (#51) from feature/FA-misc_NovelpiaResiliency into master
Some checks failed
CI / build-backend (push) Successful in 1m2s
CI / build-frontend (push) Successful in 41s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 1m1s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 48s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 45s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 42s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 2m12s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m3s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m48s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m44s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m58s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m55s
Release / build-frontend (push) Failing after 59s
Build Gateway / build-gateway (push) Successful in 3m38s
Reviewed-on: #51
2025-12-11 19:16:39 +00:00
gamer147
bbc0b5ec7d [FA-misc] Fix an oversight in the update process
All checks were successful
CI / build-backend (pull_request) Successful in 1m28s
CI / build-frontend (pull_request) Successful in 53s
2025-12-11 14:16:21 -05:00
5527c15ae7 Merge pull request '[FA-misc] Adds standard Polly Resiliency to Novelpia Http Clients' (#50) from feature/FA-misc_NovelpiaResiliency into master
All checks were successful
CI / build-backend (push) Successful in 1m0s
CI / build-frontend (push) Successful in 41s
Build Gateway / build-subgraphs (map[name:novel-service project:FictionArchive.Service.NovelService subgraph:Novel]) (push) Successful in 51s
Build Gateway / build-subgraphs (map[name:scheduler-service project:FictionArchive.Service.SchedulerService subgraph:Scheduler]) (push) Successful in 48s
Build Gateway / build-subgraphs (map[name:translation-service project:FictionArchive.Service.TranslationService subgraph:Translation]) (push) Successful in 49s
Build Gateway / build-subgraphs (map[name:user-service project:FictionArchive.Service.UserService subgraph:User]) (push) Successful in 52s
Release / build-and-push (map[dockerfile:FictionArchive.Service.AuthenticationService/Dockerfile name:authentication-service]) (push) Successful in 2m23s
Release / build-and-push (map[dockerfile:FictionArchive.Service.FileService/Dockerfile name:file-service]) (push) Successful in 2m24s
Release / build-and-push (map[dockerfile:FictionArchive.Service.NovelService/Dockerfile name:novel-service]) (push) Successful in 1m43s
Release / build-and-push (map[dockerfile:FictionArchive.Service.SchedulerService/Dockerfile name:scheduler-service]) (push) Successful in 1m38s
Release / build-and-push (map[dockerfile:FictionArchive.Service.TranslationService/Dockerfile name:translation-service]) (push) Successful in 1m51s
Release / build-and-push (map[dockerfile:FictionArchive.Service.UserService/Dockerfile name:user-service]) (push) Successful in 1m34s
Release / build-frontend (push) Successful in 1m33s
Build Gateway / build-gateway (push) Successful in 4m1s
Reviewed-on: #50
2025-12-11 14:54:12 +00:00
gamer147
1e374e6eeb [FA-misc] Adds standard Polly Resiliency to Novelpia Http Clients
All checks were successful
CI / build-backend (pull_request) Successful in 1m28s
CI / build-frontend (pull_request) Successful in 46s
2025-12-11 09:53:54 -05:00
239 changed files with 21625 additions and 1376 deletions

View File

@@ -28,6 +28,12 @@ jobs:
- name: user-service
project: FictionArchive.Service.UserService
subgraph: User
- name: usernoveldata-service
project: FictionArchive.Service.UserNovelDataService
subgraph: UserNovelData
- name: reporting-service
project: FictionArchive.Service.ReportingService
subgraph: Reporting
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -110,6 +116,18 @@ jobs:
name: user-service-subgraph
path: subgraphs/user
- name: Download UserNovelData Service subgraph
uses: christopherhx/gitea-download-artifact@v4
with:
name: usernoveldata-service-subgraph
path: subgraphs/usernoveldata
- name: Download Reporting Service subgraph
uses: christopherhx/gitea-download-artifact@v4
with:
name: reporting-service-subgraph
path: subgraphs/reporting
- name: Configure subgraph URLs for Docker
run: |
for fsp in subgraphs/*/*.fsp; do

View File

@@ -25,8 +25,10 @@ jobs:
dockerfile: FictionArchive.Service.FileService/Dockerfile
- name: scheduler-service
dockerfile: FictionArchive.Service.SchedulerService/Dockerfile
- name: authentication-service
dockerfile: FictionArchive.Service.AuthenticationService/Dockerfile
- name: usernoveldata-service
dockerfile: FictionArchive.Service.UserNovelDataService/Dockerfile
- name: reporting-service
dockerfile: FictionArchive.Service.ReportingService/Dockerfile
steps:
- name: Checkout
uses: actions/checkout@v4

3
.gitignore vendored
View File

@@ -140,3 +140,6 @@ appsettings.Local.json
schema.graphql
*.fsp
gateway.fgp
# Git worktrees
.worktrees/

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,9 @@
namespace FictionArchive.Common.Enums;
public enum JobStatus
{
Failed = -1,
Pending = 0,
InProgress = 1,
Completed = 2
}

View File

@@ -1,36 +0,0 @@
using FictionArchive.Service.AuthenticationService.Models.Requests;
using FictionArchive.Service.AuthenticationService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace FictionArchive.Service.AuthenticationService.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class AuthenticationWebhookController : ControllerBase
{
private readonly IEventBus _eventBus;
public AuthenticationWebhookController(IEventBus eventBus)
{
_eventBus = eventBus;
}
[HttpPost(nameof(UserRegistered))]
public async Task<ActionResult> UserRegistered([FromBody] UserRegisteredWebhookPayload payload)
{
var authUserAddedEvent = new AuthUserAddedEvent
{
OAuthProviderId = payload.OAuthProviderId,
InviterOAuthProviderId = payload.InviterOAuthProviderId,
EventUserEmail = payload.EventUserEmail,
EventUserUsername = payload.EventUserUsername
};
await _eventBus.Publish(authUserAddedEvent);
return Ok();
}
}
}

View File

@@ -1,23 +0,0 @@
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["FictionArchive.Service.AuthenticationService/FictionArchive.Service.AuthenticationService.csproj", "FictionArchive.Service.AuthenticationService/"]
RUN dotnet restore "FictionArchive.Service.AuthenticationService/FictionArchive.Service.AuthenticationService.csproj"
COPY . .
WORKDIR "/src/FictionArchive.Service.AuthenticationService"
RUN dotnet build "./FictionArchive.Service.AuthenticationService.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./FictionArchive.Service.AuthenticationService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "FictionArchive.Service.AuthenticationService.dll"]

View File

@@ -1,6 +0,0 @@
@FictionArchive.Service.AuthenticationService_HostAddress = http://localhost:5091
GET {{FictionArchive.Service.AuthenticationService_HostAddress}}/weatherforecast/
Accept: application/json
###

View File

@@ -1,16 +0,0 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.AuthenticationService.Models.IntegrationEvents;
public class AuthUserAddedEvent : IIntegrationEvent
{
public string OAuthProviderId { get; set; }
public string InviterOAuthProviderId { get; set; }
// The email of the user that created the event
public string EventUserEmail { get; set; }
// The username of the user that created the event
public string EventUserUsername { get; set; }
}

View File

@@ -1,17 +0,0 @@
namespace FictionArchive.Service.AuthenticationService.Models.Requests;
public class UserRegisteredWebhookPayload
{
// The body of the notification message
public string Body { get; set; }
public string OAuthProviderId { get; set; }
public string InviterOAuthProviderId { get; set; }
// The email of the user that created the event
public string EventUserEmail { get; set; }
// The username of the user that created the event
public string EventUserUsername { get; set; }
}

View File

@@ -1,49 +0,0 @@
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
namespace FictionArchive.Service.AuthenticationService;
/// <summary>
/// Entry point for the AuthenticationService web host. Wires up controllers,
/// Swagger, the RabbitMQ event bus, and a health-check endpoint.
/// </summary>
public class Program
{
    public static void Main(string[] args)
    {
        var builder = WebApplication.CreateBuilder(args);

        // Add services to the container.
        builder.Services.AddControllers();
        // Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
        builder.Services.AddEndpointsApiExplorer();
        builder.Services.AddSwaggerGen();

        #region Event Bus
        // Bind the "RabbitMQ" configuration section onto the event-bus options.
        builder.Services.AddRabbitMQ(opt =>
        {
            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
        });
        #endregion

        builder.Services.AddHealthChecks();

        var app = builder.Build();

        // Configure the HTTP request pipeline.
        if (app.Environment.IsDevelopment())
        {
            // Swagger UI is only exposed in development.
            app.UseSwagger();
            app.UseSwaggerUI();
        }

        app.UseHttpsRedirection();
        // Liveness probe endpoint used by the container orchestrator.
        app.MapHealthChecks("/healthz");
        app.UseAuthorization();
        app.MapControllers();
        app.Run();
    }
}

View File

@@ -1,13 +0,0 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"RabbitMQ": {
"ConnectionString": "amqp://localhost",
"ClientIdentifier": "AuthenticationService"
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,91 @@
using Amazon.S3;
using Amazon.S3.Model;
using FictionArchive.Common.Enums;
using FictionArchive.Service.FileService.Models;
using FictionArchive.Service.Shared.Contracts.Events;
using FictionArchive.Service.Shared.Extensions;
using MassTransit;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace FictionArchive.Service.FileService.Consumers;
/// <summary>
/// Consumes <see cref="IFileUploadRequestCreated"/> messages: uploads the payload bytes to
/// S3-compatible storage, publishes an <see cref="IFileUploadRequestStatusUpdate"/> with the
/// outcome, and reports job status (InProgress / Failed / Completed) for the upload.
/// </summary>
public class FileUploadRequestCreatedConsumer : IConsumer<IFileUploadRequestCreated>
{
    private readonly ILogger<FileUploadRequestCreatedConsumer> _logger;
    private readonly AmazonS3Client _amazonS3Client;
    private readonly IPublishEndpoint _publishEndpoint;
    private readonly S3Configuration _s3Configuration;
    private readonly ProxyConfiguration _proxyConfiguration;

    public FileUploadRequestCreatedConsumer(
        ILogger<FileUploadRequestCreatedConsumer> logger,
        AmazonS3Client amazonS3Client,
        IPublishEndpoint publishEndpoint,
        IOptions<S3Configuration> s3Configuration,
        IOptions<ProxyConfiguration> proxyConfiguration)
    {
        _logger = logger;
        _amazonS3Client = amazonS3Client;
        _publishEndpoint = publishEndpoint;
        _s3Configuration = s3Configuration.Value;
        _proxyConfiguration = proxyConfiguration.Value;
    }

    public async Task Consume(ConsumeContext<IFileUploadRequestCreated> context)
    {
        var request = context.Message;
        var jobName = $"Upload {request.FilePath}";

        // Report the upload job as started before touching S3.
        await _publishEndpoint.ReportJobStatus(
            request.RequestId, "FileUpload", jobName,
            JobStatus.InProgress, parentJobId: request.ImportId);

        using var payloadStream = new MemoryStream(request.FileData);
        var putRequest = new PutObjectRequest
        {
            BucketName = _s3Configuration.Bucket,
            Key = request.FilePath,
            // Chunked encoding must be off for the S3-compatible backend.
            UseChunkEncoding = false,
            InputStream = payloadStream
        };

        var s3Response = await _amazonS3Client.PutObjectAsync(putRequest);

        if (s3Response.HttpStatusCode != System.Net.HttpStatusCode.OK)
        {
            // Upload rejected: broadcast failure on both channels and stop.
            _logger.LogError("Failed to upload file {FilePath} to S3", request.FilePath);
            await _publishEndpoint.Publish<IFileUploadRequestStatusUpdate>(
                new FileUploadRequestStatusUpdate(
                    ImportId: request.ImportId,
                    RequestId: request.RequestId,
                    Status: RequestStatus.Failed,
                    FileAccessUrl: null,
                    ErrorMessage: "An error occurred while uploading file to S3."));
            await _publishEndpoint.ReportJobStatus(
                request.RequestId, "FileUpload", jobName,
                JobStatus.Failed, parentJobId: request.ImportId,
                errorMessage: "An error occurred while uploading file to S3.");
            return;
        }

        // Success: the file is reachable through the proxy at BaseUrl/<path>.
        var fileAccessUrl = _proxyConfiguration.BaseUrl + "/" + request.FilePath;
        _logger.LogInformation("Successfully uploaded file {FilePath} to S3", request.FilePath);
        await _publishEndpoint.Publish<IFileUploadRequestStatusUpdate>(
            new FileUploadRequestStatusUpdate(
                ImportId: request.ImportId,
                RequestId: request.RequestId,
                Status: RequestStatus.Success,
                FileAccessUrl: fileAccessUrl,
                ErrorMessage: null));
        await _publishEndpoint.ReportJobStatus(
            request.RequestId, "FileUpload", jobName,
            JobStatus.Completed, parentJobId: request.ImportId,
            metadata: new Dictionary<string, string> { ["FileAccessUrl"] = fileAccessUrl });
    }
}

View File

@@ -1,10 +0,0 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.FileService.Models.IntegrationEvents;
/// <summary>
/// Event-bus message requesting that a file be uploaded to storage.
/// </summary>
public class FileUploadRequestCreatedEvent : IIntegrationEvent
{
    // Correlates this request with the eventual FileUploadRequestStatusUpdateEvent.
    public Guid RequestId { get; set; }
    // Destination path — used as the S3 object key by the handler.
    public string FilePath { get; set; }
    // Raw file bytes to upload.
    public byte[] FileData { get; set; }
}

View File

@@ -1,22 +0,0 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.FileService.Models.IntegrationEvents;
/// <summary>
/// Event-bus message reporting the outcome of a file upload request. The Success or
/// Failure section is populated according to <see cref="Status"/>.
/// </summary>
public class FileUploadRequestStatusUpdateEvent : IIntegrationEvent
{
    // Matches the RequestId of the originating FileUploadRequestCreatedEvent.
    public Guid RequestId { get; set; }
    // Overall outcome of the upload.
    public RequestStatus Status { get; set; }
    #region Success
    // URL the uploaded file can be fetched from (set on success).
    public string? FileAccessUrl { get; set; }
    #endregion
    #region Failure
    // Human-readable failure description (set on failure).
    public string? ErrorMessage { get; set; }
    #endregion
}

View File

@@ -1,11 +1,9 @@
using Amazon.Runtime;
using Amazon.S3;
using FictionArchive.Common.Extensions;
using FictionArchive.Service.FileService.Consumers;
using FictionArchive.Service.FileService.Models;
using FictionArchive.Service.FileService.Models.IntegrationEvents;
using FictionArchive.Service.FileService.Services.EventHandlers;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using Microsoft.Extensions.Options;
namespace FictionArchive.Service.FileService;
@@ -24,13 +22,14 @@ public class Program
builder.Services.AddHealthChecks();
#region Event Bus
#region MassTransit
builder.Services.AddRabbitMQ(opt =>
{
builder.Configuration.GetSection("RabbitMQ").Bind(opt);
})
.Subscribe<FileUploadRequestCreatedEvent, FileUploadRequestCreatedEventHandler>();
builder.Services.AddFictionArchiveMassTransit(
builder.Configuration,
x =>
{
x.AddConsumer<FileUploadRequestCreatedConsumer>();
});
#endregion

View File

@@ -1,58 +0,0 @@
using Amazon.S3;
using Amazon.S3.Model;
using FictionArchive.Common.Enums;
using FictionArchive.Service.FileService.Models;
using FictionArchive.Service.FileService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
using Microsoft.Extensions.Options;
namespace FictionArchive.Service.FileService.Services.EventHandlers;
/// <summary>
/// Handles <see cref="FileUploadRequestCreatedEvent"/>: pushes the file bytes to S3 and
/// publishes a <see cref="FileUploadRequestStatusUpdateEvent"/> describing the result.
/// </summary>
public class FileUploadRequestCreatedEventHandler : IIntegrationEventHandler<FileUploadRequestCreatedEvent>
{
    private readonly ILogger<FileUploadRequestCreatedEventHandler> _logger;
    private readonly AmazonS3Client _amazonS3Client;
    private readonly IEventBus _eventBus;
    private readonly S3Configuration _s3Configuration;
    private readonly ProxyConfiguration _proxyConfiguration;

    public FileUploadRequestCreatedEventHandler(
        ILogger<FileUploadRequestCreatedEventHandler> logger,
        AmazonS3Client amazonS3Client,
        IEventBus eventBus,
        IOptions<S3Configuration> s3Configuration,
        IOptions<ProxyConfiguration> proxyConfiguration)
    {
        _logger = logger;
        _amazonS3Client = amazonS3Client;
        _eventBus = eventBus;
        _proxyConfiguration = proxyConfiguration.Value;
        _s3Configuration = s3Configuration.Value;
    }

    public async Task Handle(FileUploadRequestCreatedEvent @event)
    {
        using var uploadStream = new MemoryStream(@event.FileData);
        var request = new PutObjectRequest
        {
            BucketName = _s3Configuration.Bucket,
            Key = @event.FilePath,
            // Needed to avoid an error with Garage
            UseChunkEncoding = false,
            InputStream = uploadStream
        };

        var s3Response = await _amazonS3Client.PutObjectAsync(request);
        if (s3Response.HttpStatusCode != System.Net.HttpStatusCode.OK)
        {
            // Non-OK response: report the failure and stop.
            _logger.LogError("An error occurred while uploading file to S3. Response code: {responsecode}", s3Response.HttpStatusCode);
            await _eventBus.Publish(new FileUploadRequestStatusUpdateEvent()
            {
                RequestId = @event.RequestId,
                Status = RequestStatus.Failed,
                ErrorMessage = "An error occurred while uploading file to S3."
            });
            return;
        }

        // Success: the file is served through the proxy at BaseUrl/<path>.
        await _eventBus.Publish(new FileUploadRequestStatusUpdateEvent()
        {
            Status = RequestStatus.Success,
            RequestId = @event.RequestId,
            FileAccessUrl = _proxyConfiguration.BaseUrl + "/" + @event.FilePath
        });
    }
}

View File

@@ -2,7 +2,8 @@
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore": "Warning"
}
},
"ProxyConfiguration": {

View File

@@ -9,8 +9,10 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="MassTransit" Version="8.5.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.11" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="NodaTime.Testing" Version="3.3.0" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">

View File

@@ -1,5 +1,4 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.FileService.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Configuration;
using FictionArchive.Service.NovelService.Models.Enums;
using FictionArchive.Service.NovelService.Models.Images;
@@ -8,12 +7,14 @@ using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.NovelService.Models.SourceAdapters;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.Shared.Contracts.Events;
using FluentAssertions;
using HtmlAgilityPack;
using MassTransit;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using NodaTime;
using NSubstitute;
using Xunit;
@@ -42,6 +43,13 @@ public class NovelUpdateServiceTests
Images = new List<Image>()
};
var volume = new Volume
{
Order = 1,
Name = LocalizationKey.CreateFromText("Main Story", Language.En),
Chapters = new List<Chapter> { chapter }
};
var novel = new Novel
{
Url = "http://demo/novel",
@@ -52,20 +60,20 @@ public class NovelUpdateServiceTests
Source = source,
Name = LocalizationKey.CreateFromText("Demo Novel", Language.En),
Description = LocalizationKey.CreateFromText("Description", Language.En),
Chapters = new List<Chapter> { chapter },
Volumes = new List<Volume> { volume },
Tags = new List<NovelTag>()
};
dbContext.Novels.Add(novel);
dbContext.SaveChanges();
return new NovelCreateResult(novel, chapter);
return new NovelCreateResult(novel, volume, chapter);
}
private static NovelUpdateService CreateService(
NovelServiceDbContext dbContext,
ISourceAdapter adapter,
IEventBus eventBus,
IPublishEndpoint publishEndpoint,
string pendingImageUrl = "https://pending/placeholder.jpg")
{
var options = Options.Create(new NovelUpdateServiceConfiguration
@@ -73,7 +81,10 @@ public class NovelUpdateServiceTests
PendingImageUrl = pendingImageUrl
});
return new NovelUpdateService(dbContext, NullLogger<NovelUpdateService>.Instance, new[] { adapter }, eventBus, options);
var clock = Substitute.For<IClock>();
clock.GetCurrentInstant().Returns(Instant.FromUnixTimeSeconds(0));
return new NovelUpdateService(dbContext, NullLogger<NovelUpdateService>.Instance, new[] { adapter }, publishEndpoint, options, clock);
}
[Fact]
@@ -81,7 +92,7 @@ public class NovelUpdateServiceTests
{
using var dbContext = CreateDbContext();
var source = new Source { Name = "Demo", Key = "demo", Url = "http://demo" };
var (novel, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var (novel, volume, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var rawHtml = "<p>Hello</p><img src=\"http://img/x1.jpg\" alt=\"first\" /><img src=\"http://img/x2.jpg\" alt=\"second\" />";
var image1 = new ImageData { Url = "http://img/x1.jpg", Data = new byte[] { 1, 2, 3 } };
@@ -95,16 +106,18 @@ public class NovelUpdateServiceTests
ImageData = new List<ImageData> { image1, image2 }
}));
var publishedEvents = new List<FileUploadRequestCreatedEvent>();
var eventBus = Substitute.For<IEventBus>();
eventBus.Publish(Arg.Do<FileUploadRequestCreatedEvent>(publishedEvents.Add)).Returns(Task.CompletedTask);
eventBus.Publish(Arg.Any<object>(), Arg.Any<string>()).Returns(Task.CompletedTask);
var publishedEvents = new List<IFileUploadRequestCreated>();
var publishEndpoint = Substitute.For<IPublishEndpoint>();
publishEndpoint.Publish(Arg.Do<IFileUploadRequestCreated>(e => publishedEvents.Add(e)), Arg.Any<CancellationToken>())
.Returns(Task.CompletedTask);
var pendingImageUrl = "https://pending/placeholder.jpg";
var service = CreateService(dbContext, adapter, eventBus, pendingImageUrl);
var service = CreateService(dbContext, adapter, publishEndpoint, pendingImageUrl);
var updatedChapter = await service.PullChapterContents(novel.Id, chapter.Order);
var importId = Guid.NewGuid();
var (updatedChapter, imageCount) = await service.PullChapterContents(importId, novel.Id, volume.Id, chapter.Order);
imageCount.Should().Be(2);
updatedChapter.Images.Should().HaveCount(2);
updatedChapter.Images.Select(i => i.OriginalPath).Should().BeEquivalentTo(new[] { image1.Url, image2.Url });
updatedChapter.Images.All(i => i.Id != Guid.Empty).Should().BeTrue();
@@ -121,9 +134,10 @@ public class NovelUpdateServiceTests
.BeEquivalentTo(updatedChapter.Images.Select(img => img.Id.ToString()));
publishedEvents.Should().HaveCount(2);
publishedEvents.Should().OnlyContain(e => e.ImportId == importId);
publishedEvents.Select(e => e.RequestId).Should().BeEquivalentTo(updatedChapter.Images.Select(i => i.Id));
publishedEvents.Select(e => e.FileData).Should().BeEquivalentTo(new[] { image1.Data, image2.Data });
publishedEvents.Should().OnlyContain(e => e.FilePath.StartsWith($"{novel.Id}/Images/Chapter-{updatedChapter.Id}/"));
publishedEvents.Should().OnlyContain(e => e.FilePath.StartsWith($"Novels/{novel.Id}/Images/Chapter-{updatedChapter.Id}/"));
}
[Fact]
@@ -131,7 +145,7 @@ public class NovelUpdateServiceTests
{
using var dbContext = CreateDbContext();
var source = new Source { Name = "Demo", Key = "demo", Url = "http://demo" };
var (novel, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var (novel, volume, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var rawHtml = "<p>Hi</p><img src=\"http://img/x1.jpg\">";
var image = new ImageData { Url = "http://img/x1.jpg", Data = new byte[] { 7, 8, 9 } };
@@ -144,14 +158,14 @@ public class NovelUpdateServiceTests
ImageData = new List<ImageData> { image }
}));
var eventBus = Substitute.For<IEventBus>();
eventBus.Publish(Arg.Any<FileUploadRequestCreatedEvent>()).Returns(Task.CompletedTask);
eventBus.Publish(Arg.Any<object>(), Arg.Any<string>()).Returns(Task.CompletedTask);
var publishEndpoint = Substitute.For<IPublishEndpoint>();
var service = CreateService(dbContext, adapter, eventBus);
var service = CreateService(dbContext, adapter, publishEndpoint);
var updatedChapter = await service.PullChapterContents(novel.Id, chapter.Order);
var importId = Guid.NewGuid();
var (updatedChapter, imageCount) = await service.PullChapterContents(importId, novel.Id, volume.Id, chapter.Order);
imageCount.Should().Be(1);
var storedHtml = updatedChapter.Body.Texts.Single().Text;
var doc = new HtmlDocument();
doc.LoadHtml(storedHtml);
@@ -161,7 +175,7 @@ public class NovelUpdateServiceTests
imgNode.GetAttributeValue("src", string.Empty).Should().Be("https://pending/placeholder.jpg");
}
private record NovelCreateResult(Novel Novel, Chapter Chapter);
private record NovelCreateResult(Novel Novel, Volume Volume, Chapter Chapter);
#region UpdateImage Tests
@@ -179,8 +193,8 @@ public class NovelUpdateServiceTests
await dbContext.SaveChangesAsync();
var adapter = Substitute.For<ISourceAdapter>();
var eventBus = Substitute.For<IEventBus>();
var service = CreateService(dbContext, adapter, eventBus);
var publishEndpoint = Substitute.For<IPublishEndpoint>();
var service = CreateService(dbContext, adapter, publishEndpoint);
var newUrl = "https://cdn.example.com/uploaded/cover.jpg";
@@ -199,7 +213,7 @@ public class NovelUpdateServiceTests
// Arrange
using var dbContext = CreateDbContext();
var source = new Source { Name = "Demo", Key = "demo", Url = "http://demo" };
var (novel, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var (novel, _, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var image = new Image
{
@@ -221,8 +235,8 @@ public class NovelUpdateServiceTests
await dbContext.SaveChangesAsync();
var adapter = Substitute.For<ISourceAdapter>();
var eventBus = Substitute.For<IEventBus>();
var service = CreateService(dbContext, adapter, eventBus, pendingUrl);
var publishEndpoint = Substitute.For<IPublishEndpoint>();
var service = CreateService(dbContext, adapter, publishEndpoint, pendingUrl);
var newUrl = "https://cdn.example.com/uploaded/image.jpg";
@@ -252,7 +266,7 @@ public class NovelUpdateServiceTests
// Arrange
using var dbContext = CreateDbContext();
var source = new Source { Name = "Demo", Key = "demo", Url = "http://demo" };
var (novel, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var (_, _, chapter) = CreateNovelWithSingleChapter(dbContext, source);
var image1 = new Image { OriginalPath = "http://original/img1.jpg", Chapter = chapter };
var image2 = new Image { OriginalPath = "http://original/img2.jpg", Chapter = chapter };
@@ -270,8 +284,8 @@ public class NovelUpdateServiceTests
await dbContext.SaveChangesAsync();
var adapter = Substitute.For<ISourceAdapter>();
var eventBus = Substitute.For<IEventBus>();
var service = CreateService(dbContext, adapter, eventBus, pendingUrl);
var publishEndpoint = Substitute.For<IPublishEndpoint>();
var service = CreateService(dbContext, adapter, publishEndpoint, pendingUrl);
var newUrl = "https://cdn.example.com/uploaded/img1.jpg";

View File

@@ -0,0 +1,194 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Sagas;
using FictionArchive.Service.Shared.Contracts.Events;
using FluentAssertions;
using MassTransit;
using MassTransit.Testing;
using Microsoft.Extensions.DependencyInjection;
using NodaTime;
using NodaTime.Testing;
using Xunit;
namespace FictionArchive.Service.NovelService.Tests.Sagas;
/// <summary>
/// State-machine tests for <see cref="NovelImportSaga"/>, driven through the MassTransit
/// in-memory test harness. Each test publishes the bus events that feed the saga and asserts
/// the resulting saga state plus the <see cref="IJobStatusUpdate"/> messages it publishes.
/// </summary>
public class NovelImportSagaTests
{
    // Fixed instant so any clock-dependent saga behavior is deterministic.
    private readonly FakeClock _clock = new(Instant.FromUtc(2026, 1, 27, 12, 0, 0));

    [Fact]
    public async Task Should_transition_to_importing_on_import_requested()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        // Act: requesting an import should create a saga instance.
        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));

        // Assert: saga is in Importing and an InProgress job status was reported.
        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        (await sagaHarness.Exists(importId, x => x.Importing)).HasValue.Should().BeTrue();
        (await harness.Published.Any<IJobStatusUpdate>(x =>
            x.Context.Message.JobId == importId &&
            x.Context.Message.Status == JobStatus.InProgress &&
            x.Context.Message.JobType == "NovelImport")).Should().BeTrue();
    }

    [Fact]
    public async Task Should_transition_to_completed_when_no_chapters()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        // Act: metadata arrives with zero pending chapters and no cover image queued.
        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));
        await harness.Bus.Publish<INovelMetadataImported>(new NovelMetadataImported(importId, 1, 0, false));

        // Assert: nothing left to process, so the saga completes immediately.
        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        (await sagaHarness.Exists(importId, x => x.Completed)).HasValue.Should().BeTrue();
        (await harness.Published.Any<INovelImportCompleted>(x =>
            x.Context.Message.ImportId == importId && x.Context.Message.Success)).Should().BeTrue();
        (await harness.Published.Any<IJobStatusUpdate>(x =>
            x.Context.Message.JobId == importId &&
            x.Context.Message.Status == JobStatus.Completed &&
            x.Context.Message.JobType == "NovelImport")).Should().BeTrue();
    }

    [Fact]
    public async Task Should_transition_to_processing_when_chapters_pending()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        // Act: metadata reports two pending chapters (NovelMetadataImported args appear to be
        // importId, novel id, pending chapter count, cover-image flag — TODO confirm against contract).
        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));
        await harness.Bus.Publish<INovelMetadataImported>(new NovelMetadataImported(importId, 1, 2, false));

        // Assert: chapters outstanding keeps the saga in Processing.
        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        (await sagaHarness.Exists(importId, x => x.Processing)).HasValue.Should().BeTrue();
    }

    [Fact]
    public async Task Should_complete_when_all_chapters_pulled_and_images_uploaded()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        // Act: two chapters pulled (the first produced one image), then that image upload succeeds.
        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));
        await harness.Bus.Publish<INovelMetadataImported>(new NovelMetadataImported(importId, 1, 2, false));
        await harness.Bus.Publish<IChapterPullCompleted>(new ChapterPullCompleted(importId, 1, 1));
        await harness.Bus.Publish<IChapterPullCompleted>(new ChapterPullCompleted(importId, 2, 0));
        await harness.Bus.Publish<IFileUploadRequestStatusUpdate>(new FileUploadRequestStatusUpdate(
            importId, Guid.NewGuid(), RequestStatus.Success, "https://cdn.example.com/image.jpg", null));

        // Assert: all outstanding work accounted for, saga completes and reports the job done.
        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        (await sagaHarness.Exists(importId, x => x.Completed)).HasValue.Should().BeTrue();
        (await harness.Published.Any<IJobStatusUpdate>(x =>
            x.Context.Message.JobId == importId &&
            x.Context.Message.Status == JobStatus.Completed &&
            x.Context.Message.JobType == "NovelImport")).Should().BeTrue();
    }

    [Fact]
    public async Task Should_transition_to_processing_when_cover_image_queued_with_no_chapters()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        // Act: no chapters, but a cover image upload is queued (final flag true).
        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));
        await harness.Bus.Publish<INovelMetadataImported>(new NovelMetadataImported(importId, 1, 0, true));

        // Assert: the pending cover upload keeps the saga in Processing.
        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        (await sagaHarness.Exists(importId, x => x.Processing)).HasValue.Should().BeTrue();
    }

    [Fact]
    public async Task Should_complete_when_chapters_pulled_images_uploaded_and_cover_uploaded()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        // Act: one chapter (no images) plus a queued cover image.
        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));
        await harness.Bus.Publish<INovelMetadataImported>(new NovelMetadataImported(importId, 1, 1, true));
        await harness.Bus.Publish<IChapterPullCompleted>(new ChapterPullCompleted(importId, 1, 0));

        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        // Should still be processing - cover image not yet uploaded
        (await sagaHarness.Exists(importId, x => x.Processing)).HasValue.Should().BeTrue();

        // Upload cover image
        await harness.Bus.Publish<IFileUploadRequestStatusUpdate>(new FileUploadRequestStatusUpdate(
            importId, Guid.NewGuid(), RequestStatus.Success, "https://cdn.example.com/cover.jpg", null));

        // Assert: cover uploaded, saga completes with a successful import.
        (await sagaHarness.Exists(importId, x => x.Completed)).HasValue.Should().BeTrue();
        (await harness.Published.Any<INovelImportCompleted>(x =>
            x.Context.Message.ImportId == importId && x.Context.Message.Success)).Should().BeTrue();
        (await harness.Published.Any<IJobStatusUpdate>(x =>
            x.Context.Message.JobId == importId &&
            x.Context.Message.Status == JobStatus.Completed &&
            x.Context.Message.JobType == "NovelImport")).Should().BeTrue();
    }

    [Fact]
    public async Task Should_publish_failed_job_status_on_chapter_pull_fault()
    {
        // Arrange
        await using var provider = CreateTestProvider();
        var harness = provider.GetRequiredService<ITestHarness>();
        await harness.Start();
        var importId = Guid.NewGuid();

        await harness.Bus.Publish<INovelImportRequested>(new NovelImportRequested(importId, "https://example.com/novel"));
        await harness.Bus.Publish<INovelMetadataImported>(new NovelMetadataImported(importId, 1, 1, false));

        var sagaHarness = harness.GetSagaStateMachineHarness<NovelImportSaga, NovelImportSagaState>();
        (await sagaHarness.Exists(importId, x => x.Processing)).HasValue.Should().BeTrue();

        // Act: simulate a MassTransit fault for the chapter pull via an anonymous Fault<T> message.
        await harness.Bus.Publish<Fault<IChapterPullRequested>>(new
        {
            Message = new ChapterPullRequested(importId, 1, 1, 1),
            Exceptions = new[]
            {
                new
                {
                    ExceptionType = typeof(Exception).FullName!,
                    Message = "Chapter pull failed",
                    StackTrace = "stack trace",
                    InnerException = (object?)null
                }
            }
        });

        // Assert: the saga lands in Failed and reports a Failed job status.
        (await sagaHarness.Exists(importId, x => x.Failed)).HasValue.Should().BeTrue();
        (await harness.Published.Any<IJobStatusUpdate>(x =>
            x.Context.Message.JobId == importId &&
            x.Context.Message.Status == JobStatus.Failed &&
            x.Context.Message.JobType == "NovelImport")).Should().BeTrue();
    }

    // Builds a DI container hosting the saga state machine with an in-memory repository
    // and the fixed test clock.
    private ServiceProvider CreateTestProvider()
    {
        return new ServiceCollection()
            .AddSingleton<IClock>(_clock)
            .AddMassTransitTestHarness(cfg =>
            {
                cfg.AddSagaStateMachine<NovelImportSaga, NovelImportSagaState>()
                    .InMemoryRepository();
            })
            .BuildServiceProvider(true);
    }
}

View File

@@ -0,0 +1,49 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using FictionArchive.Service.Shared.Extensions;
using MassTransit;
using Microsoft.Extensions.Logging;
namespace FictionArchive.Service.NovelService.Consumers;
/// <summary>
/// Consumes <see cref="IChapterPullRequested"/> messages: pulls the chapter's contents via
/// <see cref="NovelUpdateService"/>, publishes <see cref="IChapterPullCompleted"/>, and reports
/// job status for the pull under the parent import job.
/// </summary>
public class ChapterPullRequestedConsumer : IConsumer<IChapterPullRequested>
{
    private readonly ILogger<ChapterPullRequestedConsumer> _logger;
    private readonly NovelUpdateService _novelUpdateService;

    public ChapterPullRequestedConsumer(
        ILogger<ChapterPullRequestedConsumer> logger,
        NovelUpdateService novelUpdateService)
    {
        _logger = logger;
        _novelUpdateService = novelUpdateService;
    }

    public async Task Consume(ConsumeContext<IChapterPullRequested> context)
    {
        var request = context.Message;

        // Each chapter pull gets its own job id, parented to the overall import.
        var chapterJobId = Guid.NewGuid();
        var jobName = $"Pull chapter {request.ChapterOrder}";

        await context.ReportJobStatus(
            chapterJobId, "ChapterPull", jobName,
            JobStatus.InProgress, parentJobId: request.ImportId);

        var (chapter, imageCount) = await _novelUpdateService.PullChapterContents(
            request.ImportId,
            request.NovelId,
            request.VolumeId,
            request.ChapterOrder);

        // Tell the saga this chapter is done and how many image uploads it queued.
        await context.Publish<IChapterPullCompleted>(
            new ChapterPullCompleted(request.ImportId, chapter.Id, imageCount));

        await context.ReportJobStatus(
            chapterJobId, "ChapterPull", jobName,
            JobStatus.Completed, parentJobId: request.ImportId,
            metadata: new Dictionary<string, string> { ["ChapterId"] = chapter.Id.ToString() });
    }
}

View File

@@ -0,0 +1,47 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
using Microsoft.Extensions.Logging;
namespace FictionArchive.Service.NovelService.Consumers;
/// <summary>
/// Consumes <see cref="IFileUploadRequestStatusUpdate"/> messages. Updates belonging to this
/// service's images (matched by RequestId == image id) either log a failure or record the
/// uploaded file's URL via <see cref="NovelUpdateService.UpdateImage"/>; all others are ignored.
/// </summary>
public class FileUploadRequestStatusUpdateConsumer : IConsumer<IFileUploadRequestStatusUpdate>
{
    private readonly ILogger<FileUploadRequestStatusUpdateConsumer> _logger;
    private readonly NovelServiceDbContext _dbContext;
    private readonly NovelUpdateService _novelUpdateService;

    public FileUploadRequestStatusUpdateConsumer(
        ILogger<FileUploadRequestStatusUpdateConsumer> logger,
        NovelServiceDbContext dbContext,
        NovelUpdateService novelUpdateService)
    {
        _logger = logger;
        _dbContext = dbContext;
        _novelUpdateService = novelUpdateService;
    }

    public async Task Consume(ConsumeContext<IFileUploadRequestStatusUpdate> context)
    {
        var update = context.Message;

        // The upload RequestId doubles as the image's primary key.
        var image = await _dbContext.Images.FindAsync(update.RequestId);
        if (image is null)
        {
            // Not a request we care about.
            return;
        }

        switch (update.Status)
        {
            case RequestStatus.Failed:
                _logger.LogError("Image upload failed for image with id {imageId}", image.Id);
                break;
            case RequestStatus.Success:
                _logger.LogInformation("Image upload succeeded for image with id {imageId}", image.Id);
                await _novelUpdateService.UpdateImage(image.Id, update.FileAccessUrl);
                break;
        }
    }
}

View File

@@ -0,0 +1,43 @@
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace FictionArchive.Service.NovelService.Consumers;
/// <summary>
/// Consumes <see cref="INovelImportCompleted"/> messages: logs the outcome and removes the
/// corresponding ActiveImports row so the novel can be imported again later.
/// </summary>
public class NovelImportCompletedConsumer : IConsumer<INovelImportCompleted>
{
    private readonly ILogger<NovelImportCompletedConsumer> _logger;
    private readonly NovelServiceDbContext _dbContext;

    public NovelImportCompletedConsumer(
        ILogger<NovelImportCompletedConsumer> logger,
        NovelServiceDbContext dbContext)
    {
        _logger = logger;
        _dbContext = dbContext;
    }

    public async Task Consume(ConsumeContext<INovelImportCompleted> context)
    {
        var completion = context.Message;

        _logger.LogInformation(
            "Novel import {ImportId} completed. Success: {Success}, NovelId: {NovelId}, Error: {Error}",
            completion.ImportId,
            completion.Success,
            completion.NovelId,
            completion.ErrorMessage);

        // Remove from ActiveImports to allow future imports
        var activeImport = await _dbContext.ActiveImports
            .FirstOrDefaultAsync(a => a.ImportId == completion.ImportId);
        if (activeImport is null)
        {
            return;
        }

        _dbContext.ActiveImports.Remove(activeImport);
        await _dbContext.SaveChangesAsync();
    }
}

View File

@@ -0,0 +1,29 @@
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
using Microsoft.Extensions.Logging;
namespace FictionArchive.Service.NovelService.Consumers;
/// <summary>
/// Consumes <see cref="INovelImportRequested"/> messages and kicks off the import by
/// delegating to <see cref="NovelUpdateService.ImportNovel"/>.
/// </summary>
public class NovelImportRequestedConsumer : IConsumer<INovelImportRequested>
{
    private readonly ILogger<NovelImportRequestedConsumer> _logger;
    private readonly NovelUpdateService _novelUpdateService;

    public NovelImportRequestedConsumer(
        ILogger<NovelImportRequestedConsumer> logger,
        NovelUpdateService novelUpdateService)
    {
        _logger = logger;
        _novelUpdateService = novelUpdateService;
    }

    public async Task Consume(ConsumeContext<INovelImportRequested> context)
    {
        var request = context.Message;

        _logger.LogInformation("Starting novel import for {NovelUrl} with ImportId {ImportId}",
            request.NovelUrl, request.ImportId);

        await _novelUpdateService.ImportNovel(request.ImportId, request.NovelUrl);
    }
}

View File

@@ -0,0 +1,48 @@
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace FictionArchive.Service.NovelService.Consumers;
/// <summary>
/// Consumes <see cref="ITranslationRequestCompleted"/> messages: stores the translated text on
/// the localization key of the matching pending request, then deletes the request. Updates
/// without a matching LocalizationRequest row are discarded.
/// </summary>
public class TranslationRequestCompletedConsumer : IConsumer<ITranslationRequestCompleted>
{
    private readonly ILogger<TranslationRequestCompletedConsumer> _logger;
    private readonly NovelServiceDbContext _dbContext;

    public TranslationRequestCompletedConsumer(
        ILogger<TranslationRequestCompletedConsumer> logger,
        NovelServiceDbContext dbContext)
    {
        _logger = logger;
        _dbContext = dbContext;
    }

    public async Task Consume(ConsumeContext<ITranslationRequestCompleted> context)
    {
        var completion = context.Message;

        var localizationRequest = await _dbContext.LocalizationRequests
            .Include(r => r.KeyRequestedForTranslation)
            .ThenInclude(k => k.Texts)
            .FirstOrDefaultAsync(r => r.Id == completion.TranslationRequestId);
        if (localizationRequest is null)
        {
            // Not one of our requests, discard it
            return;
        }

        // Attach the completed translation to the key, then drop the pending request.
        var translatedText = new LocalizationText
        {
            Language = localizationRequest.TranslateTo,
            Text = completion.TranslatedText,
            TranslationEngine = localizationRequest.Engine
        };
        localizationRequest.KeyRequestedForTranslation.Texts.Add(translatedText);

        _dbContext.LocalizationRequests.Remove(localizationRequest);
        await _dbContext.SaveChangesAsync();
    }
}

View File

@@ -0,0 +1,11 @@
using FictionArchive.Service.Shared.Contracts.Events;
namespace FictionArchive.Service.NovelService.Contracts;
/// <summary>
/// Message contract implementing <see cref="IChapterCreated"/>; identifies a chapter
/// by its ids, its position (volume/chapter order), and its title.
/// </summary>
public record ChapterCreated(
    uint ChapterId,
    uint NovelId,
    uint VolumeId,
    uint VolumeOrder,
    uint ChapterOrder,
    string ChapterTitle) : IChapterCreated;

View File

@@ -0,0 +1,3 @@
namespace FictionArchive.Service.NovelService.Contracts;
/// <summary>
/// Result returned by the ImportNovel mutation when an import is queued:
/// the correlation id assigned to the import and the URL being imported.
/// </summary>
public record ImportNovelResult(Guid ImportId, string NovelUrl);

View File

@@ -0,0 +1,11 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Contracts.Events;
namespace FictionArchive.Service.NovelService.Contracts;
/// <summary>
/// Concrete event payload published when a novel is created; implements the shared
/// <see cref="INovelCreated"/> contract.
/// </summary>
public record NovelCreated(
    uint NovelId,
    string Title,
    Language OriginalLanguage,
    string Source,
    string AuthorName) : INovelCreated;

View File

@@ -0,0 +1,11 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Contracts.Events;
namespace FictionArchive.Service.NovelService.Contracts;
/// <summary>
/// Concrete event payload published when a translation is requested; implements the shared
/// <see cref="ITranslationRequestCreated"/> contract. TranslationEngineKey selects which
/// engine should perform the From→To translation of Body.
/// </summary>
public record TranslationRequestCreated(
    Guid TranslationRequestId,
    Language From,
    Language To,
    string Body,
    string TranslationEngineKey) : ITranslationRequestCreated;

View File

@@ -10,11 +10,12 @@
<ItemGroup>
<PackageReference Include="HotChocolate.AspNetCore.CommandLine" Version="15.1.11" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.4" />
<PackageReference Include="MassTransit.EntityFrameworkCore" Version="8.5.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2"/>
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,29 +1,42 @@
using FictionArchive.Service.NovelService.Contracts;
using FictionArchive.Service.NovelService.Models.Enums;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.NovelService.Models.SourceAdapters;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.Shared.Contracts.Events;
using HotChocolate.Authorization;
using HotChocolate.Types;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.NovelService.GraphQL;
/// <summary>
/// GraphQL mutations for novel management. All operations require an authenticated caller.
/// </summary>
/// <remarks>
/// NOTE(review): the captured text interleaved stale pre-refactor lines (a duplicate
/// ImportNovel returning NovelUpdateRequestedEvent and the old two-argument
/// FetchChapterContents/QueueChapterPull) with the new versions, which does not compile.
/// This is the reconstructed post-refactor class.
/// </remarks>
public class Mutation
{
    /// <summary>
    /// Queues a novel import for the given URL and returns the import's correlation id.
    /// <see cref="InvalidOperationException"/> is surfaced as a GraphQL error via the Error attribute.
    /// </summary>
    [Error<InvalidOperationException>]
    [Authorize]
    public async Task<ImportNovelResult> ImportNovel(string novelUrl, NovelUpdateService service)
    {
        return await service.QueueNovelImport(novelUrl);
    }

    /// <summary>
    /// Queues a pull of a single chapter's contents, addressed by novel, volume and chapter
    /// order, correlated to an existing import via <paramref name="importId"/>.
    /// </summary>
    [Authorize]
    public async Task<ChapterPullRequested> FetchChapterContents(
        Guid importId,
        uint novelId,
        uint volumeId,
        uint chapterOrder,
        NovelUpdateService service)
    {
        return await service.QueueChapterPull(importId, novelId, volumeId, chapterOrder);
    }

    /// <summary>
    /// Deletes the novel with the given id; returns true on success.
    /// <see cref="KeyNotFoundException"/> is surfaced as a GraphQL error via the Error attribute.
    /// </summary>
    [Error<KeyNotFoundException>]
    [Authorize]
    public async Task<bool> DeleteNovel(uint novelId, NovelUpdateService service)
    {
        await service.DeleteNovel(novelId);
        return true;
    }
}

View File

@@ -77,32 +77,45 @@ public class Query
}
: null,
Chapters = novel.Chapters.Select(chapter => new ChapterDto
Volumes = novel.Volumes.OrderBy(v => v.Order).Select(volume => new VolumeDto
{
Id = chapter.Id,
CreatedTime = chapter.CreatedTime,
LastUpdatedTime = chapter.LastUpdatedTime,
Revision = chapter.Revision,
Order = chapter.Order,
Url = chapter.Url,
Name = chapter.Name.Texts
Id = volume.Id,
CreatedTime = volume.CreatedTime,
LastUpdatedTime = volume.LastUpdatedTime,
Order = volume.Order,
Name = volume.Name.Texts
.Where(t => t.Language == preferredLanguage)
.Select(t => t.Text)
.FirstOrDefault()
?? chapter.Name.Texts.Select(t => t.Text).FirstOrDefault()
?? volume.Name.Texts.Select(t => t.Text).FirstOrDefault()
?? "",
Body = chapter.Body.Texts
.Where(t => t.Language == preferredLanguage)
.Select(t => t.Text)
.FirstOrDefault()
?? chapter.Body.Texts.Select(t => t.Text).FirstOrDefault()
?? "",
Images = chapter.Images.Select(image => new ImageDto
Chapters = volume.Chapters.OrderBy(c => c.Order).Select(chapter => new ChapterDto
{
Id = image.Id,
CreatedTime = image.CreatedTime,
LastUpdatedTime = image.LastUpdatedTime,
NewPath = image.NewPath
Id = chapter.Id,
CreatedTime = chapter.CreatedTime,
LastUpdatedTime = chapter.LastUpdatedTime,
Revision = chapter.Revision,
Order = chapter.Order,
Url = chapter.Url,
Name = chapter.Name.Texts
.Where(t => t.Language == preferredLanguage)
.Select(t => t.Text)
.FirstOrDefault()
?? chapter.Name.Texts.Select(t => t.Text).FirstOrDefault()
?? "",
Body = chapter.Body.Texts
.Where(t => t.Language == preferredLanguage)
.Select(t => t.Text)
.FirstOrDefault()
?? chapter.Body.Texts.Select(t => t.Text).FirstOrDefault()
?? "",
Images = chapter.Images.Select(image => new ImageDto
{
Id = image.Id,
CreatedTime = image.CreatedTime,
LastUpdatedTime = image.LastUpdatedTime,
NewPath = image.NewPath
}).ToList()
}).ToList()
}).ToList(),
@@ -140,11 +153,12 @@ public class Query
public IQueryable<ChapterReaderDto> GetChapter(
NovelServiceDbContext dbContext,
uint novelId,
uint volumeOrder,
uint chapterOrder,
Language preferredLanguage = Language.En)
{
return dbContext.Chapters
.Where(c => c.Novel.Id == novelId && c.Order == chapterOrder)
.Where(c => c.Volume.Novel.Id == novelId && c.Volume.Order == volumeOrder && c.Order == chapterOrder)
.Select(chapter => new ChapterReaderDto
{
Id = chapter.Id,
@@ -176,24 +190,74 @@ public class Query
NewPath = image.NewPath
}).ToList(),
NovelId = chapter.Novel.Id,
NovelName = chapter.Novel.Name.Texts
NovelId = chapter.Volume.Novel.Id,
NovelName = chapter.Volume.Novel.Name.Texts
.Where(t => t.Language == preferredLanguage)
.Select(t => t.Text)
.FirstOrDefault()
?? chapter.Novel.Name.Texts.Select(t => t.Text).FirstOrDefault()
?? chapter.Volume.Novel.Name.Texts.Select(t => t.Text).FirstOrDefault()
?? "",
TotalChapters = chapter.Novel.Chapters.Count,
PrevChapterOrder = chapter.Novel.Chapters
// Volume context
VolumeId = chapter.Volume.Id,
VolumeName = chapter.Volume.Name.Texts
.Where(t => t.Language == preferredLanguage)
.Select(t => t.Text)
.FirstOrDefault()
?? chapter.Volume.Name.Texts.Select(t => t.Text).FirstOrDefault()
?? "",
VolumeOrder = chapter.Volume.Order,
TotalChaptersInVolume = chapter.Volume.Chapters.Count,
// Previous chapter: first try same volume, then last chapter of previous volume
PrevChapterVolumeOrder = chapter.Volume.Chapters
.Where(c => c.Order < chapterOrder)
.OrderByDescending(c => c.Order)
.Select(c => (int?)chapter.Volume.Order)
.FirstOrDefault()
?? chapter.Volume.Novel.Volumes
.Where(v => v.Order < chapter.Volume.Order)
.OrderByDescending(v => v.Order)
.SelectMany(v => v.Chapters.OrderByDescending(c => c.Order).Take(1))
.Select(c => (int?)c.Volume.Order)
.FirstOrDefault(),
PrevChapterOrder = chapter.Volume.Chapters
.Where(c => c.Order < chapterOrder)
.OrderByDescending(c => c.Order)
.Select(c => (uint?)c.Order)
.FirstOrDefault(),
NextChapterOrder = chapter.Novel.Chapters
.FirstOrDefault()
?? chapter.Volume.Novel.Volumes
.Where(v => v.Order < chapter.Volume.Order)
.OrderByDescending(v => v.Order)
.SelectMany(v => v.Chapters.OrderByDescending(c => c.Order).Take(1))
.Select(c => (uint?)c.Order)
.FirstOrDefault(),
// Next chapter: first try same volume, then first chapter of next volume
NextChapterVolumeOrder = chapter.Volume.Chapters
.Where(c => c.Order > chapterOrder)
.OrderBy(c => c.Order)
.Select(c => (int?)chapter.Volume.Order)
.FirstOrDefault()
?? chapter.Volume.Novel.Volumes
.Where(v => v.Order > chapter.Volume.Order)
.OrderBy(v => v.Order)
.SelectMany(v => v.Chapters.OrderBy(c => c.Order).Take(1))
.Select(c => (int?)c.Volume.Order)
.FirstOrDefault(),
NextChapterOrder = chapter.Volume.Chapters
.Where(c => c.Order > chapterOrder)
.OrderBy(c => c.Order)
.Select(c => (uint?)c.Order)
.FirstOrDefault()
?? chapter.Volume.Novel.Volumes
.Where(v => v.Order > chapter.Volume.Order)
.OrderBy(v => v.Order)
.SelectMany(v => v.Chapters.OrderBy(c => c.Order).Take(1))
.Select(c => (uint?)c.Order)
.FirstOrDefault()
});
}
}

View File

@@ -0,0 +1,605 @@
// <auto-generated />
using System;
using FictionArchive.Service.NovelService.Services;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.NovelService.Migrations
{
[DbContext(typeof(NovelServiceDbContext))]
[Migration("20251229203027_AddVolumes")]
partial class AddVolumes
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
// Auto-generated model snapshot for the AddVolumes migration.
// Regenerate with 'dotnet ef migrations' — do not edit by hand.
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<long?>("ChapterId")
.HasColumnType("bigint");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("NewPath")
.HasColumnType("text");
b.Property<string>("OriginalPath")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("Images");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.HasKey("Id");
b.ToTable("LocalizationKeys");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("EngineId")
.HasColumnType("bigint");
b.Property<Guid>("KeyRequestedForTranslationId")
.HasColumnType("uuid");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<int>("TranslateTo")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("EngineId");
b.HasIndex("KeyRequestedForTranslationId");
b.ToTable("LocalizationRequests");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationText", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<int>("Language")
.HasColumnType("integer");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid?>("LocalizationKeyId")
.HasColumnType("uuid");
b.Property<string>("Text")
.IsRequired()
.HasColumnType("text");
b.Property<long?>("TranslationEngineId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("LocalizationKeyId");
b.HasIndex("TranslationEngineId");
b.ToTable("LocalizationText");
});
// Chapter now keys its order by Volume (unique (VolumeId, Order)) instead of by Novel.
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Guid>("BodyId")
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<long>("Order")
.HasColumnType("bigint");
b.Property<long>("Revision")
.HasColumnType("bigint");
b.Property<string>("Url")
.HasColumnType("text");
b.Property<long>("VolumeId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("BodyId");
b.HasIndex("NameId");
b.HasIndex("VolumeId", "Order")
.IsUnique();
b.ToTable("Chapter");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<long>("AuthorId")
.HasColumnType("bigint");
b.Property<Guid?>("CoverImageId")
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("DescriptionId")
.HasColumnType("uuid");
b.Property<string>("ExternalId")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<int>("RawLanguage")
.HasColumnType("integer");
b.Property<int>("RawStatus")
.HasColumnType("integer");
b.Property<long>("SourceId")
.HasColumnType("bigint");
b.Property<int?>("StatusOverride")
.HasColumnType("integer");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.HasIndex("AuthorId");
b.HasIndex("CoverImageId");
b.HasIndex("DescriptionId");
b.HasIndex("NameId");
b.HasIndex("SourceId");
b.HasIndex("ExternalId", "SourceId")
.IsUnique();
b.ToTable("Novels");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.NovelTag", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("DisplayNameId")
.HasColumnType("uuid");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long?>("SourceId")
.HasColumnType("bigint");
b.Property<int>("TagType")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("DisplayNameId");
b.HasIndex("SourceId");
b.ToTable("Tags");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Person", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("ExternalUrl")
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.HasKey("Id");
b.HasIndex("NameId");
b.ToTable("Person");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Source", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.ToTable("Sources");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.HasKey("Id");
b.ToTable("TranslationEngines");
});
// Volume: new entity introduced by this migration, unique (NovelId, Order) per novel.
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.Property<int>("Order")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("NameId");
b.HasIndex("NovelId", "Order")
.IsUnique();
b.ToTable("Volume");
});
modelBuilder.Entity("NovelNovelTag", b =>
{
b.Property<long>("NovelsId")
.HasColumnType("bigint");
b.Property<long>("TagsId")
.HasColumnType("bigint");
b.HasKey("NovelsId", "TagsId");
b.HasIndex("TagsId");
b.ToTable("NovelNovelTag");
});
// Relationship configuration follows (FKs, delete behaviors, navigations).
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Chapter", "Chapter")
.WithMany("Images")
.HasForeignKey("ChapterId");
b.Navigation("Chapter");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "Engine")
.WithMany()
.HasForeignKey("EngineId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "KeyRequestedForTranslation")
.WithMany()
.HasForeignKey("KeyRequestedForTranslationId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Engine");
b.Navigation("KeyRequestedForTranslation");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationText", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", null)
.WithMany("Texts")
.HasForeignKey("LocalizationKeyId");
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "TranslationEngine")
.WithMany()
.HasForeignKey("TranslationEngineId");
b.Navigation("TranslationEngine");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Body")
.WithMany()
.HasForeignKey("BodyId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Body");
b.Navigation("Name");
b.Navigation("Volume");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Person", "Author")
.WithMany()
.HasForeignKey("AuthorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Images.Image", "CoverImage")
.WithMany()
.HasForeignKey("CoverImageId");
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Description")
.WithMany()
.HasForeignKey("DescriptionId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Source", "Source")
.WithMany()
.HasForeignKey("SourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Author");
b.Navigation("CoverImage");
b.Navigation("Description");
b.Navigation("Name");
b.Navigation("Source");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.NovelTag", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "DisplayName")
.WithMany()
.HasForeignKey("DisplayNameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Source", "Source")
.WithMany()
.HasForeignKey("SourceId");
b.Navigation("DisplayName");
b.Navigation("Source");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Person", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Name");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", "Novel")
.WithMany("Volumes")
.HasForeignKey("NovelId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Name");
b.Navigation("Novel");
});
modelBuilder.Entity("NovelNovelTag", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", null)
.WithMany()
.HasForeignKey("NovelsId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.NovelTag", null)
.WithMany()
.HasForeignKey("TagsId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
{
b.Navigation("Texts");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
{
b.Navigation("Images");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,195 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.NovelService.Migrations
{
/// <inheritdoc />
// Introduces the Volume entity between Novel and Chapter and moves every existing
// chapter of each novel into a single default "Main Story" volume (Order = 1).
public partial class AddVolumes : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
// 1. Create the Volume table
migrationBuilder.CreateTable(
name: "Volume",
columns: table => new
{
Id = table.Column<long>(type: "bigint", nullable: false)
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
Order = table.Column<int>(type: "integer", nullable: false),
NameId = table.Column<Guid>(type: "uuid", nullable: false),
NovelId = table.Column<long>(type: "bigint", nullable: false),
CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Volume", x => x.Id);
table.ForeignKey(
name: "FK_Volume_LocalizationKeys_NameId",
column: x => x.NameId,
principalTable: "LocalizationKeys",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Volume_Novels_NovelId",
column: x => x.NovelId,
principalTable: "Novels",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_Volume_NameId",
table: "Volume",
column: "NameId");
migrationBuilder.CreateIndex(
name: "IX_Volume_NovelId_Order",
table: "Volume",
columns: new[] { "NovelId", "Order" },
unique: true);
// 2. Add nullable VolumeId column to Chapter (keep NovelId for now)
migrationBuilder.AddColumn<long>(
name: "VolumeId",
table: "Chapter",
type: "bigint",
nullable: true);
// 3. Data migration: Create volumes and link chapters for each novel
// NOTE(review): gen_random_uuid() is built in from PostgreSQL 13; on older servers it
// requires the pgcrypto extension — confirm the target server version.
migrationBuilder.Sql(@"
DO $$
DECLARE
novel_rec RECORD;
loc_key_id uuid;
volume_id bigint;
BEGIN
FOR novel_rec IN SELECT ""Id"", ""RawLanguage"" FROM ""Novels"" LOOP
-- Create LocalizationKey for volume name
loc_key_id := gen_random_uuid();
INSERT INTO ""LocalizationKeys"" (""Id"", ""CreatedTime"", ""LastUpdatedTime"")
VALUES (loc_key_id, NOW(), NOW());
-- Create LocalizationText for 'Main Story' in novel's raw language
INSERT INTO ""LocalizationText"" (""Id"", ""LocalizationKeyId"", ""Language"", ""Text"", ""CreatedTime"", ""LastUpdatedTime"")
VALUES (gen_random_uuid(), loc_key_id, novel_rec.""RawLanguage"", 'Main Story', NOW(), NOW());
-- Create Volume for this novel
INSERT INTO ""Volume"" (""Order"", ""NameId"", ""NovelId"", ""CreatedTime"", ""LastUpdatedTime"")
VALUES (1, loc_key_id, novel_rec.""Id"", NOW(), NOW())
RETURNING ""Id"" INTO volume_id;
-- Link all chapters of this novel to the new volume
UPDATE ""Chapter"" SET ""VolumeId"" = volume_id WHERE ""NovelId"" = novel_rec.""Id"";
END LOOP;
END $$;
");
// 4. Drop old FK and index for NovelId
migrationBuilder.DropForeignKey(
name: "FK_Chapter_Novels_NovelId",
table: "Chapter");
migrationBuilder.DropIndex(
name: "IX_Chapter_NovelId",
table: "Chapter");
// 5. Drop NovelId column from Chapter
migrationBuilder.DropColumn(
name: "NovelId",
table: "Chapter");
// 6. Make VolumeId non-nullable
// (safe because step 3 assigned a VolumeId to every chapter that had a NovelId)
migrationBuilder.AlterColumn<long>(
name: "VolumeId",
table: "Chapter",
type: "bigint",
nullable: false,
oldClrType: typeof(long),
oldType: "bigint",
oldNullable: true);
// 7. Add unique index and FK for VolumeId
migrationBuilder.CreateIndex(
name: "IX_Chapter_VolumeId_Order",
table: "Chapter",
columns: new[] { "VolumeId", "Order" },
unique: true);
migrationBuilder.AddForeignKey(
name: "FK_Chapter_Volume_VolumeId",
table: "Chapter",
column: "VolumeId",
principalTable: "Volume",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
}
/// <inheritdoc />
// Reverses Up: restores Chapter.NovelId from the owning Volume before dropping Volume.
protected override void Down(MigrationBuilder migrationBuilder)
{
// Add back NovelId column
migrationBuilder.AddColumn<long>(
name: "NovelId",
table: "Chapter",
type: "bigint",
nullable: true);
// Migrate data back: set NovelId from Volume
migrationBuilder.Sql(@"
UPDATE ""Chapter"" c
SET ""NovelId"" = v.""NovelId""
FROM ""Volume"" v
WHERE c.""VolumeId"" = v.""Id"";
");
// Make NovelId non-nullable
migrationBuilder.AlterColumn<long>(
name: "NovelId",
table: "Chapter",
type: "bigint",
nullable: false,
oldClrType: typeof(long),
oldType: "bigint",
oldNullable: true);
// Drop VolumeId FK and index
migrationBuilder.DropForeignKey(
name: "FK_Chapter_Volume_VolumeId",
table: "Chapter");
migrationBuilder.DropIndex(
name: "IX_Chapter_VolumeId_Order",
table: "Chapter");
// Drop VolumeId column
migrationBuilder.DropColumn(
name: "VolumeId",
table: "Chapter");
// Recreate NovelId index and FK
migrationBuilder.CreateIndex(
name: "IX_Chapter_NovelId",
table: "Chapter",
column: "NovelId");
migrationBuilder.AddForeignKey(
name: "FK_Chapter_Novels_NovelId",
table: "Chapter",
column: "NovelId",
principalTable: "Novels",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
// Note: Volume LocalizationKeys are not cleaned up in Down migration
// as they may have been modified. Manual cleanup may be needed.
// (Their LocalizationText rows are likewise left in place.)
migrationBuilder.DropTable(
name: "Volume");
}
}
}

View File

@@ -0,0 +1,673 @@
// <auto-generated />
using System;
using FictionArchive.Service.NovelService.Services;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.NovelService.Migrations
{
[DbContext(typeof(NovelServiceDbContext))]
[Migration("20260127161500_AddNovelImportSaga")]
partial class AddNovelImportSaga
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
// Auto-generated EF Core model snapshot for this migration.
// Do not hand-edit the configuration below: it must mirror
// NovelServiceDbContext exactly as of this migration, or
// subsequent migration diffs will be computed incorrectly.
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
// --- Entity property / key / index definitions ---
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.ActiveImport", b =>
{
b.Property<Guid>("ImportId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<string>("NovelUrl")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("StartedAt")
.HasColumnType("timestamp with time zone");
b.HasKey("ImportId");
b.HasIndex("NovelUrl")
.IsUnique();
b.ToTable("ActiveImports");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<long?>("ChapterId")
.HasColumnType("bigint");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("NewPath")
.HasColumnType("text");
b.Property<string>("OriginalPath")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("Images");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.HasKey("Id");
b.ToTable("LocalizationKeys");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("EngineId")
.HasColumnType("bigint");
b.Property<Guid>("KeyRequestedForTranslationId")
.HasColumnType("uuid");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<int>("TranslateTo")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("EngineId");
b.HasIndex("KeyRequestedForTranslationId");
b.ToTable("LocalizationRequests");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationText", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<int>("Language")
.HasColumnType("integer");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid?>("LocalizationKeyId")
.HasColumnType("uuid");
b.Property<string>("Text")
.IsRequired()
.HasColumnType("text");
b.Property<long?>("TranslationEngineId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("LocalizationKeyId");
b.HasIndex("TranslationEngineId");
b.ToTable("LocalizationText");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Guid>("BodyId")
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<long>("Order")
.HasColumnType("bigint");
b.Property<long>("Revision")
.HasColumnType("bigint");
b.Property<string>("Url")
.HasColumnType("text");
b.Property<long>("VolumeId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("BodyId");
b.HasIndex("NameId");
b.HasIndex("VolumeId", "Order")
.IsUnique();
b.ToTable("Chapter");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<long>("AuthorId")
.HasColumnType("bigint");
b.Property<Guid?>("CoverImageId")
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("DescriptionId")
.HasColumnType("uuid");
b.Property<string>("ExternalId")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<int>("RawLanguage")
.HasColumnType("integer");
b.Property<int>("RawStatus")
.HasColumnType("integer");
b.Property<long>("SourceId")
.HasColumnType("bigint");
b.Property<int?>("StatusOverride")
.HasColumnType("integer");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.HasIndex("AuthorId");
b.HasIndex("CoverImageId");
b.HasIndex("DescriptionId");
b.HasIndex("NameId");
b.HasIndex("SourceId");
b.HasIndex("ExternalId", "SourceId")
.IsUnique();
b.ToTable("Novels");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.NovelTag", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("DisplayNameId")
.HasColumnType("uuid");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long?>("SourceId")
.HasColumnType("bigint");
b.Property<int>("TagType")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("DisplayNameId");
b.HasIndex("SourceId");
b.ToTable("Tags");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Person", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("ExternalUrl")
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.HasKey("Id");
b.HasIndex("NameId");
b.ToTable("Person");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Source", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.ToTable("Sources");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.HasKey("Id");
b.ToTable("TranslationEngines");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.Property<int>("Order")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("NameId");
b.HasIndex("NovelId", "Order")
.IsUnique();
b.ToTable("Volume");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Sagas.NovelImportSagaState", b =>
{
b.Property<Guid>("CorrelationId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant?>("CompletedAt")
.HasColumnType("timestamp with time zone");
b.Property<int>("CompletedChapters")
.HasColumnType("integer");
b.Property<int>("CompletedImages")
.HasColumnType("integer");
b.Property<string>("CurrentState")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ErrorMessage")
.HasColumnType("text");
b.Property<int>("ExpectedChapters")
.HasColumnType("integer");
b.Property<int>("ExpectedImages")
.HasColumnType("integer");
b.Property<long?>("NovelId")
.HasColumnType("bigint");
b.Property<string>("NovelUrl")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("StartedAt")
.HasColumnType("timestamp with time zone");
b.HasKey("CorrelationId");
b.HasIndex("CurrentState");
b.HasIndex("NovelUrl");
b.ToTable("NovelImportSagaStates");
});
modelBuilder.Entity("NovelNovelTag", b =>
{
b.Property<long>("NovelsId")
.HasColumnType("bigint");
b.Property<long>("TagsId")
.HasColumnType("bigint");
b.HasKey("NovelsId", "TagsId");
b.HasIndex("TagsId");
b.ToTable("NovelNovelTag");
});
// --- Relationship (foreign key) configuration ---
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Chapter", "Chapter")
.WithMany("Images")
.HasForeignKey("ChapterId");
b.Navigation("Chapter");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationRequest", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "Engine")
.WithMany()
.HasForeignKey("EngineId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "KeyRequestedForTranslation")
.WithMany()
.HasForeignKey("KeyRequestedForTranslationId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Engine");
b.Navigation("KeyRequestedForTranslation");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationText", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", null)
.WithMany("Texts")
.HasForeignKey("LocalizationKeyId");
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.TranslationEngine", "TranslationEngine")
.WithMany()
.HasForeignKey("TranslationEngineId");
b.Navigation("TranslationEngine");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Body")
.WithMany()
.HasForeignKey("BodyId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Body");
b.Navigation("Name");
b.Navigation("Volume");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Person", "Author")
.WithMany()
.HasForeignKey("AuthorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Images.Image", "CoverImage")
.WithMany()
.HasForeignKey("CoverImageId");
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Description")
.WithMany()
.HasForeignKey("DescriptionId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Source", "Source")
.WithMany()
.HasForeignKey("SourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Author");
b.Navigation("CoverImage");
b.Navigation("Description");
b.Navigation("Name");
b.Navigation("Source");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.NovelTag", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "DisplayName")
.WithMany()
.HasForeignKey("DisplayNameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Source", "Source")
.WithMany()
.HasForeignKey("SourceId");
b.Navigation("DisplayName");
b.Navigation("Source");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Person", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Name");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", "Novel")
.WithMany("Volumes")
.HasForeignKey("NovelId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Name");
b.Navigation("Novel");
});
modelBuilder.Entity("NovelNovelTag", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", null)
.WithMany()
.HasForeignKey("NovelsId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.NovelTag", null)
.WithMany()
.HasForeignKey("TagsId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
// --- Collection-navigation declarations ---
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", b =>
{
b.Navigation("Texts");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Chapter", b =>
{
b.Navigation("Images");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,76 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
#nullable disable
namespace FictionArchive.Service.NovelService.Migrations
{
/// <inheritdoc />
public partial class AddNovelImportSaga : Migration
{
/// <summary>
/// Creates the saga-related tables: ActiveImports (dedupe guard keyed by
/// novel URL) and NovelImportSagaStates (MassTransit saga persistence).
/// </summary>
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "ActiveImports",
columns: table => new
{
ImportId = table.Column<Guid>(type: "uuid", nullable: false),
NovelUrl = table.Column<string>(type: "text", nullable: false),
StartedAt = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_ActiveImports", x => x.ImportId);
});
migrationBuilder.CreateTable(
name: "NovelImportSagaStates",
columns: table => new
{
CorrelationId = table.Column<Guid>(type: "uuid", nullable: false),
CurrentState = table.Column<string>(type: "text", nullable: false),
NovelUrl = table.Column<string>(type: "text", nullable: false),
NovelId = table.Column<long>(type: "bigint", nullable: true),
ExpectedChapters = table.Column<int>(type: "integer", nullable: false),
CompletedChapters = table.Column<int>(type: "integer", nullable: false),
ExpectedImages = table.Column<int>(type: "integer", nullable: false),
CompletedImages = table.Column<int>(type: "integer", nullable: false),
StartedAt = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
CompletedAt = table.Column<Instant>(type: "timestamp with time zone", nullable: true),
ErrorMessage = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_NovelImportSagaStates", x => x.CorrelationId);
});
// Unique index enforces at most one active import per novel URL.
migrationBuilder.CreateIndex(
name: "IX_ActiveImports_NovelUrl",
table: "ActiveImports",
column: "NovelUrl",
unique: true);
// Non-unique lookup indexes for saga queries by state and by URL.
migrationBuilder.CreateIndex(
name: "IX_NovelImportSagaStates_CurrentState",
table: "NovelImportSagaStates",
column: "CurrentState");
migrationBuilder.CreateIndex(
name: "IX_NovelImportSagaStates_NovelUrl",
table: "NovelImportSagaStates",
column: "NovelUrl");
}
/// <summary>
/// Drops both saga tables (indexes are dropped with their tables).
/// </summary>
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "ActiveImports");
migrationBuilder.DropTable(
name: "NovelImportSagaStates");
}
}
}

View File

@@ -23,6 +23,27 @@ namespace FictionArchive.Service.NovelService.Migrations
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.ActiveImport", b =>
{
b.Property<Guid>("ImportId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<string>("NovelUrl")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("StartedAt")
.HasColumnType("timestamp with time zone");
b.HasKey("ImportId");
b.HasIndex("NovelUrl")
.IsUnique();
b.ToTable("ActiveImports");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Images.Image", b =>
{
b.Property<Guid>("Id")
@@ -153,9 +174,6 @@ namespace FictionArchive.Service.NovelService.Migrations
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.Property<long>("Order")
.HasColumnType("bigint");
@@ -165,13 +183,17 @@ namespace FictionArchive.Service.NovelService.Migrations
b.Property<string>("Url")
.HasColumnType("text");
b.Property<long>("VolumeId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("BodyId");
b.HasIndex("NameId");
b.HasIndex("NovelId");
b.HasIndex("VolumeId", "Order")
.IsUnique();
b.ToTable("Chapter");
});
@@ -357,6 +379,86 @@ namespace FictionArchive.Service.NovelService.Migrations
b.ToTable("TranslationEngines");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Guid>("NameId")
.HasColumnType("uuid");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.Property<int>("Order")
.HasColumnType("integer");
b.HasKey("Id");
b.HasIndex("NameId");
b.HasIndex("NovelId", "Order")
.IsUnique();
b.ToTable("Volume");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Sagas.NovelImportSagaState", b =>
{
b.Property<Guid>("CorrelationId")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant?>("CompletedAt")
.HasColumnType("timestamp with time zone");
b.Property<int>("CompletedChapters")
.HasColumnType("integer");
b.Property<int>("CompletedImages")
.HasColumnType("integer");
b.Property<string>("CurrentState")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ErrorMessage")
.HasColumnType("text");
b.Property<int>("ExpectedChapters")
.HasColumnType("integer");
b.Property<int>("ExpectedImages")
.HasColumnType("integer");
b.Property<long?>("NovelId")
.HasColumnType("bigint");
b.Property<string>("NovelUrl")
.IsRequired()
.HasColumnType("text");
b.Property<Instant>("StartedAt")
.HasColumnType("timestamp with time zone");
b.HasKey("CorrelationId");
b.HasIndex("CurrentState");
b.HasIndex("NovelUrl");
b.ToTable("NovelImportSagaStates");
});
modelBuilder.Entity("NovelNovelTag", b =>
{
b.Property<long>("NovelsId")
@@ -427,9 +529,9 @@ namespace FictionArchive.Service.NovelService.Migrations
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", "Novel")
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("NovelId")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
@@ -437,7 +539,7 @@ namespace FictionArchive.Service.NovelService.Migrations
b.Navigation("Name");
b.Navigation("Novel");
b.Navigation("Volume");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
@@ -509,6 +611,25 @@ namespace FictionArchive.Service.NovelService.Migrations
b.Navigation("Name");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Localization.LocalizationKey", "Name")
.WithMany()
.HasForeignKey("NameId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", "Novel")
.WithMany("Volumes")
.HasForeignKey("NovelId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Name");
b.Navigation("Novel");
});
modelBuilder.Entity("NovelNovelTag", b =>
{
b.HasOne("FictionArchive.Service.NovelService.Models.Novels.Novel", null)
@@ -535,6 +656,11 @@ namespace FictionArchive.Service.NovelService.Migrations
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Novel", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.NovelService.Models.Novels.Volume", b =>
{
b.Navigation("Chapters");
});

View File

@@ -0,0 +1,10 @@
using NodaTime;
namespace FictionArchive.Service.NovelService.Models;
/// <summary>
/// Tracks an in-flight novel import; a unique index on <see cref="NovelUrl"/>
/// (see the AddNovelImportSaga migration) prevents concurrent imports of the
/// same novel.
/// </summary>
public class ActiveImport
{
// Primary key; also the saga correlation id for this import.
public Guid ImportId { get; set; }
// Source URL of the novel being imported (unique per active import).
public required string NovelUrl { get; set; }
// When the import began.
public Instant StartedAt { get; set; }
}

View File

@@ -12,7 +12,16 @@ public class ChapterReaderDto : BaseDto<uint>
// Navigation context
public uint NovelId { get; init; }
public required string NovelName { get; init; }
public int TotalChapters { get; init; }
// Volume context
public uint VolumeId { get; init; }
public required string VolumeName { get; init; }
public int VolumeOrder { get; init; }
public int TotalChaptersInVolume { get; init; }
// Cross-volume navigation (VolumeOrder + ChapterOrder identify a chapter)
public int? PrevChapterVolumeOrder { get; init; }
public uint? PrevChapterOrder { get; init; }
public int? NextChapterVolumeOrder { get; init; }
public uint? NextChapterOrder { get; init; }
}

View File

@@ -14,7 +14,7 @@ public class NovelDto : BaseDto<uint>
public required string ExternalId { get; init; }
public required string Name { get; init; }
public required string Description { get; init; }
public required List<ChapterDto> Chapters { get; init; }
public required List<VolumeDto> Volumes { get; init; }
public required List<NovelTagDto> Tags { get; init; }
public ImageDto? CoverImage { get; init; }
}

View File

@@ -0,0 +1,8 @@
namespace FictionArchive.Service.NovelService.Models.DTOs;
/// <summary>
/// GraphQL/API representation of a novel volume, including its chapters.
/// </summary>
public class VolumeDto : BaseDto<uint>
{
// Display position within the novel; signed to allow special slots
// (e.g. -1) — see the Volume entity.
public int Order { get; init; }
// Resolved (localized) volume name.
public required string Name { get; init; }
// Chapters belonging to this volume.
public required List<ChapterDto> Chapters { get; init; }
}

View File

@@ -1,9 +0,0 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
/// <summary>
/// Integration event requesting that a single chapter be pulled from its source.
/// (Removed in this change set in favor of MassTransit contracts.)
/// </summary>
public class ChapterPullRequestedEvent : IIntegrationEvent
{
// Identifier of the novel the chapter belongs to.
public uint NovelId { get; set; }
// 1-based chapter number to pull — presumably matches Chapter.Order; verify against consumer.
public uint ChapterNumber { get; set; }
}

View File

@@ -1,10 +0,0 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.FileService.IntegrationEvents;
/// <summary>
/// Integration event asking the file service to persist a file.
/// (Removed in this change set in favor of MassTransit contracts.)
/// </summary>
public class FileUploadRequestCreatedEvent : IIntegrationEvent
{
// Correlates the eventual status-update event back to this request.
public Guid RequestId { get; set; }
// Destination path for the upload. Initialized to empty to satisfy
// non-nullable reference semantics (CS8618) without changing the contract.
public string FilePath { get; set; } = string.Empty;
// Raw file contents carried on the message. Defaults to an empty payload
// rather than null for the same reason.
public byte[] FileData { get; set; } = Array.Empty<byte>();
}

View File

@@ -1,22 +0,0 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
/// <summary>
/// Integration event reporting the outcome of a file upload request.
/// Exactly one of the Success/Failure sections is expected to be populated,
/// depending on <see cref="Status"/>.
/// </summary>
public class FileUploadRequestStatusUpdateEvent : IIntegrationEvent
{
// Correlates back to the originating FileUploadRequestCreatedEvent.
public Guid RequestId { get; set; }
// Current state of the request (e.g. completed/failed).
public RequestStatus Status { get; set; }
#region Success
// Public URL of the stored file; set on success.
public string? FileAccessUrl { get; set; }
#endregion
#region Failure
// Human-readable failure reason; set on failure.
public string? ErrorMessage { get; set; }
#endregion
}

View File

@@ -1,8 +0,0 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
/// <summary>
/// Integration event requesting a refresh/import of the novel at the given URL.
/// (Removed in this change set in favor of MassTransit contracts.)
/// </summary>
public class NovelUpdateRequestedEvent : IIntegrationEvent
{
// Source URL identifying the novel to update. Initialized to empty to
// satisfy non-nullable reference semantics (CS8618) without changing
// the serialization contract.
public string NovelUrl { get; set; } = string.Empty;
}

View File

@@ -1,17 +0,0 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
/// <summary>
/// Integration event carrying the result of a translation request.
/// (Removed in this change set in favor of MassTransit contracts.)
/// </summary>
public class TranslationRequestCompletedEvent : IIntegrationEvent
{
/// <summary>
/// Maps this event back to a triggering request.
/// </summary>
public Guid? TranslationRequestId { get; set; }
/// <summary>
/// The resulting text. Null presumably indicates the translation failed — verify against publisher.
/// </summary>
public string? TranslatedText { get; set; }
}

View File

@@ -1,13 +0,0 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
/// <summary>
/// Integration event requesting a translation of <see cref="Body"/> between two languages.
/// (Removed in this change set in favor of MassTransit contracts.)
/// </summary>
public class TranslationRequestCreatedEvent : IIntegrationEvent
{
// Correlates the eventual TranslationRequestCompletedEvent back to this request.
public Guid TranslationRequestId { get; set; }
// Source language of the body text.
public Language From { get; set; }
// Target language to translate into.
public Language To { get; set; }
// Text to translate. Initialized to empty to satisfy non-nullable
// reference semantics (CS8618) without changing the contract.
public string Body { get; set; } = string.Empty;
// Key selecting which translation engine should handle the request.
public string TranslationEngineKey { get; set; } = string.Empty;
}

View File

@@ -20,7 +20,7 @@ public class Chapter : BaseEntity<uint>
#region Navigation Properties
public Novel Novel { get; set; }
public Volume Volume { get; set; }
#endregion
}

View File

@@ -21,7 +21,7 @@ public class Novel : BaseEntity<uint>
public LocalizationKey Name { get; set; }
public LocalizationKey Description { get; set; }
public List<Chapter> Chapters { get; set; }
public List<Volume> Volumes { get; set; }
public List<NovelTag> Tags { get; set; }
public Image? CoverImage { get; set; }
}

View File

@@ -0,0 +1,24 @@
using System.ComponentModel.DataAnnotations.Schema;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.Shared.Models;
namespace FictionArchive.Service.NovelService.Models.Novels;
/// <summary>
/// A named, ordered grouping of chapters within a novel. Mapped to the
/// "Volume" table; (NovelId, Order) is unique per the model snapshot.
/// </summary>
[Table("Volume")]
public class Volume : BaseEntity<uint>
{
/// <summary>
/// Signed int to allow special ordering like -1 for "Author Notes" at top.
/// </summary>
public int Order { get; set; }
// Localizable volume title.
public LocalizationKey Name { get; set; }
// Chapters contained in this volume (inverse of Chapter.Volume).
public List<Chapter> Chapters { get; set; }
#region Navigation Properties
// Owning novel (required FK per the model snapshot; cascade delete).
public Novel Novel { get; set; }
#endregion
}

View File

@@ -16,7 +16,7 @@ public class NovelMetadata
public Language RawLanguage { get; set; }
public NovelStatus RawStatus { get; set; }
public List<ChapterMetadata> Chapters { get; set; }
public List<VolumeMetadata> Volumes { get; set; }
public List<string> SourceTags { get; set; }
public List<string> SystemTags { get; set; }
public SourceDescriptor SourceDescriptor { get; set; }

View File

@@ -0,0 +1,8 @@
namespace FictionArchive.Service.NovelService.Models.SourceAdapters;
/// <summary>
/// Source-adapter representation of a volume scraped from an external site,
/// used to build <see cref="FictionArchive.Service.NovelService.Models.Novels.Volume"/> entities.
/// </summary>
public class VolumeMetadata
{
// Position of the volume within the novel as reported by the source.
public int Order { get; set; }
// Raw (untranslated) volume title from the source.
public string Name { get; set; }
// Chapters listed under this volume at the source.
public List<ChapterMetadata> Chapters { get; set; }
}

View File

@@ -1,16 +1,17 @@
using FictionArchive.Common.Extensions;
using FictionArchive.Service.NovelService.Consumers;
using FictionArchive.Service.NovelService.GraphQL;
using FictionArchive.Service.NovelService.Models.Configuration;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Sagas;
using FictionArchive.Service.NovelService.Services;
using FictionArchive.Service.NovelService.Services.EventHandlers;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.NovelService.Services.SourceAdapters.Novelpia;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.Shared.Services.GraphQL;
using MassTransit;
using Microsoft.EntityFrameworkCore;
using NodaTime;
namespace FictionArchive.Service.NovelService;
@@ -25,18 +26,28 @@ public class Program
builder.Services.AddMemoryCache();
#region Event Bus
#region MassTransit
if (!isSchemaExport)
{
builder.Services.AddRabbitMQ(opt =>
{
builder.Configuration.GetSection("RabbitMQ").Bind(opt);
})
.Subscribe<TranslationRequestCompletedEvent, TranslationRequestCompletedEventHandler>()
.Subscribe<NovelUpdateRequestedEvent, NovelUpdateRequestedEventHandler>()
.Subscribe<ChapterPullRequestedEvent, ChapterPullRequestedEventHandler>()
.Subscribe<FileUploadRequestStatusUpdateEvent, FileUploadRequestStatusUpdateEventHandler>();
builder.Services.AddFictionArchiveMassTransit(
builder.Configuration,
x =>
{
x.AddConsumer<TranslationRequestCompletedConsumer>();
x.AddConsumer<FileUploadRequestStatusUpdateConsumer>();
x.AddConsumer<ChapterPullRequestedConsumer>();
x.AddConsumer<NovelImportRequestedConsumer>();
x.AddConsumer<NovelImportCompletedConsumer>();
x.AddSagaStateMachine<NovelImportSaga, NovelImportSagaState>()
.EntityFrameworkRepository(r =>
{
r.ConcurrencyMode = ConcurrencyMode.Optimistic;
r.ExistingDbContext<NovelServiceDbContext>();
r.UsePostgres();
});
});
}
#endregion
@@ -62,18 +73,23 @@ public class Program
builder.Services.AddHttpClient<NovelpiaAuthMessageHandler>(client =>
{
client.BaseAddress = new Uri("https://novelpia.com");
});
})
.AddStandardResilienceHandler();
builder.Services.AddHttpClient<ISourceAdapter, NovelpiaAdapter>(client =>
{
client.BaseAddress = new Uri("https://novelpia.com");
})
.AddHttpMessageHandler<NovelpiaAuthMessageHandler>();
.AddHttpMessageHandler<NovelpiaAuthMessageHandler>()
.AddStandardResilienceHandler();
builder.Services.Configure<NovelUpdateServiceConfiguration>(builder.Configuration.GetSection("UpdateService"));
builder.Services.AddTransient<NovelUpdateService>();
#endregion
// Register IClock for saga and service use
builder.Services.AddSingleton<IClock>(SystemClock.Instance);
builder.Services.AddHealthChecks();
// Authentication & Authorization

View File

@@ -0,0 +1,161 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
using NodaTime;
namespace FictionArchive.Service.NovelService.Sagas;
/// <summary>
/// Orchestrates a single novel import end-to-end. Lifecycle:
/// Importing (metadata being fetched) -> Processing (chapter pulls and image
/// uploads in flight) -> Completed or Failed. Progress is tracked by counting
/// completed chapters/images against the expected totals announced by
/// INovelMetadataImported. Terminal transitions publish INovelImportCompleted
/// plus an IJobStatusUpdate for the reporting service.
/// </summary>
public class NovelImportSaga : MassTransitStateMachine<NovelImportSagaState>
{
// Saga states; MassTransit assigns these via reflection, hence null!.
public State Importing { get; private set; } = null!;
public State Processing { get; private set; } = null!;
public State Completed { get; private set; } = null!;
public State Failed { get; private set; } = null!;
// Happy-path events.
public Event<INovelImportRequested> NovelImportRequested { get; private set; } = null!;
public Event<INovelMetadataImported> NovelMetadataImported { get; private set; } = null!;
public Event<IChapterPullCompleted> ChapterPullCompleted { get; private set; } = null!;
public Event<IFileUploadRequestStatusUpdate> FileUploadStatusUpdate { get; private set; } = null!;
// Fault events raised by MassTransit when the corresponding consumer throws.
public Event<Fault<IChapterPullRequested>> ChapterPullFaulted { get; private set; } = null!;
public Event<Fault<IFileUploadRequestCreated>> FileUploadFaulted { get; private set; } = null!;
// Injected clock so StartedAt/CompletedAt stamps are testable.
private readonly IClock _clock;
public NovelImportSaga(IClock clock)
{
_clock = clock;
// Persist the current state name on the saga instance.
InstanceState(x => x.CurrentState);
// Every event correlates on the ImportId carried by the message.
Event(() => NovelImportRequested, x => x.CorrelateById(ctx => ctx.Message.ImportId));
Event(() => NovelMetadataImported, x => x.CorrelateById(ctx => ctx.Message.ImportId));
Event(() => ChapterPullCompleted, x => x.CorrelateById(ctx => ctx.Message.ImportId));
Event(() => FileUploadStatusUpdate, x =>
{
// ImportId is nullable on upload status updates; updates that do not match
// a live saga instance are discarded rather than faulting.
x.CorrelateById(ctx => ctx.Message.ImportId ?? Guid.Empty);
x.OnMissingInstance(m => m.Discard());
});
Event(() => ChapterPullFaulted, x => x.CorrelateById(ctx => ctx.Message.Message.ImportId));
Event(() => FileUploadFaulted, x =>
{
x.CorrelateById(ctx => ctx.Message.Message.ImportId ?? Guid.Empty);
x.OnMissingInstance(m => m.Discard());
});
// Start: record the URL, stamp the start time, report the job as in progress.
Initially(
When(NovelImportRequested)
.Then(ctx =>
{
ctx.Saga.NovelUrl = ctx.Message.NovelUrl;
ctx.Saga.StartedAt = _clock.GetCurrentInstant();
})
.TransitionTo(Importing)
.PublishAsync(ctx => ctx.Init<IJobStatusUpdate>(new JobStatusUpdate(
ctx.Saga.CorrelationId, null, "NovelImport",
$"Import {ctx.Saga.NovelUrl}", JobStatus.InProgress,
null, new Dictionary<string, string> { ["NovelUrl"] = ctx.Saga.NovelUrl })))
);
// Metadata arrived: capture totals; if nothing remains to pull or upload,
// complete immediately, otherwise move to Processing and wait for counts.
During(Importing,
When(NovelMetadataImported)
.Then(ctx =>
{
ctx.Saga.NovelId = ctx.Message.NovelId;
ctx.Saga.ExpectedChapters = ctx.Message.ChaptersPendingPull;
ctx.Saga.ExpectedImages += ctx.Message.CoverImageQueued ? 1 : 0;
})
.IfElse(
ctx => ctx.Saga.ExpectedChapters == 0 && !ctx.Message.CoverImageQueued,
thenBinder => thenBinder
.Then(ctx => ctx.Saga.CompletedAt = _clock.GetCurrentInstant())
.TransitionTo(Completed)
.PublishAsync(ctx => ctx.Init<INovelImportCompleted>(new NovelImportCompleted(
ctx.Saga.CorrelationId,
ctx.Saga.NovelId,
true,
null)))
.PublishAsync(ctx => ctx.Init<IJobStatusUpdate>(new JobStatusUpdate(
ctx.Saga.CorrelationId, null, "NovelImport",
$"Import {ctx.Saga.NovelUrl}", JobStatus.Completed,
null, new Dictionary<string, string> { ["NovelId"] = ctx.Saga.NovelId.ToString() }))),
elseBinder => elseBinder.TransitionTo(Processing)
)
);
// Processing: count chapter/image completions; finish when both totals are met.
// NOTE(review): fault events are only handled in Processing; a fault arriving
// while still Importing (e.g. a cover upload queued during metadata import)
// is not configured here — verify this cannot occur or is ignored upstream.
During(Processing,
When(ChapterPullCompleted)
.Then(ctx =>
{
// Each pulled chapter may queue additional images to wait for.
ctx.Saga.CompletedChapters++;
ctx.Saga.ExpectedImages += ctx.Message.ImagesQueued;
})
.If(ctx => IsComplete(ctx.Saga), ctx => ctx
.Then(c => c.Saga.CompletedAt = _clock.GetCurrentInstant())
.TransitionTo(Completed)
.PublishAsync(c => c.Init<INovelImportCompleted>(new NovelImportCompleted(
c.Saga.CorrelationId,
c.Saga.NovelId,
true,
null)))
.PublishAsync(c => c.Init<IJobStatusUpdate>(new JobStatusUpdate(
c.Saga.CorrelationId, null, "NovelImport",
$"Import {c.Saga.NovelUrl}", JobStatus.Completed,
null, new Dictionary<string, string> { ["NovelId"] = c.Saga.NovelId.ToString() })))),
// NOTE(review): every correlated upload status update increments
// CompletedImages regardless of ctx.Message.Status — confirm the file
// service publishes exactly one terminal update per upload, or interim/
// failed updates will be counted as completions.
When(FileUploadStatusUpdate)
.Then(ctx => ctx.Saga.CompletedImages++)
.If(ctx => IsComplete(ctx.Saga), ctx => ctx
.Then(c => c.Saga.CompletedAt = _clock.GetCurrentInstant())
.TransitionTo(Completed)
.PublishAsync(c => c.Init<INovelImportCompleted>(new NovelImportCompleted(
c.Saga.CorrelationId,
c.Saga.NovelId,
true,
null)))
.PublishAsync(c => c.Init<IJobStatusUpdate>(new JobStatusUpdate(
c.Saga.CorrelationId, null, "NovelImport",
$"Import {c.Saga.NovelUrl}", JobStatus.Completed,
null, new Dictionary<string, string> { ["NovelId"] = c.Saga.NovelId.ToString() })))),
// A faulted chapter pull fails the whole import with the first exception message.
When(ChapterPullFaulted)
.Then(ctx =>
{
ctx.Saga.ErrorMessage = ctx.Message.Exceptions.FirstOrDefault()?.Message;
ctx.Saga.CompletedAt = _clock.GetCurrentInstant();
})
.TransitionTo(Failed)
.PublishAsync(ctx => ctx.Init<INovelImportCompleted>(new NovelImportCompleted(
ctx.Saga.CorrelationId,
ctx.Saga.NovelId,
false,
ctx.Saga.ErrorMessage)))
.PublishAsync(ctx => ctx.Init<IJobStatusUpdate>(new JobStatusUpdate(
ctx.Saga.CorrelationId, null, "NovelImport",
$"Import {ctx.Saga.NovelUrl}", JobStatus.Failed,
ctx.Saga.ErrorMessage, null))),
// A faulted image upload likewise fails the import.
When(FileUploadFaulted)
.Then(ctx =>
{
ctx.Saga.ErrorMessage = ctx.Message.Exceptions.FirstOrDefault()?.Message;
ctx.Saga.CompletedAt = _clock.GetCurrentInstant();
})
.TransitionTo(Failed)
.PublishAsync(ctx => ctx.Init<INovelImportCompleted>(new NovelImportCompleted(
ctx.Saga.CorrelationId,
ctx.Saga.NovelId,
false,
ctx.Saga.ErrorMessage)))
.PublishAsync(ctx => ctx.Init<IJobStatusUpdate>(new JobStatusUpdate(
ctx.Saga.CorrelationId, null, "NovelImport",
$"Import {ctx.Saga.NovelUrl}", JobStatus.Failed,
ctx.Saga.ErrorMessage, null)))
);
// Remove the saga instance from the repository once it reaches a final state.
SetCompletedWhenFinalized();
}
// True once every expected chapter pull and image upload has been observed.
// (>= rather than == so late ExpectedImages adjustments cannot strand the saga.)
private static bool IsComplete(NovelImportSagaState saga) =>
saga.CompletedChapters >= saga.ExpectedChapters &&
saga.CompletedImages >= saga.ExpectedImages;
}

View File

@@ -0,0 +1,29 @@
using MassTransit;
using NodaTime;
namespace FictionArchive.Service.NovelService.Sagas;
/// <summary>
/// Persisted state for <c>NovelImportSaga</c>: one row per in-flight novel
/// import, keyed by CorrelationId (the ImportId carried on every event).
/// </summary>
public class NovelImportSagaState : SagaStateMachineInstance
{
public Guid CorrelationId { get; set; }
// Name of the current state machine state, persisted via InstanceState().
public string CurrentState { get; set; } = null!;
// Identity
public string NovelUrl { get; set; } = null!;
// Resolved by the metadata import step; null until INovelMetadataImported arrives.
public uint? NovelId { get; set; }
// Chapter tracking
// Expected totals come from INovelMetadataImported; Completed counters are
// incremented as IChapterPullCompleted events arrive.
public int ExpectedChapters { get; set; }
public int CompletedChapters { get; set; }
// Image tracking
// ExpectedImages grows as chapter pulls report queued images (plus cover).
public int ExpectedImages { get; set; }
public int CompletedImages { get; set; }
// Timestamps
public Instant StartedAt { get; set; }
// Set when the saga reaches Completed or Failed; null while in flight.
public Instant? CompletedAt { get; set; }
// Error info
// First exception message from a faulted pull/upload; null on success.
public string? ErrorMessage { get; set; }
}

View File

@@ -1,19 +0,0 @@
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Services.EventHandlers;
/// <summary>
/// Handles <see cref="ChapterPullRequestedEvent"/> by delegating the chapter
/// content download to <see cref="NovelUpdateService"/>.
/// </summary>
public class ChapterPullRequestedEventHandler : IIntegrationEventHandler<ChapterPullRequestedEvent>
{
    private readonly NovelUpdateService _novelUpdateService;

    public ChapterPullRequestedEventHandler(NovelUpdateService novelUpdateService)
    {
        _novelUpdateService = novelUpdateService;
    }

    /// <summary>Pulls the contents of the requested chapter.</summary>
    public Task Handle(ChapterPullRequestedEvent @event) =>
        _novelUpdateService.PullChapterContents(@event.NovelId, @event.ChapterNumber);
}

View File

@@ -1,39 +0,0 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Services.EventHandlers;
/// <summary>
/// Reacts to file-upload status updates: on success, records the uploaded
/// image's new URL; on failure, logs the error. Updates for unknown request
/// IDs are ignored.
/// </summary>
public class FileUploadRequestStatusUpdateEventHandler : IIntegrationEventHandler<FileUploadRequestStatusUpdateEvent>
{
    private readonly ILogger<FileUploadRequestStatusUpdateEventHandler> _logger;
    private readonly NovelServiceDbContext _context;
    private readonly NovelUpdateService _novelUpdateService;

    public FileUploadRequestStatusUpdateEventHandler(
        ILogger<FileUploadRequestStatusUpdateEventHandler> logger,
        NovelServiceDbContext context,
        NovelUpdateService novelUpdateService)
    {
        _logger = logger;
        _context = context;
        _novelUpdateService = novelUpdateService;
    }

    public async Task Handle(FileUploadRequestStatusUpdateEvent @event)
    {
        var image = await _context.Images.FindAsync(@event.RequestId);
        if (image is null)
        {
            // Not a request we care about.
            return;
        }

        switch (@event.Status)
        {
            case RequestStatus.Failed:
                _logger.LogError("Image upload failed for image with id {imageId}", image.Id);
                break;

            case RequestStatus.Success:
                _logger.LogInformation("Image upload succeeded for image with id {imageId}", image.Id);
                await _novelUpdateService.UpdateImage(image.Id, @event.FileAccessUrl);
                break;
        }
    }
}

View File

@@ -1,23 +0,0 @@
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.NovelService.Services.EventHandlers;
/// <summary>
/// Handles <see cref="NovelUpdateRequestedEvent"/> by kicking off a full novel
/// import for the requested URL.
/// </summary>
public class NovelUpdateRequestedEventHandler : IIntegrationEventHandler<NovelUpdateRequestedEvent>
{
    // NOTE(review): _logger and _eventBus are injected but not used in this
    // handler; retained so the constructor signature (and DI registration)
    // stays unchanged.
    private readonly ILogger<NovelUpdateRequestedEventHandler> _logger;
    private readonly IEventBus _eventBus;
    private readonly NovelUpdateService _novelUpdateService;

    public NovelUpdateRequestedEventHandler(ILogger<NovelUpdateRequestedEventHandler> logger, IEventBus eventBus, NovelUpdateService novelUpdateService)
    {
        _logger = logger;
        _eventBus = eventBus;
        _novelUpdateService = novelUpdateService;
    }

    /// <summary>Imports (or refreshes) the novel at the event's URL.</summary>
    public Task Handle(NovelUpdateRequestedEvent @event) =>
        _novelUpdateService.ImportNovel(@event.NovelUrl);
}

View File

@@ -1,39 +0,0 @@
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.Shared.Services.EventBus;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.NovelService.Services.EventHandlers;
/// <summary>
/// Persists the translated text for a completed translation request and then
/// deletes the pending request record. Events whose request ID is not tracked
/// locally are discarded.
/// </summary>
public class TranslationRequestCompletedEventHandler : IIntegrationEventHandler<TranslationRequestCompletedEvent>
{
    private readonly ILogger<TranslationRequestCompletedEventHandler> _logger;
    private readonly NovelServiceDbContext _dbContext;

    public TranslationRequestCompletedEventHandler(ILogger<TranslationRequestCompletedEventHandler> logger, NovelServiceDbContext dbContext)
    {
        _logger = logger;
        _dbContext = dbContext;
    }

    public async Task Handle(TranslationRequestCompletedEvent @event)
    {
        var request = await _dbContext.LocalizationRequests
            .Include(r => r.KeyRequestedForTranslation)
            .ThenInclude(k => k.Texts)
            .FirstOrDefaultAsync(r => r.Id == @event.TranslationRequestId);
        if (request is null)
        {
            // Not one of our requests, discard it
            return;
        }

        // Attach the translation to the key it was requested for.
        var translatedText = new LocalizationText()
        {
            Language = request.TranslateTo,
            Text = @event.TranslatedText,
            TranslationEngine = request.Engine
        };
        request.KeyRequestedForTranslation.Texts.Add(translatedText);

        // The request is fulfilled; remove it in the same unit of work.
        _dbContext.LocalizationRequests.Remove(request);
        await _dbContext.SaveChangesAsync();
    }
}

View File

@@ -1,6 +1,8 @@
using FictionArchive.Service.NovelService.Models;
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.NovelService.Sagas;
using FictionArchive.Service.Shared.Services.Database;
using Microsoft.EntityFrameworkCore;
@@ -10,6 +12,7 @@ public class NovelServiceDbContext(DbContextOptions options, ILogger<NovelServic
: FictionArchiveDbContext(options, logger)
{
public DbSet<Novel> Novels { get; set; }
public DbSet<Volume> Volumes { get; set; }
public DbSet<Chapter> Chapters { get; set; }
public DbSet<Source> Sources { get; set; }
public DbSet<TranslationEngine> TranslationEngines { get; set; }
@@ -17,6 +20,8 @@ public class NovelServiceDbContext(DbContextOptions options, ILogger<NovelServic
public DbSet<LocalizationKey> LocalizationKeys { get; set; }
public DbSet<LocalizationRequest> LocalizationRequests { get; set; }
public DbSet<Image> Images { get; set; }
public DbSet<ActiveImport> ActiveImports { get; set; }
public DbSet<NovelImportSagaState> NovelImportSagaStates { get; set; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
@@ -25,5 +30,28 @@ public class NovelServiceDbContext(DbContextOptions options, ILogger<NovelServic
modelBuilder.Entity<Novel>()
.HasIndex("ExternalId", "SourceId")
.IsUnique();
// Volume.Order is unique per Novel
modelBuilder.Entity<Volume>()
.HasIndex("NovelId", "Order")
.IsUnique();
// Chapter.Order is unique per Volume
modelBuilder.Entity<Chapter>()
.HasIndex("VolumeId", "Order")
.IsUnique();
modelBuilder.Entity<ActiveImport>(entity =>
{
entity.HasKey(e => e.ImportId);
entity.HasIndex(e => e.NovelUrl).IsUnique();
});
modelBuilder.Entity<NovelImportSagaState>(entity =>
{
entity.HasKey(e => e.CorrelationId);
entity.HasIndex(e => e.NovelUrl);
entity.HasIndex(e => e.CurrentState);
});
}
}

View File

@@ -1,17 +1,19 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.FileService.IntegrationEvents;
using FictionArchive.Service.NovelService.Contracts;
using FictionArchive.Service.NovelService.Models;
using FictionArchive.Service.NovelService.Models.Configuration;
using FictionArchive.Service.NovelService.Models.Enums;
using FictionArchive.Service.NovelService.Models.Images;
using FictionArchive.Service.NovelService.Models.IntegrationEvents;
using FictionArchive.Service.NovelService.Models.Localization;
using FictionArchive.Service.NovelService.Models.Novels;
using FictionArchive.Service.NovelService.Models.SourceAdapters;
using FictionArchive.Service.NovelService.Services.SourceAdapters;
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.Shared.Contracts.Events;
using HtmlAgilityPack;
using MassTransit;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options;
using NodaTime;
namespace FictionArchive.Service.NovelService.Services;
@@ -20,16 +22,18 @@ public class NovelUpdateService
private readonly NovelServiceDbContext _dbContext;
private readonly ILogger<NovelUpdateService> _logger;
private readonly IEnumerable<ISourceAdapter> _sourceAdapters;
private readonly IEventBus _eventBus;
private readonly IPublishEndpoint _publishEndpoint;
private readonly NovelUpdateServiceConfiguration _novelUpdateServiceConfiguration;
private readonly IClock _clock;
public NovelUpdateService(NovelServiceDbContext dbContext, ILogger<NovelUpdateService> logger, IEnumerable<ISourceAdapter> sourceAdapters, IEventBus eventBus, IOptions<NovelUpdateServiceConfiguration> novelUpdateServiceConfiguration)
public NovelUpdateService(NovelServiceDbContext dbContext, ILogger<NovelUpdateService> logger, IEnumerable<ISourceAdapter> sourceAdapters, IPublishEndpoint publishEndpoint, IOptions<NovelUpdateServiceConfiguration> novelUpdateServiceConfiguration, IClock clock)
{
_dbContext = dbContext;
_logger = logger;
_sourceAdapters = sourceAdapters;
_eventBus = eventBus;
_publishEndpoint = publishEndpoint;
_novelUpdateServiceConfiguration = novelUpdateServiceConfiguration.Value;
_clock = clock;
}
#region Helper Methods
@@ -190,6 +194,48 @@ public class NovelUpdateService
return existingChapters.Concat(newChapters).ToList();
}
/// <summary>
/// Reconciles the volumes reported by the source with the locally stored ones.
/// Volumes are matched by Order (unique per novel); matched volumes get their
/// chapter lists synchronized, unmatched metadata volumes are created, and
/// local volumes absent from the metadata (user-created) are preserved as-is.
/// </summary>
private static List<Volume> SynchronizeVolumes(
    List<VolumeMetadata> metadataVolumes,
    Language rawLanguage,
    List<Volume>? existingVolumes)
{
    var current = existingVolumes ?? new List<Volume>();
    var synced = new List<Volume>();

    foreach (var metaVolume in metadataVolumes)
    {
        var match = current.FirstOrDefault(v => v.Order == metaVolume.Order);
        if (match is null)
        {
            // Unknown volume - create it together with its synced chapters.
            synced.Add(new Volume
            {
                Order = metaVolume.Order,
                Name = LocalizationKey.CreateFromText(metaVolume.Name, rawLanguage),
                Chapters = SynchronizeChapters(metaVolume.Chapters, rawLanguage, null)
            });
        }
        else
        {
            // Known volume - refresh its chapter list in place.
            match.Chapters = SynchronizeChapters(
                metaVolume.Chapters,
                rawLanguage,
                match.Chapters);
            synced.Add(match);
        }
    }

    // Keep existing volumes not in metadata (user-created volumes).
    var metaOrders = metadataVolumes.Select(v => v.Order).ToHashSet();
    synced.AddRange(current.Where(v => !metaOrders.Contains(v.Order)));

    return synced;
}
private static (Image? image, bool shouldPublishEvent) HandleCoverImage(
ImageData? newCoverData,
Image? existingCoverImage)
@@ -232,7 +278,7 @@ public class NovelUpdateService
metadata.SystemTags,
metadata.RawLanguage);
var chapters = SynchronizeChapters(metadata.Chapters, metadata.RawLanguage, null);
var volumes = SynchronizeVolumes(metadata.Volumes, metadata.RawLanguage, null);
var novel = new Novel
{
@@ -243,7 +289,7 @@ public class NovelUpdateService
CoverImage = metadata.CoverImage != null
? new Image { OriginalPath = metadata.CoverImage.Url }
: null,
Chapters = chapters,
Volumes = volumes,
Description = LocalizationKey.CreateFromText(metadata.Description, metadata.RawLanguage),
Name = LocalizationKey.CreateFromText(metadata.Name, metadata.RawLanguage),
RawStatus = metadata.RawStatus,
@@ -257,7 +303,7 @@ public class NovelUpdateService
#endregion
public async Task<Novel> ImportNovel(string novelUrl)
public async Task<Novel> ImportNovel(Guid importId, string novelUrl)
{
// Step 1: Get metadata from source adapter
NovelMetadata? metadata = null;
@@ -281,16 +327,20 @@ public class NovelUpdateService
// Step 3: Check for existing novel by ExternalId + Source.Key
var existingNovel = await _dbContext.Novels
.Include(n => n.Author)
.ThenInclude(a => a.Name)
.ThenInclude(lk => lk.Texts)
.ThenInclude(a => a.Name)
.ThenInclude(lk => lk.Texts)
.Include(n => n.Source)
.Include(n => n.Name)
.ThenInclude(lk => lk.Texts)
.ThenInclude(lk => lk.Texts)
.Include(n => n.Description)
.ThenInclude(lk => lk.Texts)
.ThenInclude(lk => lk.Texts)
.Include(n => n.Tags)
.Include(n => n.Chapters)
.Include(n => n.CoverImage)
.Include(n => n.Volumes)
.ThenInclude(volume => volume.Chapters)
.ThenInclude(chapter => chapter.Body)
.ThenInclude(localizationKey => localizationKey.Texts)
.Include(n => n.CoverImage).Include(novel => novel.Volumes).ThenInclude(volume => volume.Chapters)
.ThenInclude(chapter => chapter.Name)
.FirstOrDefaultAsync(n =>
n.ExternalId == metadata.ExternalId &&
n.Source.Key == metadata.SourceDescriptor.Key);
@@ -298,6 +348,12 @@ public class NovelUpdateService
Novel novel;
bool shouldPublishCoverEvent;
// Capture existing chapter IDs to detect new chapters later
var existingChapterIds = existingNovel?.Volumes
.SelectMany(v => v.Chapters)
.Select(c => c.Id)
.ToHashSet() ?? new HashSet<uint>();
if (existingNovel == null)
{
// CREATE PATH: New novel
@@ -325,11 +381,11 @@ public class NovelUpdateService
metadata.SystemTags,
metadata.RawLanguage);
// Synchronize chapters (add only)
novel.Chapters = SynchronizeChapters(
metadata.Chapters,
// Synchronize volumes (and their chapters)
novel.Volumes = SynchronizeVolumes(
metadata.Volumes,
metadata.RawLanguage,
existingNovel.Chapters);
existingNovel.Volumes);
// Handle cover image
(novel.CoverImage, shouldPublishCoverEvent) = HandleCoverImage(
@@ -339,51 +395,111 @@ public class NovelUpdateService
await _dbContext.SaveChangesAsync();
// Publish cover image event if needed
if (shouldPublishCoverEvent && novel.CoverImage != null && metadata.CoverImage != null)
// Publish novel created event for new novels
if (existingNovel == null)
{
await _eventBus.Publish(new FileUploadRequestCreatedEvent
{
RequestId = novel.CoverImage.Id,
FileData = metadata.CoverImage.Data,
FilePath = $"Novels/{novel.Id}/Images/cover.jpg"
});
await _publishEndpoint.Publish<INovelCreated>(new NovelCreated(
novel.Id,
novel.Name.Texts.First(t => t.Language == novel.RawLanguage).Text,
novel.RawLanguage,
novel.Source.Key,
novel.Author.Name.Texts.First(t => t.Language == novel.RawLanguage).Text));
}
// Publish chapter pull events for chapters without body content
var chaptersNeedingPull = novel.Chapters
// Publish chapter created events for new chapters
foreach (var volume in novel.Volumes)
{
foreach (var chapter in volume.Chapters.Where(c => !existingChapterIds.Contains(c.Id)))
{
await _publishEndpoint.Publish<IChapterCreated>(new ChapterCreated(
chapter.Id,
novel.Id,
volume.Id,
(uint)volume.Order,
chapter.Order,
chapter.Name.Texts.First(t => t.Language == novel.RawLanguage).Text));
}
}
// Count chapters that need pulling
var chaptersNeedingPull = novel.Volumes
.SelectMany(v => v.Chapters)
.Where(c => c.Body?.Texts == null || !c.Body.Texts.Any())
.ToList();
foreach (var chapter in chaptersNeedingPull)
var hasCoverToUpload = shouldPublishCoverEvent && novel.CoverImage != null && metadata.CoverImage != null;
// Publish metadata imported event for saga
await _publishEndpoint.Publish<INovelMetadataImported>(new NovelMetadataImported(
importId,
novel.Id,
chaptersNeedingPull.Count,
hasCoverToUpload
));
// Publish cover image event if needed
if (hasCoverToUpload)
{
await _eventBus.Publish(new ChapterPullRequestedEvent
await _publishEndpoint.Publish<IFileUploadRequestCreated>(new FileUploadRequestCreated(
importId,
novel.CoverImage.Id,
$"Novels/{novel.Id}/Images/cover.jpg",
metadata.CoverImage.Data));
}
// Publish chapter pull events for chapters without body content
foreach (var volume in novel.Volumes)
{
var volumeChaptersNeedingPull = volume.Chapters
.Where(c => c.Body?.Texts == null || !c.Body.Texts.Any())
.ToList();
foreach (var chapter in volumeChaptersNeedingPull)
{
NovelId = novel.Id,
ChapterNumber = chapter.Order
});
await _publishEndpoint.Publish<IChapterPullRequested>(new ChapterPullRequested(
importId,
novel.Id,
volume.Id,
chapter.Order));
}
}
return novel;
}
public async Task<Chapter> PullChapterContents(uint novelId, uint chapterNumber)
public async Task<(Chapter chapter, int imageCount)> PullChapterContents(Guid importId, uint novelId, uint volumeId, uint chapterOrder)
{
var novel = await _dbContext.Novels.Where(novel => novel.Id == novelId)
.Include(novel => novel.Chapters)
.Include(novel => novel.Volumes)
.ThenInclude(volume => volume.Chapters)
.ThenInclude(chapter => chapter.Body)
.ThenInclude(body => body.Texts)
.Include(novel => novel.Source).Include(novel => novel.Chapters).ThenInclude(chapter => chapter.Images)
.Include(novel => novel.Source)
.Include(novel => novel.Volumes)
.ThenInclude(volume => volume.Chapters)
.ThenInclude(chapter => chapter.Images)
.FirstOrDefaultAsync();
var chapter = novel.Chapters.Where(chapter => chapter.Order == chapterNumber).FirstOrDefault();
var volume = novel.Volumes.FirstOrDefault(v => v.Id == volumeId);
var chapter = volume.Chapters.FirstOrDefault(c => c.Order == chapterOrder);
var adapter = _sourceAdapters.FirstOrDefault(adapter => adapter.SourceDescriptor.Key == novel.Source.Key);
var rawChapter = await adapter.GetRawChapter(chapter.Url);
var localizationText = new LocalizationText()
// If we already have the raw for this, overwrite it for now. Revisions will come later.
var localizationText = chapter.Body.Texts.FirstOrDefault(text => text.Language == novel.RawLanguage);
if (localizationText == null)
{
Text = rawChapter.Text,
Language = novel.RawLanguage
};
chapter.Body.Texts.Add(localizationText);
localizationText = new LocalizationText()
{
Text = rawChapter.Text,
Language = novel.RawLanguage
};
chapter.Body.Texts.Add(localizationText);
}
else
{
localizationText.Text = rawChapter.Text;
}
chapter.Images = rawChapter.ImageData.Select(img => new Image()
{
OriginalPath = img.Url
@@ -417,15 +533,14 @@ public class NovelUpdateService
foreach (var image in chapter.Images)
{
var data = rawChapter.ImageData.FirstOrDefault(img => img.Url == image.OriginalPath);
await _eventBus.Publish(new FileUploadRequestCreatedEvent()
{
FileData = data.Data,
FilePath = $"{novel.Id}/Images/Chapter-{chapter.Id}/{imgCount++}.jpg",
RequestId = image.Id
});
await _publishEndpoint.Publish<IFileUploadRequestCreated>(new FileUploadRequestCreated(
importId,
image.Id,
$"Novels/{novel.Id}/Images/Chapter-{chapter.Id}/{imgCount++}.jpg",
data.Data));
}
return chapter;
return (chapter, chapter.Images.Count);
}
public async Task UpdateImage(Guid imageId, string newUrl)
@@ -456,24 +571,92 @@ public class NovelUpdateService
await _dbContext.SaveChangesAsync();
}
public async Task<NovelUpdateRequestedEvent> QueueNovelImport(string novelUrl)
public async Task<ImportNovelResult> QueueNovelImport(string novelUrl)
{
var importNovelRequestEvent = new NovelUpdateRequestedEvent()
var importId = Guid.NewGuid();
var activeImport = new ActiveImport
{
NovelUrl = novelUrl
ImportId = importId,
NovelUrl = novelUrl,
StartedAt = _clock.GetCurrentInstant()
};
await _eventBus.Publish(importNovelRequestEvent);
return importNovelRequestEvent;
try
{
await _dbContext.ActiveImports.AddAsync(activeImport);
await _dbContext.SaveChangesAsync();
}
catch (DbUpdateException)
{
throw new InvalidOperationException($"An import is already in progress for {novelUrl}");
}
var importNovelRequestEvent = new NovelImportRequested(importId, novelUrl);
await _publishEndpoint.Publish<INovelImportRequested>(importNovelRequestEvent);
return new ImportNovelResult(importId, novelUrl);
}
public async Task<ChapterPullRequestedEvent> QueueChapterPull(uint novelId, uint chapterNumber)
public async Task<ChapterPullRequested> QueueChapterPull(Guid importId, uint novelId, uint volumeId, uint chapterOrder)
{
var chapterPullEvent = new ChapterPullRequestedEvent()
{
NovelId = novelId,
ChapterNumber = chapterNumber
};
await _eventBus.Publish(chapterPullEvent);
var chapterPullEvent = new ChapterPullRequested(importId, novelId, volumeId, chapterOrder);
await _publishEndpoint.Publish<IChapterPullRequested>(chapterPullEvent);
return chapterPullEvent;
}
/// <summary>
/// Deletes a novel and all dependent data. Rows behind NO ACTION foreign keys
/// (localization texts, images, localization requests) are removed explicitly;
/// volumes, chapters, localization keys and tag mappings are removed by
/// database cascade when the novel row is deleted.
/// </summary>
/// <exception cref="KeyNotFoundException">No novel exists with the given ID.</exception>
public async Task DeleteNovel(uint novelId)
{
    var novel = await _dbContext.Novels
        .Include(n => n.CoverImage)
        .Include(n => n.Name).ThenInclude(k => k.Texts)
        .Include(n => n.Description).ThenInclude(k => k.Texts)
        .Include(n => n.Volumes).ThenInclude(v => v.Name).ThenInclude(k => k.Texts)
        .Include(n => n.Volumes).ThenInclude(v => v.Chapters).ThenInclude(c => c.Images)
        .Include(n => n.Volumes).ThenInclude(v => v.Chapters).ThenInclude(c => c.Name).ThenInclude(k => k.Texts)
        .Include(n => n.Volumes).ThenInclude(v => v.Chapters).ThenInclude(c => c.Body).ThenInclude(k => k.Texts)
        .FirstOrDefaultAsync(n => n.Id == novelId);
    if (novel is null)
    {
        throw new KeyNotFoundException($"Novel with ID '{novelId}' not found");
    }

    // Gather every LocalizationKey owned by the novel so rows that reference
    // them can be removed before the cascade delete.
    var locKeyIds = new List<Guid> { novel.Name.Id, novel.Description.Id };
    foreach (var volume in novel.Volumes)
    {
        locKeyIds.Add(volume.Name.Id);
        foreach (var chapter in volume.Chapters)
        {
            locKeyIds.Add(chapter.Name.Id);
            locKeyIds.Add(chapter.Body.Id);
        }
    }

    // 1. Remove LocalizationRequests referencing these keys.
    var locRequests = await _dbContext.LocalizationRequests
        .Where(r => locKeyIds.Contains(r.KeyRequestedForTranslation.Id))
        .ToListAsync();
    _dbContext.LocalizationRequests.RemoveRange(locRequests);

    // 2. Remove LocalizationTexts and chapter Images (NO ACTION FKs - won't cascade).
    _dbContext.RemoveRange(novel.Name.Texts);
    _dbContext.RemoveRange(novel.Description.Texts);
    foreach (var volume in novel.Volumes)
    {
        _dbContext.RemoveRange(volume.Name.Texts);
        foreach (var chapter in volume.Chapters)
        {
            _dbContext.RemoveRange(chapter.Name.Texts);
            _dbContext.RemoveRange(chapter.Body.Texts);
            _dbContext.Images.RemoveRange(chapter.Images);
        }
    }
    if (novel.CoverImage is not null)
    {
        _dbContext.Images.Remove(novel.CoverImage);
    }

    // 3. Remove novel - cascades: volumes, chapters, localization keys, tag mappings.
    _dbContext.Novels.Remove(novel);
    await _dbContext.SaveChangesAsync();
}
}

View File

@@ -66,7 +66,7 @@ public class NovelpiaAdapter : ISourceAdapter
ExternalId = novelId.ToString(),
SystemTags = new List<string>(),
SourceTags = new List<string>(),
Chapters = new List<ChapterMetadata>(),
Volumes = new List<VolumeMetadata>(),
SourceDescriptor = SourceDescriptor
};
@@ -133,6 +133,9 @@ public class NovelpiaAdapter : ISourceAdapter
novel.SourceTags.Add(tag);
}
// Author's posts (from notice_table in the page HTML)
var authorsPosts = ParseAuthorsPosts(novelData);
// Chapters
uint page = 0;
List<ChapterMetadata> chapters = new List<ChapterMetadata>();
@@ -168,7 +171,25 @@ public class NovelpiaAdapter : ISourceAdapter
}
page++;
}
novel.Chapters = chapters;
// Add Author's Posts volume if there are any
if (authorsPosts.Count > 0)
{
novel.Volumes.Add(new VolumeMetadata
{
Order = 0,
Name = "Author's Posts",
Chapters = authorsPosts
});
}
// Main Story volume
novel.Volumes.Add(new VolumeMetadata
{
Order = 1,
Name = "Main Story",
Chapters = chapters
});
return novel;
}
@@ -241,4 +262,40 @@ public class NovelpiaAdapter : ISourceAdapter
}
return await image.Content.ReadAsByteArrayAsync();
}
/// <summary>
/// Extracts the author's posts from the novel page's notice_table section.
/// Returns an empty list when the section is absent or contains no posts.
/// NOTE(review): regex-over-HTML is fragile; a markup change on the source
/// site will silently yield zero posts.
/// </summary>
private List<ChapterMetadata> ParseAuthorsPosts(string novelHtml)
{
    // Locate the notice_table block within the page HTML.
    var noticeTableMatch = Regex.Match(novelHtml,
        @"(?s)<table[^>]*class=""notice_table[^""]*""[^>]*>(.*?)</table>");
    if (!noticeTableMatch.Success)
    {
        return new List<ChapterMetadata>();
    }

    // Each post is a td whose onclick navigates to a viewer URL, with the
    // title inside a <b> element:
    //   <td ... onclick="...location='/viewer/3330612';"><b>Title</b>
    var postMatches = Regex.Matches(noticeTableMatch.Groups[1].Value,
        @"onclick=""[^""]*location='/viewer/(\d+)'[^""]*""[^>]*><b>([^<]+)</b>");

    // Order is 1-based in document order.
    return postMatches
        .Select((match, index) => new ChapterMetadata
        {
            Revision = 0,
            Order = (uint)(index + 1),
            Url = $"https://novelpia.com/viewer/{match.Groups[1].Value}",
            Name = WebUtility.HtmlDecode(match.Groups[2].Value.Trim())
        })
        .ToList();
}
}

View File

@@ -2,7 +2,8 @@
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore": "Warning"
}
},
"Novelpia": {

View File

@@ -0,0 +1,233 @@
using System.Text.Json;
using FictionArchive.Common.Enums;
using FictionArchive.Service.ReportingService.Consumers;
using FictionArchive.Service.ReportingService.Models;
using FictionArchive.Service.ReportingService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using FluentAssertions;
using MassTransit;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;
using Xunit;
namespace FictionArchive.Service.ReportingService.Tests.Consumers;
public class JobStatusUpdateConsumerTests : IDisposable
{
private readonly ReportingDbContext _dbContext;
private readonly JobStatusUpdateConsumer _consumer;
public JobStatusUpdateConsumerTests()
{
var options = new DbContextOptionsBuilder<ReportingDbContext>()
.UseInMemoryDatabase(databaseName: Guid.NewGuid().ToString())
.Options;
_dbContext = new TestReportingDbContext(options, NullLogger<ReportingDbContext>.Instance);
_consumer = new JobStatusUpdateConsumer(
NullLogger<JobStatusUpdateConsumer>.Instance,
_dbContext);
}
[Fact]
public async Task Should_create_new_job_on_first_event()
{
var jobId = Guid.NewGuid();
var context = CreateConsumeContext(new JobStatusUpdate(
jobId, null, "TestJob", "Test job display",
JobStatus.InProgress, null, new() { ["key1"] = "value1" }));
await _consumer.Consume(context);
var job = await _dbContext.Jobs.FindAsync(jobId);
job.Should().NotBeNull();
job!.JobType.Should().Be("TestJob");
job.DisplayName.Should().Be("Test job display");
job.Status.Should().Be(JobStatus.InProgress);
job.Metadata.Should().ContainKey("key1").WhoseValue.Should().Be("value1");
}
[Fact]
public async Task Should_update_status_on_subsequent_event()
{
var jobId = Guid.NewGuid();
// First event: create
await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
jobId, null, "TestJob", "Test job",
JobStatus.InProgress, null, null)));
// Second event: update
await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
jobId, null, "TestJob", "Test job",
JobStatus.Completed, null, null)));
var job = await _dbContext.Jobs.FindAsync(jobId);
job!.Status.Should().Be(JobStatus.Completed);
}
[Fact]
public async Task Should_merge_metadata_on_update()
{
var jobId = Guid.NewGuid();
// First event with initial metadata
await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
jobId, null, "TestJob", "Test job",
JobStatus.InProgress, null, new() { ["NovelId"] = "42" })));
// Second event with additional metadata
await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
jobId, null, "TestJob", "Test job",
JobStatus.Completed, null, new() { ["ChapterId"] = "7" })));
var job = await _dbContext.Jobs.FindAsync(jobId);
job!.Metadata.Should().ContainKey("NovelId").WhoseValue.Should().Be("42");
job.Metadata.Should().ContainKey("ChapterId").WhoseValue.Should().Be("7");
}
[Fact]
public async Task Should_not_overwrite_job_type_on_update()
{
var jobId = Guid.NewGuid();
await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
jobId, null, "OriginalType", "Test job",
JobStatus.InProgress, null, null)));
await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
jobId, null, "DifferentType", "Test job",
JobStatus.Completed, null, null)));
var job = await _dbContext.Jobs.FindAsync(jobId);
job!.JobType.Should().Be("OriginalType");
}
[Fact]
public async Task Should_not_overwrite_parent_job_id_on_update()
{
    var id = Guid.NewGuid();
    var parent = Guid.NewGuid();

    // Create with a parent, then send an update that omits the parent (null).
    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        id, parent, "TestJob", "Test job", JobStatus.InProgress, null, null)));
    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        id, null, "TestJob", "Test job", JobStatus.Completed, null, null)));

    // The parent link established at creation must be preserved.
    (await _dbContext.Jobs.FindAsync(id))!.ParentJobId.Should().Be(parent);
}
[Fact]
public async Task Should_set_error_message_on_failure()
{
    var id = Guid.NewGuid();
    var failure = new JobStatusUpdate(
        id, null, "TestJob", "Test job", JobStatus.Failed, "Something went wrong", null);

    await _consumer.Consume(CreateConsumeContext(failure));

    // A failed event persists both the status and its error text.
    var stored = await _dbContext.Jobs.FindAsync(id);
    stored!.Status.Should().Be(JobStatus.Failed);
    stored.ErrorMessage.Should().Be("Something went wrong");
}
[Fact]
public async Task Should_store_parent_job_id()
{
    var parent = Guid.NewGuid();
    var child = Guid.NewGuid();

    // A parent job event followed by a child event referencing it.
    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        parent, null, "ParentJob", "Parent", JobStatus.InProgress, null, null)));
    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        child, parent, "ChildJob", "Child", JobStatus.InProgress, null, null)));

    (await _dbContext.Jobs.FindAsync(child))!.ParentJobId.Should().Be(parent);
}
[Fact]
public async Task Should_handle_null_metadata_on_create()
{
    var id = Guid.NewGuid();

    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        id, null, "TestJob", "Test job", JobStatus.InProgress, null, null)));

    // A null metadata payload is stored as null, not coerced to an empty dictionary.
    (await _dbContext.Jobs.FindAsync(id))!.Metadata.Should().BeNull();
}
[Fact]
public async Task Should_add_metadata_to_job_with_null_metadata()
{
    var id = Guid.NewGuid();

    // Created with no metadata, then updated with a metadata entry.
    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        id, null, "TestJob", "Test job", JobStatus.InProgress, null, null)));
    await _consumer.Consume(CreateConsumeContext(new JobStatusUpdate(
        id, null, "TestJob", "Test job", JobStatus.Completed, null,
        new() { ["result"] = "success" })));

    // The update lazily creates the dictionary and adds the entry.
    var stored = await _dbContext.Jobs.FindAsync(id);
    stored!.Metadata.Should().ContainKey("result").WhoseValue.Should().Be("success");
}
/// <summary>
/// Wraps a <see cref="JobStatusUpdate"/> in a substituted MassTransit consume
/// context so the consumer can be invoked without a real bus.
/// </summary>
private static ConsumeContext<IJobStatusUpdate> CreateConsumeContext(JobStatusUpdate message)
{
    var mock = Substitute.For<ConsumeContext<IJobStatusUpdate>>();
    mock.Message.Returns(message);
    return mock;
}
// xUnit invokes Dispose after each test; release the per-test DbContext.
public void Dispose() => _dbContext.Dispose();
/// <summary>
/// Test-specific subclass that adds a JSON value converter for Dictionary properties,
/// since the InMemory provider does not support the jsonb column type used in production.
/// </summary>
private class TestReportingDbContext : ReportingDbContext
{
    public TestReportingDbContext(DbContextOptions options, ILogger<ReportingDbContext> logger)
        : base(options, logger)
    {
    }

    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);
        modelBuilder.Entity<Job>(entity =>
        {
            // Round-trip Metadata through JSON text so the InMemory provider can persist it.
            // NOTE(review): EF Core does not invoke value converters for null values, so the
            // null branches in both lambdas below are likely never exercised — confirm.
            entity.Property(j => j.Metadata)
                .HasConversion(
                    v => v == null ? null : JsonSerializer.Serialize(v, (JsonSerializerOptions?)null),
                    v => v == null ? null : JsonSerializer.Deserialize<Dictionary<string, string>>(v, (JsonSerializerOptions?)null))
                // Clears the "jsonb" column type configured by the production context;
                // the InMemory provider has no provider-specific column types.
                .HasColumnType(null!);
        });
    }

    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
    {
        // Skip base OnConfiguring to avoid adding AuditInterceptor
        // which is not needed for unit tests
    }
}
}

View File

@@ -0,0 +1,32 @@
<!-- Unit-test project for FictionArchive.Service.ReportingService:
     xUnit + NSubstitute + FluentAssertions, with the EF Core InMemory provider
     standing in for the production PostgreSQL database. -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <!-- Test projects are never packaged. -->
    <IsPackable>false</IsPackable>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="MassTransit" Version="8.5.7" />
    <PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.11" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
    <PackageReference Include="NodaTime.Testing" Version="3.3.0" />
    <PackageReference Include="NSubstitute" Version="5.1.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.2">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\FictionArchive.Service.ReportingService\FictionArchive.Service.ReportingService.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,66 @@
using FictionArchive.Service.ReportingService.Models;
using FictionArchive.Service.ReportingService.Services;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.ReportingService.Consumers;
/// <summary>
/// Consumes <see cref="IJobStatusUpdate"/> events and upserts the matching
/// <see cref="Job"/> row: the first event for a job id creates the row; later
/// events refresh status/display name/error and merge metadata keys.
/// JobType and ParentJobId are only ever written at creation.
/// </summary>
public class JobStatusUpdateConsumer : IConsumer<IJobStatusUpdate>
{
    private readonly ILogger<JobStatusUpdateConsumer> _logger;
    private readonly ReportingDbContext _dbContext;

    public JobStatusUpdateConsumer(
        ILogger<JobStatusUpdateConsumer> logger,
        ReportingDbContext dbContext)
    {
        _logger = logger;
        _dbContext = dbContext;
    }

    public async Task Consume(ConsumeContext<IJobStatusUpdate> context)
    {
        var update = context.Message;
        var job = await _dbContext.Jobs.FirstOrDefaultAsync(j => j.Id == update.JobId);

        if (job is null)
        {
            // First event for this job id: materialize the row. The incoming
            // metadata is copied defensively so the message is never aliased.
            _dbContext.Jobs.Add(new Job
            {
                Id = update.JobId,
                ParentJobId = update.ParentJobId,
                JobType = update.JobType,
                DisplayName = update.DisplayName,
                Status = update.Status,
                ErrorMessage = update.ErrorMessage,
                Metadata = update.Metadata is null
                    ? null
                    : new Dictionary<string, string>(update.Metadata)
            });
            _logger.LogInformation("Created job {JobId} of type {JobType}", update.JobId, update.JobType);
        }
        else
        {
            // Subsequent event: refresh the mutable fields and merge metadata
            // (incoming values win on key conflicts).
            job.Status = update.Status;
            job.DisplayName = update.DisplayName;
            job.ErrorMessage = update.ErrorMessage;
            if (update.Metadata is not null)
            {
                job.Metadata ??= new Dictionary<string, string>();
                foreach (var (key, value) in update.Metadata)
                {
                    job.Metadata[key] = value;
                }
            }
            _logger.LogInformation("Updated job {JobId} to status {Status}", update.JobId, update.Status);
        }

        await _dbContext.SaveChangesAsync();
    }
}

View File

@@ -0,0 +1,23 @@
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
USER $APP_UID
WORKDIR /app
EXPOSE 8080
EXPOSE 8081

FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
# Copy the csproj of the service AND of every referenced project before restore:
# the ReportingService csproj has a ProjectReference to FictionArchive.Service.Shared,
# so restoring with only the service csproj present cannot resolve the reference graph.
# Copying only project files here also keeps the restore layer cached until a csproj changes.
# NOTE(review): if FictionArchive.Service.Shared itself references further projects
# (e.g. a FictionArchive.Common project), their csproj files must be copied here too — confirm.
COPY ["FictionArchive.Service.ReportingService/FictionArchive.Service.ReportingService.csproj", "FictionArchive.Service.ReportingService/"]
COPY ["FictionArchive.Service.Shared/FictionArchive.Service.Shared.csproj", "FictionArchive.Service.Shared/"]
RUN dotnet restore "FictionArchive.Service.ReportingService/FictionArchive.Service.ReportingService.csproj"
COPY . .
WORKDIR "/src/FictionArchive.Service.ReportingService"
RUN dotnet build "./FictionArchive.Service.ReportingService.csproj" -c $BUILD_CONFIGURATION -o /app/build

FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./FictionArchive.Service.ReportingService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "FictionArchive.Service.ReportingService.dll"]

View File

@@ -0,0 +1,29 @@
<!-- ReportingService: GraphQL (HotChocolate) read-model service backed by EF Core;
     messaging and shared infrastructure come in via FictionArchive.Service.Shared. -->
<Project Sdk="Microsoft.NET.Sdk.Web">
  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="HotChocolate.AspNetCore" Version="15.1.11" />
    <PackageReference Include="HotChocolate.Data.EntityFramework" Version="15.1.11" />
    <!-- Design-time only: enables `dotnet ef` migrations tooling. -->
    <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.11">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <Content Include="..\.dockerignore">
      <Link>.dockerignore</Link>
    </Content>
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\FictionArchive.Service.Shared\FictionArchive.Service.Shared.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,26 @@
using FictionArchive.Service.ReportingService.Models;
using FictionArchive.Service.ReportingService.Services;
using HotChocolate.Authorization;
using HotChocolate.Data;
namespace FictionArchive.Service.ReportingService.GraphQL;
/// <summary>
/// GraphQL query root exposing reporting jobs. All queries require an
/// authenticated caller. Attribute order is significant to HotChocolate's
/// middleware pipeline and is preserved from the original definitions.
/// </summary>
[QueryType]
public static class JobQueries
{
    /// <summary>Fetches a single job by id, or null when no such job exists.</summary>
    [UseProjection]
    [Authorize]
    [UseFirstOrDefault]
    public static IQueryable<Job> GetJobById(
        Guid jobId,
        ReportingDbContext db)
    {
        return db.Jobs.Where(j => j.Id == jobId);
    }

    /// <summary>Pageable, filterable, sortable listing of all jobs.</summary>
    [UsePaging]
    [UseProjection]
    [UseFiltering]
    [UseSorting]
    [Authorize]
    public static IQueryable<Job> GetJobs(ReportingDbContext db)
    {
        return db.Jobs;
    }
}

View File

@@ -0,0 +1,86 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using FictionArchive.Service.ReportingService.Services;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.ReportingService.Migrations
{
// NOTE(review): EF Core-generated migration designer file — regenerate with
// `dotnet ef migrations` rather than hand-editing.
[DbContext(typeof(ReportingDbContext))]
[Migration("20260130214338_InitialCreate")]
partial class InitialCreate
{
    /// <inheritdoc />
    protected override void BuildTargetModel(ModelBuilder modelBuilder)
    {
#pragma warning disable 612, 618
        modelBuilder
            .HasAnnotation("ProductVersion", "9.0.11")
            .HasAnnotation("Relational:MaxIdentifierLength", 63);
        NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
        modelBuilder.Entity("FictionArchive.Service.ReportingService.Models.Job", b =>
        {
            b.Property<Guid>("Id")
                .ValueGeneratedOnAdd()
                .HasColumnType("uuid");
            b.Property<Instant>("CreatedTime")
                .HasColumnType("timestamp with time zone");
            b.Property<string>("DisplayName")
                .IsRequired()
                .HasColumnType("text");
            b.Property<string>("ErrorMessage")
                .HasColumnType("text");
            b.Property<string>("JobType")
                .IsRequired()
                .HasColumnType("text");
            b.Property<Instant>("LastUpdatedTime")
                .HasColumnType("timestamp with time zone");
            b.Property<Dictionary<string, string>>("Metadata")
                .HasColumnType("jsonb");
            b.Property<Guid?>("ParentJobId")
                .HasColumnType("uuid");
            b.Property<int>("Status")
                .HasColumnType("integer");
            b.HasKey("Id");
            b.HasIndex("ParentJobId");
            b.ToTable("Jobs");
        });
        modelBuilder.Entity("FictionArchive.Service.ReportingService.Models.Job", b =>
        {
            // Self-referencing parent/child relationship; deleting a parent nulls the link.
            b.HasOne("FictionArchive.Service.ReportingService.Models.Job", "ParentJob")
                .WithMany("ChildJobs")
                .HasForeignKey("ParentJobId")
                .OnDelete(DeleteBehavior.SetNull);
            b.Navigation("ParentJob");
        });
        modelBuilder.Entity("FictionArchive.Service.ReportingService.Models.Job", b =>
        {
            b.Navigation("ChildJobs");
        });
#pragma warning restore 612, 618
    }
}
}

View File

@@ -0,0 +1,54 @@
using System;
using System.Collections.Generic;
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
#nullable disable
namespace FictionArchive.Service.ReportingService.Migrations
{
/// <inheritdoc />
public partial class InitialCreate : Migration
{
    // NOTE(review): EF Core-generated migration — prefer regenerating over hand-editing.
    /// <inheritdoc />
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        // Creates the Jobs table: a self-referencing tree via ParentJobId, with the
        // FK set to null when the parent row is deleted (children survive as roots).
        migrationBuilder.CreateTable(
            name: "Jobs",
            columns: table => new
            {
                Id = table.Column<Guid>(type: "uuid", nullable: false),
                ParentJobId = table.Column<Guid>(type: "uuid", nullable: true),
                JobType = table.Column<string>(type: "text", nullable: false),
                DisplayName = table.Column<string>(type: "text", nullable: false),
                Status = table.Column<int>(type: "integer", nullable: false),
                ErrorMessage = table.Column<string>(type: "text", nullable: true),
                // Stored as PostgreSQL jsonb (InMemory tests substitute a JSON-string converter).
                Metadata = table.Column<Dictionary<string, string>>(type: "jsonb", nullable: true),
                CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_Jobs", x => x.Id);
                table.ForeignKey(
                    name: "FK_Jobs_Jobs_ParentJobId",
                    column: x => x.ParentJobId,
                    principalTable: "Jobs",
                    principalColumn: "Id",
                    onDelete: ReferentialAction.SetNull);
            });
        // Index supporting lookups of a job's children.
        migrationBuilder.CreateIndex(
            name: "IX_Jobs_ParentJobId",
            table: "Jobs",
            column: "ParentJobId");
    }

    /// <inheritdoc />
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropTable(
            name: "Jobs");
    }
}
}

View File

@@ -0,0 +1,83 @@
// <auto-generated />
using System;
using System.Collections.Generic;
using FictionArchive.Service.ReportingService.Services;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.ReportingService.Migrations
{
// NOTE(review): EF Core-generated model snapshot — kept in sync automatically by
// `dotnet ef migrations`; do not hand-edit.
[DbContext(typeof(ReportingDbContext))]
partial class ReportingDbContextModelSnapshot : ModelSnapshot
{
    protected override void BuildModel(ModelBuilder modelBuilder)
    {
#pragma warning disable 612, 618
        modelBuilder
            .HasAnnotation("ProductVersion", "9.0.11")
            .HasAnnotation("Relational:MaxIdentifierLength", 63);
        NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
        modelBuilder.Entity("FictionArchive.Service.ReportingService.Models.Job", b =>
        {
            b.Property<Guid>("Id")
                .ValueGeneratedOnAdd()
                .HasColumnType("uuid");
            b.Property<Instant>("CreatedTime")
                .HasColumnType("timestamp with time zone");
            b.Property<string>("DisplayName")
                .IsRequired()
                .HasColumnType("text");
            b.Property<string>("ErrorMessage")
                .HasColumnType("text");
            b.Property<string>("JobType")
                .IsRequired()
                .HasColumnType("text");
            b.Property<Instant>("LastUpdatedTime")
                .HasColumnType("timestamp with time zone");
            b.Property<Dictionary<string, string>>("Metadata")
                .HasColumnType("jsonb");
            b.Property<Guid?>("ParentJobId")
                .HasColumnType("uuid");
            b.Property<int>("Status")
                .HasColumnType("integer");
            b.HasKey("Id");
            b.HasIndex("ParentJobId");
            b.ToTable("Jobs");
        });
        modelBuilder.Entity("FictionArchive.Service.ReportingService.Models.Job", b =>
        {
            // Self-referencing parent/child relationship; deleting a parent nulls the link.
            b.HasOne("FictionArchive.Service.ReportingService.Models.Job", "ParentJob")
                .WithMany("ChildJobs")
                .HasForeignKey("ParentJobId")
                .OnDelete(DeleteBehavior.SetNull);
            b.Navigation("ParentJob");
        });
        modelBuilder.Entity("FictionArchive.Service.ReportingService.Models.Job", b =>
        {
            b.Navigation("ChildJobs");
        });
#pragma warning restore 612, 618
    }
}
}

View File

@@ -0,0 +1,18 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Models;
namespace FictionArchive.Service.ReportingService.Models;
/// <summary>
/// Reporting read-model row for a background job, upserted by
/// JobStatusUpdateConsumer from IJobStatusUpdate events. Jobs form a tree via
/// ParentJobId. Id and the audit timestamps come from BaseEntity&lt;Guid&gt;.
/// </summary>
public class Job : BaseEntity<Guid>
{
    /// <summary>Optional parent job id; null for root jobs.</summary>
    public Guid? ParentJobId { get; set; }
    /// <summary>Job category string supplied by the event producer; set once at creation.</summary>
    public string JobType { get; set; } = null!;
    /// <summary>Human-readable label; refreshed on every status event.</summary>
    public string DisplayName { get; set; } = null!;
    /// <summary>Latest reported status.</summary>
    public JobStatus Status { get; set; }
    /// <summary>Error text from the most recent event, if any.</summary>
    public string? ErrorMessage { get; set; }
    /// <summary>Key/value bag merged across events (stored as jsonb in PostgreSQL).</summary>
    public Dictionary<string, string>? Metadata { get; set; }
    // Navigation
    public Job? ParentJob { get; set; }
    public List<Job> ChildJobs { get; set; } = [];
}

View File

@@ -0,0 +1,79 @@
using FictionArchive.Common.Extensions;
using FictionArchive.Service.ReportingService.Consumers;
using FictionArchive.Service.ReportingService.Services;
using FictionArchive.Service.ReportingService.GraphQL;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
namespace FictionArchive.Service.ReportingService;
public class Program
{
    // Startup wiring for the ReportingService. Registration and middleware order
    // below is deliberate (auth before GraphQL mapping); left byte-identical.
    public static void Main(string[] args)
    {
        var builder = WebApplication.CreateBuilder(args);
        // Schema export mode (GraphQL CLI commands) must not touch the broker or database.
        var isSchemaExport = SchemaExportDetector.IsSchemaExportMode(args);
        builder.AddLocalAppsettings();
        builder.Services.AddHealthChecks();

        #region MassTransit
        if (!isSchemaExport)
        {
            // Registers the bus and this service's single consumer.
            builder.Services.AddFictionArchiveMassTransit(
                builder.Configuration,
                x =>
                {
                    x.AddConsumer<JobStatusUpdateConsumer>();
                });
        }
        #endregion

        #region GraphQL
        builder.Services.AddGraphQLServer()
            .AddQueryConventions()
            .AddTypeExtension(typeof(JobQueries))
            .ApplySaneDefaults()
            .AddAuthorization();
        #endregion

        #region Database
        builder.Services.RegisterDbContext<ReportingDbContext>(
            builder.Configuration.GetConnectionString("DefaultConnection"),
            skipInfrastructure: isSchemaExport);
        #endregion

        // Authentication & Authorization
        builder.Services.AddOidcAuthentication(builder.Configuration);
        builder.Services.AddFictionArchiveAuthorization();

        var app = builder.Build();

        // Update database (skip in schema export mode)
        if (!isSchemaExport)
        {
            using var scope = app.Services.CreateScope();
            var dbContext = scope.ServiceProvider.GetRequiredService<ReportingDbContext>();
            dbContext.UpdateDatabase();
        }

        app.UseHttpsRedirection();
        app.MapHealthChecks("/healthz");
        app.UseAuthentication();
        app.UseAuthorization();
        app.MapGraphQL();
        // Handles both normal hosting and GraphQL CLI commands (schema export).
        app.RunWithGraphQLCommands(args);
    }
}

View File

@@ -0,0 +1,23 @@
{
"$schema": "http://json.schemastore.org/launchsettings.json",
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "http://localhost:5140",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "https://localhost:7310;http://localhost:5140",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}

View File

@@ -0,0 +1,32 @@
using FictionArchive.Service.ReportingService.Models;
using FictionArchive.Service.Shared.Services.Database;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.ReportingService.Services;
public class ReportingDbContext : FictionArchiveDbContext
{
public DbSet<Job> Jobs { get; set; }
public ReportingDbContext(DbContextOptions options, ILogger<ReportingDbContext> logger) : base(options, logger)
{
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
modelBuilder.Entity<Job>(entity =>
{
entity.HasIndex(j => j.ParentJobId);
entity.Property(j => j.Metadata)
.HasColumnType("jsonb");
entity.HasOne(j => j.ParentJob)
.WithMany(j => j.ChildJobs)
.HasForeignKey(j => j.ParentJobId)
.OnDelete(DeleteBehavior.SetNull);
});
}
}

View File

@@ -0,0 +1,27 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore": "Warning"
}
},
"ConnectionStrings": {
"DefaultConnection": "Host=localhost;Database=FictionArchive_Reporting;Username=postgres;Password=postgres"
},
"RabbitMQ": {
"ConnectionString": "amqp://localhost",
"ClientIdentifier": "ReportingService"
},
"OIDC": {
"Authority": "https://auth.orfl.xyz/application/o/fiction-archive/",
"ClientId": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
"Audience": "ldi5IpEidq2WW0Ka1lehVskb2SOBjnYRaZCpEyBh",
"ValidIssuer": "https://auth.orfl.xyz/application/o/fiction-archive/",
"ValidateIssuer": true,
"ValidateAudience": true,
"ValidateLifetime": true,
"ValidateIssuerSigningKey": true
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,6 @@
{
"subgraph": "Reporting",
"http": {
"baseAddress": "http://localhost:5140/graphql"
}
}

View File

@@ -1,4 +1,4 @@
using FictionArchive.Service.Shared.Services.EventBus;
using MassTransit;
using Newtonsoft.Json;
using Quartz;
@@ -6,15 +6,15 @@ namespace FictionArchive.Service.SchedulerService.Models.JobTemplates;
public class EventJobTemplate : IJob
{
private readonly IEventBus _eventBus;
private readonly IBus _bus;
private readonly ILogger<EventJobTemplate> _logger;
public const string EventTypeParameter = "RoutingKey";
public const string EventDataParameter = "MessageData";
public EventJobTemplate(IEventBus eventBus, ILogger<EventJobTemplate> logger)
public EventJobTemplate(IBus bus, ILogger<EventJobTemplate> logger)
{
_eventBus = eventBus;
_bus = bus;
_logger = logger;
}
@@ -25,7 +25,7 @@ public class EventJobTemplate : IJob
var eventData = context.MergedJobDataMap.GetString(EventDataParameter);
var eventType = context.MergedJobDataMap.GetString(EventTypeParameter);
var eventObject = JsonConvert.DeserializeObject(eventData);
await _eventBus.Publish(eventObject, eventType);
await _bus.Publish(eventObject);
}
catch (Exception ex)
{

View File

@@ -2,7 +2,6 @@ using FictionArchive.Service.SchedulerService.GraphQL;
using FictionArchive.Service.SchedulerService.Services;
using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using Quartz;
using Quartz.Impl.AdoJobStore;
@@ -38,10 +37,7 @@ public class Program
if (!isSchemaExport)
{
builder.Services.AddRabbitMQ(opt =>
{
builder.Configuration.GetSection("RabbitMQ").Bind(opt);
});
builder.Services.AddFictionArchiveMassTransit(builder.Configuration);
}
#endregion

View File

@@ -1,7 +1,6 @@
using System.Data;
using FictionArchive.Service.SchedulerService.Models;
using FictionArchive.Service.SchedulerService.Models.JobTemplates;
using FictionArchive.Service.Shared.Services.EventBus;
using Quartz;
using Quartz.Impl.Matchers;

View File

@@ -2,7 +2,8 @@
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore": "Warning"
}
},
"RabbitMQ": {

View File

@@ -0,0 +1,11 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling that a chapter was created.
/// NOTE(review): publisher/consumers are outside this file — confirm usage.
/// </summary>
public interface IChapterCreated
{
    uint ChapterId { get; }
    uint NovelId { get; }
    uint VolumeId { get; }
    /// <summary>Position of the volume within the novel.</summary>
    uint VolumeOrder { get; }
    /// <summary>Position of the chapter within the volume.</summary>
    uint ChapterOrder { get; }
    string ChapterTitle { get; }
}

View File

@@ -0,0 +1,10 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling that a chapter pull for an import finished,
/// including how many image uploads were queued as part of it.
/// </summary>
public interface IChapterPullCompleted
{
    Guid ImportId { get; }
    uint ChapterId { get; }
    int ImagesQueued { get; }
}

/// <summary>Default publishable implementation of <see cref="IChapterPullCompleted"/>.</summary>
public record ChapterPullCompleted(Guid ImportId, uint ChapterId, int ImagesQueued) : IChapterPullCompleted;

View File

@@ -0,0 +1,11 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract requesting that a specific chapter (identified by novel,
/// volume and chapter order) be pulled as part of an import.
/// </summary>
public interface IChapterPullRequested
{
    Guid ImportId { get; }
    uint NovelId { get; }
    uint VolumeId { get; }
    uint ChapterOrder { get; }
}

/// <summary>Default publishable implementation of <see cref="IChapterPullRequested"/>.</summary>
public record ChapterPullRequested(Guid ImportId, uint NovelId, uint VolumeId, uint ChapterOrder) : IChapterPullRequested;

View File

@@ -0,0 +1,11 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract carrying a file to be uploaded. ImportId is optional since
/// uploads can occur outside an import.
/// NOTE(review): FileData embeds the full file bytes in the message — verify
/// broker message-size limits for large files.
/// </summary>
public interface IFileUploadRequestCreated
{
    Guid? ImportId { get; }
    Guid RequestId { get; }
    string FilePath { get; }
    byte[] FileData { get; }
}

/// <summary>Default publishable implementation of <see cref="IFileUploadRequestCreated"/>.</summary>
public record FileUploadRequestCreated(Guid? ImportId, Guid RequestId, string FilePath, byte[] FileData) : IFileUploadRequestCreated;

View File

@@ -0,0 +1,14 @@
using FictionArchive.Common.Enums;
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract reporting progress of a file upload request; on success
/// FileAccessUrl points at the stored file, on failure ErrorMessage is set.
/// </summary>
public interface IFileUploadRequestStatusUpdate
{
    Guid? ImportId { get; }
    Guid RequestId { get; }
    RequestStatus Status { get; }
    string? FileAccessUrl { get; }
    string? ErrorMessage { get; }
}

/// <summary>Default publishable implementation of <see cref="IFileUploadRequestStatusUpdate"/>.</summary>
public record FileUploadRequestStatusUpdate(Guid? ImportId, Guid RequestId, RequestStatus Status, string? FileAccessUrl, string? ErrorMessage) : IFileUploadRequestStatusUpdate;

View File

@@ -0,0 +1,23 @@
using FictionArchive.Common.Enums;
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract for background-job progress. Consumed by the ReportingService's
/// JobStatusUpdateConsumer, which upserts a Job row per JobId: JobType and
/// ParentJobId are taken from the first event only, while Status, DisplayName and
/// ErrorMessage track the latest event and Metadata keys are merged across events.
/// Publish via the JobStatusPublisher.ReportJobStatus extension.
/// </summary>
public interface IJobStatusUpdate
{
    Guid JobId { get; }
    Guid? ParentJobId { get; }
    string JobType { get; }
    string DisplayName { get; }
    JobStatus Status { get; }
    string? ErrorMessage { get; }
    Dictionary<string, string>? Metadata { get; }
}

/// <summary>Default publishable implementation of <see cref="IJobStatusUpdate"/>.</summary>
public record JobStatusUpdate(
    Guid JobId,
    Guid? ParentJobId,
    string JobType,
    string DisplayName,
    JobStatus Status,
    string? ErrorMessage,
    Dictionary<string, string>? Metadata) : IJobStatusUpdate;

View File

@@ -0,0 +1,12 @@
using FictionArchive.Common.Enums;
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling that a novel record was created.
/// NOTE(review): publisher/consumers are outside this file — confirm usage.
/// </summary>
public interface INovelCreated
{
    uint NovelId { get; }
    string Title { get; }
    Language OriginalLanguage { get; }
    /// <summary>Origin the novel was imported from.</summary>
    string Source { get; }
    string AuthorName { get; }
}

View File

@@ -0,0 +1,11 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling the end of a novel import; NovelId is null when the
/// import failed before a novel record existed, and ErrorMessage explains failures.
/// </summary>
public interface INovelImportCompleted
{
    Guid ImportId { get; }
    uint? NovelId { get; }
    bool Success { get; }
    string? ErrorMessage { get; }
}

/// <summary>Default publishable implementation of <see cref="INovelImportCompleted"/>.</summary>
public record NovelImportCompleted(Guid ImportId, uint? NovelId, bool Success, string? ErrorMessage) : INovelImportCompleted;

View File

@@ -0,0 +1,9 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract requesting that a novel be imported from the given URL;
/// ImportId correlates all follow-up events for this import.
/// </summary>
public interface INovelImportRequested
{
    Guid ImportId { get; }
    string NovelUrl { get; }
}

/// <summary>Default publishable implementation of <see cref="INovelImportRequested"/>.</summary>
public record NovelImportRequested(Guid ImportId, string NovelUrl) : INovelImportRequested;

View File

@@ -0,0 +1,11 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling that a novel's metadata was imported, with counts of
/// the follow-up work queued (chapter pulls, cover image upload).
/// </summary>
public interface INovelMetadataImported
{
    Guid ImportId { get; }
    uint NovelId { get; }
    int ChaptersPendingPull { get; }
    bool CoverImageQueued { get; }
}

/// <summary>Default publishable implementation of <see cref="INovelMetadataImported"/>.</summary>
public record NovelMetadataImported(Guid ImportId, uint NovelId, int ChaptersPendingPull, bool CoverImageQueued) : INovelMetadataImported;

View File

@@ -0,0 +1,7 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling that a translation request finished.
/// NOTE(review): both members are nullable — confirm what a null
/// TranslationRequestId or TranslatedText means to consumers.
/// </summary>
public interface ITranslationRequestCompleted
{
    Guid? TranslationRequestId { get; }
    string? TranslatedText { get; }
}

View File

@@ -0,0 +1,12 @@
using FictionArchive.Common.Enums;
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract requesting translation of a text body between two languages
/// using the engine identified by <see cref="TranslationEngineKey"/>.
/// </summary>
public interface ITranslationRequestCreated
{
    Guid TranslationRequestId { get; }
    Language From { get; }
    Language To { get; }
    /// <summary>Text to translate.</summary>
    string Body { get; }
    /// <summary>Key selecting which translation engine handles the request.</summary>
    string TranslationEngineKey { get; }
}

View File

@@ -0,0 +1,12 @@
namespace FictionArchive.Service.Shared.Contracts.Events;
/// <summary>
/// Event contract signaling that an existing user invited a new user, carrying
/// identity details of both sides (including their OAuth provider ids).
/// </summary>
public interface IUserInvited
{
    string InvitedUserId { get; }
    string InvitedUsername { get; }
    string InvitedEmail { get; }
    string InvitedOAuthProviderId { get; }
    string InviterId { get; }
    string InviterUsername { get; }
    string InviterOAuthProviderId { get; }
}

View File

@@ -6,6 +6,7 @@ using Microsoft.IdentityModel.Tokens;
using FictionArchive.Service.Shared.Constants;
using FictionArchive.Service.Shared.Models.Authentication;
using System.Linq;
using System.Security.Claims;
namespace FictionArchive.Service.Shared.Extensions;
@@ -78,7 +79,7 @@ public static class AuthenticationExtensions
logger.LogDebug(
"JWT token validated for subject: {Subject}",
context.Principal?.FindFirst("sub")?.Value ?? "unknown");
context.Principal?.FindFirst(ClaimTypes.NameIdentifier)?.Value ?? "unknown");
return existingEvents?.OnTokenValidated?.Invoke(context) ?? Task.CompletedTask;
}

View File

@@ -1,6 +1,7 @@
using FictionArchive.Service.Shared.Services.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Npgsql;
namespace FictionArchive.Service.Shared.Extensions;
@@ -21,9 +22,14 @@ public static class DatabaseExtensions
}
else
{
var dataSourceBuilder = new Npgsql.NpgsqlDataSourceBuilder(connectionString);
dataSourceBuilder.UseNodaTime();
dataSourceBuilder.UseJsonNet();
var dataSource = dataSourceBuilder.Build();
services.AddDbContext<TContext>(options =>
{
options.UseNpgsql(connectionString, o =>
options.UseNpgsql(dataSource, o =>
{
o.UseNodaTime();
});

View File

@@ -0,0 +1,20 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Contracts.Events;
using MassTransit;
namespace FictionArchive.Service.Shared.Extensions;
/// <summary>
/// Convenience extension for publishing <see cref="IJobStatusUpdate"/> events
/// with a consistent shape from any service.
/// </summary>
public static class JobStatusPublisher
{
    /// <summary>Publishes a job status event; optional fields default to null.</summary>
    public static Task ReportJobStatus(
        this IPublishEndpoint endpoint,
        Guid jobId,
        string jobType,
        string displayName,
        JobStatus status,
        Guid? parentJobId = null,
        string? errorMessage = null,
        Dictionary<string, string>? metadata = null)
    {
        var update = new JobStatusUpdate(
            jobId, parentJobId, jobType, displayName, status, errorMessage, metadata);
        return endpoint.Publish<IJobStatusUpdate>(update);
    }
}

View File

@@ -0,0 +1,118 @@
using System.Text.RegularExpressions;
using FictionArchive.Service.Shared.Services.Filters;
using MassTransit;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace FictionArchive.Service.Shared.Extensions;
public static class MassTransitExtensions
{
    /// <summary>Default AMQP port used when the configuration does not specify one.</summary>
    private const ushort DefaultAmqpPort = 5672;

    /// <summary>
    /// Registers MassTransit configured for the FictionArchive RabbitMQ broker:
    /// exponential message retry, the shared logging consume filter, prefetch of 1
    /// (one message at a time per consumer, matching the old EventBus behavior)
    /// and convention-based endpoint configuration.
    /// </summary>
    /// <param name="configureConsumers">Optional callback for registering consumers.</param>
    public static IServiceCollection AddFictionArchiveMassTransit(
        this IServiceCollection services,
        IConfiguration configuration,
        Action<IBusRegistrationConfigurator>? configureConsumers = null)
    {
        services.AddMassTransit(x =>
        {
            configureConsumers?.Invoke(x);
            x.UsingRabbitMq((context, cfg) =>
            {
                var (host, port, username, password) = ParseRabbitMqConfiguration(configuration);
                // "/" is RabbitMQ's default virtual host (previously implicit).
                cfg.Host(host, port, "/", h =>
                {
                    h.Username(username);
                    h.Password(password);
                });
                cfg.UseMessageRetry(r => r.Exponential(
                    retryLimit: 5,
                    minInterval: TimeSpan.FromSeconds(1),
                    maxInterval: TimeSpan.FromMinutes(1),
                    intervalDelta: TimeSpan.FromSeconds(2)));
                cfg.UseConsumeFilter(typeof(LoggingConsumeFilter<>), context);
                // Process one message at a time per consumer (matches old EventBus behavior)
                cfg.PrefetchCount = 1;
                cfg.ConfigureEndpoints(context);
            });
        });
        return services;
    }

    /// <summary>
    /// Parses RabbitMQ configuration from either ConnectionString format or separate
    /// Host/Port/Username/Password keys.
    /// ConnectionString format: amqp://[username:password@]host[:port]
    /// </summary>
    private static (string Host, ushort Port, string Username, string Password) ParseRabbitMqConfiguration(IConfiguration configuration)
    {
        var connectionString = configuration["RabbitMQ:ConnectionString"];
        if (!string.IsNullOrEmpty(connectionString))
        {
            return ParseConnectionString(connectionString);
        }
        // Fallback to separate configuration keys
        var host = configuration["RabbitMQ:Host"] ?? "localhost";
        var username = configuration["RabbitMQ:Username"] ?? "guest";
        var password = configuration["RabbitMQ:Password"] ?? "guest";
        var port = ushort.TryParse(configuration["RabbitMQ:Port"], out var parsedPort)
            ? parsedPort
            : DefaultAmqpPort;
        return (host, port, username, password);
    }

    /// <summary>
    /// Parses an AMQP connection string into host, port, username, and password components.
    /// Fix: the port was previously parsed out and then silently discarded, so brokers on
    /// non-default ports could not be reached; it is now honored (default 5672).
    /// Supports formats:
    /// - amqp://host
    /// - amqp://host:port
    /// - amqp://username:password@host
    /// - amqp://username:password@host:port
    /// </summary>
    private static (string Host, ushort Port, string Username, string Password) ParseConnectionString(string connectionString)
    {
        var username = "guest";
        var password = "guest";
        var host = "localhost";
        ushort port = DefaultAmqpPort;
        // Try to parse as URI first
        if (Uri.TryCreate(connectionString, UriKind.Absolute, out var uri))
        {
            host = uri.Host;
            // Uri.Port is -1 when the amqp connection string carries no explicit port.
            if (uri.Port > 0)
            {
                port = (ushort)uri.Port;
            }
            if (!string.IsNullOrEmpty(uri.UserInfo))
            {
                var userInfoParts = uri.UserInfo.Split(':', 2);
                username = Uri.UnescapeDataString(userInfoParts[0]);
                if (userInfoParts.Length > 1)
                {
                    password = Uri.UnescapeDataString(userInfoParts[1]);
                }
            }
        }
        else
        {
            // Fallback regex parsing for edge cases the Uri parser rejects
            var match = Regex.Match(connectionString, @"amqp://(?:([^:]+):([^@]+)@)?([^:/]+)(?::(\d+))?");
            if (match.Success)
            {
                if (match.Groups[1].Success && match.Groups[2].Success)
                {
                    username = match.Groups[1].Value;
                    password = match.Groups[2].Value;
                }
                if (match.Groups[3].Success)
                {
                    host = match.Groups[3].Value;
                }
                if (match.Groups[4].Success && ushort.TryParse(match.Groups[4].Value, out var regexPort))
                {
                    port = regexPort;
                }
            }
        }
        return (host, port, username, password);
    }
}

Some files were not shown because too many files have changed in this diff Show More