From 1ecfd9cc99a54b8c8f73fd768a714d9dca3843e7 Mon Sep 17 00:00:00 2001
From: gamer147
Date: Mon, 19 Jan 2026 15:13:14 -0500
Subject: [PATCH] [FA-27] Need to test events but seems to mostly work

---
 .../IntegrationEvents/ChapterCreatedEvent.cs  |  13 ++
 .../IntegrationEvents/NovelCreatedEvent.cs    |  13 ++
 .../Services/NovelUpdateService.cs            |  36 ++++
 ...19184741_AddNovelVolumeChapter.Designer.cs | 198 ++++++++++++++++++
 .../20260119184741_AddNovelVolumeChapter.cs   |  95 +++++++++
 ...rNovelDataServiceDbContextModelSnapshot.cs |  99 +++++++++
 .../Models/Database/Chapter.cs                |   3 +-
 .../Models/Database/Novel.cs                  |   2 +-
 .../Models/Database/Volume.cs                 |   4 +-
 .../IntegrationEvents/ChapterCreatedEvent.cs  |  13 ++
 .../IntegrationEvents/NovelCreatedEvent.cs    |  13 ++
 .../IntegrationEvents/UserInvitedEvent.cs     |  15 ++
 .../Program.cs                                |   7 +-
 .../Scripts/00_README.md                      |  93 ++++++++
 .../01_extract_users_from_userservice.sql     |  28 +++
 .../02_extract_novels_from_novelservice.sql   |  24 +++
 .../03_extract_volumes_from_novelservice.sql  |  26 +++
 .../04_extract_chapters_from_novelservice.sql |  26 +++
 ...5_insert_users_to_usernoveldataservice.sql |  32 +++
 ..._insert_novels_to_usernoveldataservice.sql |  31 +++
 ...insert_volumes_to_usernoveldataservice.sql |  34 +++
 ...nsert_chapters_to_usernoveldataservice.sql |  34 +++
 .../ChapterCreatedEventHandler.cs             |  53 +++++
 .../EventHandlers/NovelCreatedEventHandler.cs |  36 ++++
 .../EventHandlers/UserInvitedEventHandler.cs  |  40 ++++
 .../Services/UserNovelDataServiceDbContext.cs |   3 +
 26 files changed, 967 insertions(+), 4 deletions(-)
 create mode 100644 FictionArchive.Service.NovelService/Models/IntegrationEvents/ChapterCreatedEvent.cs
 create mode 100644 FictionArchive.Service.NovelService/Models/IntegrationEvents/NovelCreatedEvent.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.Designer.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/ChapterCreatedEvent.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/NovelCreatedEvent.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/UserInvitedEvent.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/00_README.md
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/01_extract_users_from_userservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/02_extract_novels_from_novelservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/03_extract_volumes_from_novelservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/04_extract_chapters_from_novelservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/06_insert_novels_to_usernoveldataservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/07_insert_volumes_to_usernoveldataservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Scripts/08_insert_chapters_to_usernoveldataservice.sql
 create mode 100644 FictionArchive.Service.UserNovelDataService/Services/EventHandlers/ChapterCreatedEventHandler.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Services/EventHandlers/NovelCreatedEventHandler.cs
 create mode 100644 FictionArchive.Service.UserNovelDataService/Services/EventHandlers/UserInvitedEventHandler.cs
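The commit message flags the event flow as untested; a minimal consumer-side test sketch follows for reference. It assumes xUnit and the `Microsoft.EntityFrameworkCore.InMemory` provider, plus the constructor shapes shown in the diff below — the test class, database name, and event values are illustrative, not part of this patch. If the base `FictionArchiveDbContext` pins Npgsql- or NodaTime-specific options, an in-memory Sqlite connection may be needed instead.

```csharp
using System;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using FictionArchive.Service.UserNovelDataService.Services;
using FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging.Abstractions;
using Xunit;

public class NovelCreatedEventHandlerTests
{
    [Fact]
    public async Task Handle_IsIdempotent_OnDuplicateDelivery()
    {
        // In-memory database stands in for PostgreSQL; sufficient for idempotency checks.
        var options = new DbContextOptionsBuilder<UserNovelDataServiceDbContext>()
            .UseInMemoryDatabase(databaseName: Guid.NewGuid().ToString())
            .Options;
        await using var dbContext = new UserNovelDataServiceDbContext(
            options, NullLogger<UserNovelDataServiceDbContext>.Instance);

        var handler = new NovelCreatedEventHandler(
            dbContext, NullLogger<NovelCreatedEventHandler>.Instance);
        var @event = new NovelCreatedEvent
        {
            NovelId = 42,
            Title = "Test Novel",
            OriginalLanguage = default, // any Language enum member works here
            Source = "test-source",
            AuthorName = "Test Author"
        };

        // Deliver the same event twice; the handler should create exactly one stub.
        await handler.Handle(@event);
        await handler.Handle(@event);

        Assert.Equal(1, await dbContext.Novels.CountAsync());
    }
}
```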
diff --git a/FictionArchive.Service.NovelService/Models/IntegrationEvents/ChapterCreatedEvent.cs b/FictionArchive.Service.NovelService/Models/IntegrationEvents/ChapterCreatedEvent.cs
new file mode 100644
index 0000000..3608c2f
--- /dev/null
+++ b/FictionArchive.Service.NovelService/Models/IntegrationEvents/ChapterCreatedEvent.cs
@@ -0,0 +1,13 @@
+using FictionArchive.Service.Shared.Services.EventBus;
+
+namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
+
+public class ChapterCreatedEvent : IIntegrationEvent
+{
+    public required uint ChapterId { get; init; }
+    public required uint NovelId { get; init; }
+    public required uint VolumeId { get; init; }
+    public required int VolumeOrder { get; init; }
+    public required uint ChapterOrder { get; init; }
+    public required string ChapterTitle { get; init; }
+}
diff --git a/FictionArchive.Service.NovelService/Models/IntegrationEvents/NovelCreatedEvent.cs b/FictionArchive.Service.NovelService/Models/IntegrationEvents/NovelCreatedEvent.cs
new file mode 100644
index 0000000..50ede95
--- /dev/null
+++ b/FictionArchive.Service.NovelService/Models/IntegrationEvents/NovelCreatedEvent.cs
@@ -0,0 +1,13 @@
+using FictionArchive.Common.Enums;
+using FictionArchive.Service.Shared.Services.EventBus;
+
+namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;
+
+public class NovelCreatedEvent : IIntegrationEvent
+{
+    public required uint NovelId { get; init; }
+    public required string Title { get; init; }
+    public required Language OriginalLanguage { get; init; }
+    public required string Source { get; init; }
+    public required string AuthorName { get; init; }
+}
diff --git a/FictionArchive.Service.NovelService/Services/NovelUpdateService.cs b/FictionArchive.Service.NovelService/Services/NovelUpdateService.cs
index e9e33be..d95e865 100644
--- a/FictionArchive.Service.NovelService/Services/NovelUpdateService.cs
+++ b/FictionArchive.Service.NovelService/Services/NovelUpdateService.cs
@@ -343,6 +343,12 @@ public class NovelUpdateService
         Novel novel;
         bool shouldPublishCoverEvent;
 
+        // Capture existing chapter IDs to detect new chapters later
+        var existingChapterIds = existingNovel?.Volumes
+            .SelectMany(v => v.Chapters)
+            .Select(c => c.Id)
+            .ToHashSet() ?? new HashSet<uint>();
+
         if (existingNovel == null)
         {
             // CREATE PATH: New novel
@@ -384,6 +390,36 @@ public class NovelUpdateService
 
         await _dbContext.SaveChangesAsync();
 
+        // Publish novel created event for new novels
+        if (existingNovel == null)
+        {
+            await _eventBus.Publish(new NovelCreatedEvent
+            {
+                NovelId = novel.Id,
+                Title = novel.Name.Texts.First(t => t.Language == novel.RawLanguage).Text,
+                OriginalLanguage = novel.RawLanguage,
+                Source = novel.Source.Key,
+                AuthorName = novel.Author.Name.Texts.First(t => t.Language == novel.RawLanguage).Text
+            });
+        }
+
+        // Publish chapter created events for new chapters
+        foreach (var volume in novel.Volumes)
+        {
+            foreach (var chapter in volume.Chapters.Where(c => !existingChapterIds.Contains(c.Id)))
+            {
+                await _eventBus.Publish(new ChapterCreatedEvent
+                {
+                    ChapterId = chapter.Id,
+                    NovelId = novel.Id,
+                    VolumeId = volume.Id,
+                    VolumeOrder = volume.Order,
+                    ChapterOrder = chapter.Order,
+                    ChapterTitle = chapter.Name.Texts.First(t => t.Language == novel.RawLanguage).Text
+                });
+            }
+        }
+
         // Publish cover image event if needed
         if (shouldPublishCoverEvent && novel.CoverImage != null && metadata.CoverImage != null)
         {
diff --git a/FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.Designer.cs b/FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.Designer.cs
new file mode 100644
index 0000000..eb6cd91
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.Designer.cs
@@ -0,0 +1,198 @@
+// <auto-generated />
+using System;
+using FictionArchive.Service.UserNovelDataService.Services;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using NodaTime;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+
+#nullable disable
+
+namespace FictionArchive.Service.UserNovelDataService.Migrations
+{
+    [DbContext(typeof(UserNovelDataServiceDbContext))]
+    [Migration("20260119184741_AddNovelVolumeChapter")]
+    partial class AddNovelVolumeChapter
+    {
+        /// <inheritdoc />
+        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        {
+#pragma warning disable 612, 618
+            modelBuilder
+                .HasAnnotation("ProductVersion", "9.0.11")
+                .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
+                {
+                    b.Property<int>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("integer");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("Id"));
+
+                    b.Property<uint>("ChapterId")
+                        .HasColumnType("bigint");
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<string>("Description")
+                        .HasColumnType("text");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<uint>("NovelId")
+                        .HasColumnType("bigint");
+
+                    b.Property<Guid>("UserId")
+                        .HasColumnType("uuid");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("UserId", "ChapterId")
+                        .IsUnique();
+
+                    b.HasIndex("UserId", "NovelId");
+
+                    b.ToTable("Bookmarks");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
+                {
+                    b.Property<uint>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("bigint");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<uint>("Id"));
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<uint>("VolumeId")
+                        .HasColumnType("bigint");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("VolumeId");
+
+                    b.ToTable("Chapters");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
+                {
+                    b.Property<uint>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("bigint");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<uint>("Id"));
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.HasKey("Id");
+
+                    b.ToTable("Novels");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
+                {
+                    b.Property<Guid>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("uuid");
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<string>("OAuthProviderId")
+                        .IsRequired()
+                        .HasColumnType("text");
+
+                    b.HasKey("Id");
+
+                    b.ToTable("Users");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
+                {
+                    b.Property<uint>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("bigint");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<uint>("Id"));
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<uint>("NovelId")
+                        .HasColumnType("bigint");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("NovelId");
+
+                    b.ToTable("Volumes");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
+                {
+                    b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
+                        .WithMany()
+                        .HasForeignKey("UserId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("User");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
+                {
+                    b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", "Volume")
+                        .WithMany("Chapters")
+                        .HasForeignKey("VolumeId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Volume");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
+                {
+                    b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", "Novel")
+                        .WithMany("Volumes")
+                        .HasForeignKey("NovelId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Novel");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
+                {
+                    b.Navigation("Volumes");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
+                {
+                    b.Navigation("Chapters");
+                });
+#pragma warning restore 612, 618
+        }
+    }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.cs b/FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.cs
new file mode 100644
index 0000000..ae3e0c4
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.cs
@@ -0,0 +1,95 @@
+using Microsoft.EntityFrameworkCore.Migrations;
+using NodaTime;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+
+#nullable disable
+
+namespace FictionArchive.Service.UserNovelDataService.Migrations
+{
+    /// <inheritdoc />
+    public partial class AddNovelVolumeChapter : Migration
+    {
+        /// <inheritdoc />
+        protected override void Up(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.CreateTable(
+                name: "Novels",
+                columns: table => new
+                {
+                    Id = table.Column<long>(type: "bigint", nullable: false)
+                        .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
+                    CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
+                    LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
+                },
+                constraints: table =>
+                {
+                    table.PrimaryKey("PK_Novels", x => x.Id);
+                });
+
+            migrationBuilder.CreateTable(
+                name: "Volumes",
+                columns: table => new
+                {
+                    Id = table.Column<long>(type: "bigint", nullable: false)
+                        .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
+                    NovelId = table.Column<long>(type: "bigint", nullable: false),
+                    CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
+                    LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
+                },
+                constraints: table =>
+                {
+                    table.PrimaryKey("PK_Volumes", x => x.Id);
+                    table.ForeignKey(
+                        name: "FK_Volumes_Novels_NovelId",
+                        column: x => x.NovelId,
+                        principalTable: "Novels",
+                        principalColumn: "Id",
+                        onDelete: ReferentialAction.Cascade);
+                });
+
+            migrationBuilder.CreateTable(
+                name: "Chapters",
+                columns: table => new
+                {
+                    Id = table.Column<long>(type: "bigint", nullable: false)
+                        .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
+                    VolumeId = table.Column<long>(type: "bigint", nullable: false),
+                    CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
+                    LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
+                },
+                constraints: table =>
+                {
+                    table.PrimaryKey("PK_Chapters", x => x.Id);
+                    table.ForeignKey(
+                        name: "FK_Chapters_Volumes_VolumeId",
+                        column: x => x.VolumeId,
+                        principalTable: "Volumes",
+                        principalColumn: "Id",
+                        onDelete: ReferentialAction.Cascade);
+                });
+
+            migrationBuilder.CreateIndex(
+                name: "IX_Chapters_VolumeId",
+                table: "Chapters",
+                column: "VolumeId");
+
+            migrationBuilder.CreateIndex(
+                name: "IX_Volumes_NovelId",
+                table: "Volumes",
+                column: "NovelId");
+        }
+
+        /// <inheritdoc />
+        protected override void Down(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.DropTable(
+                name: "Chapters");
+
+            migrationBuilder.DropTable(
+                name: "Volumes");
+
+            migrationBuilder.DropTable(
+                name: "Novels");
+        }
+    }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Migrations/UserNovelDataServiceDbContextModelSnapshot.cs b/FictionArchive.Service.UserNovelDataService/Migrations/UserNovelDataServiceDbContextModelSnapshot.cs
index 1e50402..507067f 100644
--- a/FictionArchive.Service.UserNovelDataService/Migrations/UserNovelDataServiceDbContextModelSnapshot.cs
+++ b/FictionArchive.Service.UserNovelDataService/Migrations/UserNovelDataServiceDbContextModelSnapshot.cs
@@ -59,6 +59,49 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
                     b.ToTable("Bookmarks");
                 });
 
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
+                {
+                    b.Property<uint>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("bigint");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<uint>("Id"));
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<uint>("VolumeId")
+                        .HasColumnType("bigint");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("VolumeId");
+
+                    b.ToTable("Chapters");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
+                {
+                    b.Property<uint>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("bigint");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<uint>("Id"));
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.HasKey("Id");
+
+                    b.ToTable("Novels");
+                });
+
             modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
                 {
                     b.Property<Guid>("Id")
@@ -80,6 +123,30 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
                     b.ToTable("Users");
                 });
 
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
+                {
+                    b.Property<uint>("Id")
+                        .ValueGeneratedOnAdd()
+                        .HasColumnType("bigint");
+
+                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<uint>("Id"));
+
+                    b.Property<Instant>("CreatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<Instant>("LastUpdatedTime")
+                        .HasColumnType("timestamp with time zone");
+
+                    b.Property<uint>("NovelId")
+                        .HasColumnType("bigint");
+
+                    b.HasKey("Id");
+
+                    b.HasIndex("NovelId");
+
+                    b.ToTable("Volumes");
+                });
+
             modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
                 {
                     b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
@@ -90,6 +157,38 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
 
                     b.Navigation("User");
                 });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
+                {
+                    b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", "Volume")
+                        .WithMany("Chapters")
+                        .HasForeignKey("VolumeId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Volume");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
+                {
+                    b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", "Novel")
+                        .WithMany("Volumes")
+                        .HasForeignKey("NovelId")
+                        .OnDelete(DeleteBehavior.Cascade)
+                        .IsRequired();
+
+                    b.Navigation("Novel");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
+                {
+                    b.Navigation("Volumes");
+                });
+
+            modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
+                {
+                    b.Navigation("Chapters");
+                });
 #pragma warning restore 612, 618
         }
     }
diff --git a/FictionArchive.Service.UserNovelDataService/Models/Database/Chapter.cs b/FictionArchive.Service.UserNovelDataService/Models/Database/Chapter.cs
index 1cc7e91..189a7d5 100644
--- a/FictionArchive.Service.UserNovelDataService/Models/Database/Chapter.cs
+++ b/FictionArchive.Service.UserNovelDataService/Models/Database/Chapter.cs
@@ -4,5 +4,6 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
 
 public class Chapter : BaseEntity
 {
-    
+    public uint VolumeId { get; set; }
+    public virtual Volume Volume { get; set; } = null!;
 }
\ No newline at end of file
diff --git a/FictionArchive.Service.UserNovelDataService/Models/Database/Novel.cs b/FictionArchive.Service.UserNovelDataService/Models/Database/Novel.cs
index fc979d8..36b0ea2 100644
--- a/FictionArchive.Service.UserNovelDataService/Models/Database/Novel.cs
+++ b/FictionArchive.Service.UserNovelDataService/Models/Database/Novel.cs
@@ -4,5 +4,5 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
 
 public class Novel : BaseEntity
 {
-    public virtual ICollection<Volume> Volumes { get; set; }
+    public virtual ICollection<Volume> Volumes { get; set; } = new List<Volume>();
 }
\ No newline at end of file
diff --git a/FictionArchive.Service.UserNovelDataService/Models/Database/Volume.cs b/FictionArchive.Service.UserNovelDataService/Models/Database/Volume.cs
index f5b0293..b371ba2 100644
--- a/FictionArchive.Service.UserNovelDataService/Models/Database/Volume.cs
+++ b/FictionArchive.Service.UserNovelDataService/Models/Database/Volume.cs
@@ -4,5 +4,7 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
 
 public class Volume : BaseEntity
 {
-    public virtual ICollection<Chapter> Chapters { get; set; }
+    public uint NovelId { get; set; }
+    public virtual Novel Novel { get; set; } = null!;
+    public virtual ICollection<Chapter> Chapters { get; set; } = new List<Chapter>();
 }
\ No newline at end of file
diff --git a/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/ChapterCreatedEvent.cs b/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/ChapterCreatedEvent.cs
new file mode 100644
index 0000000..2591f68
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/ChapterCreatedEvent.cs
@@ -0,0 +1,13 @@
+using FictionArchive.Service.Shared.Services.EventBus;
+
+namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
+
+public class ChapterCreatedEvent : IIntegrationEvent
+{
+    public required uint ChapterId { get; init; }
+    public required uint NovelId { get; init; }
+    public required uint VolumeId { get; init; }
+    public required int VolumeOrder { get; init; }
+    public required uint ChapterOrder { get; init; }
+    public required string ChapterTitle { get; init; }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/NovelCreatedEvent.cs b/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/NovelCreatedEvent.cs
new file mode 100644
index 0000000..f55c349
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/NovelCreatedEvent.cs
@@ -0,0 +1,13 @@
+using FictionArchive.Common.Enums;
+using FictionArchive.Service.Shared.Services.EventBus;
+
+namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
+
+public class NovelCreatedEvent : IIntegrationEvent
+{
+    public required uint NovelId { get; init; }
+    public required string Title { get; init; }
+    public required Language OriginalLanguage { get; init; }
+    public required string Source { get; init; }
+    public required string AuthorName { get; init; }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/UserInvitedEvent.cs b/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/UserInvitedEvent.cs
new file mode 100644
index 0000000..609f029
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Models/IntegrationEvents/UserInvitedEvent.cs
@@ -0,0 +1,15 @@
+using FictionArchive.Service.Shared.Services.EventBus;
+
+namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
+
+public class UserInvitedEvent : IIntegrationEvent
+{
+    public Guid InvitedUserId { get; set; }
+    public required string InvitedUsername { get; set; }
+    public required string InvitedEmail { get; set; }
+    public required string InvitedOAuthProviderId { get; set; }
+
+    public Guid InviterId { get; set; }
+    public required string InviterUsername { get; set; }
+    public required string InviterOAuthProviderId { get; set; }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Program.cs b/FictionArchive.Service.UserNovelDataService/Program.cs
index 166735f..d14cc0e 100644
--- a/FictionArchive.Service.UserNovelDataService/Program.cs
+++ b/FictionArchive.Service.UserNovelDataService/Program.cs
@@ -3,7 +3,9 @@ using FictionArchive.Service.Shared;
 using FictionArchive.Service.Shared.Extensions;
 using FictionArchive.Service.Shared.Services.EventBus.Implementations;
 using FictionArchive.Service.UserNovelDataService.GraphQL;
+using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
 using FictionArchive.Service.UserNovelDataService.Services;
+using FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
 
 namespace FictionArchive.Service.UserNovelDataService;
 
@@ -27,7 +29,10 @@ public class Program
         builder.Services.AddRabbitMQ(opt =>
         {
             builder.Configuration.GetSection("RabbitMQ").Bind(opt);
-        });
+        })
+            .Subscribe<UserInvitedEvent, UserInvitedEventHandler>()
+            .Subscribe<NovelCreatedEvent, NovelCreatedEventHandler>()
+            .Subscribe<ChapterCreatedEvent, ChapterCreatedEventHandler>();
     }
 
     #endregion
diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/00_README.md b/FictionArchive.Service.UserNovelDataService/Scripts/00_README.md
new file mode 100644
index 0000000..1db2939
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Scripts/00_README.md
@@ -0,0 +1,93 @@
+# UserNovelDataService Backfill Scripts
+
+SQL scripts for backfilling data from UserService and NovelService into UserNovelDataService.
+
+## Prerequisites
+
+1. **Run EF migrations** on the UserNovelDataService database to ensure all tables exist:
+   ```bash
+   dotnet ef database update --project FictionArchive.Service.UserNovelDataService
+   ```
+
+   This applies the `AddNovelVolumeChapter` migration, which creates:
+   - `Novels` table (Id, CreatedTime, LastUpdatedTime)
+   - `Volumes` table (Id, NovelId FK, CreatedTime, LastUpdatedTime)
+   - `Chapters` table (Id, VolumeId FK, CreatedTime, LastUpdatedTime)
+
+## Execution Order
+
+Run the scripts in numeric order:
+
+### Extraction (run against the source databases)
+1. `01_extract_users_from_userservice.sql` - Run against the **UserService** DB
+2. `02_extract_novels_from_novelservice.sql` - Run against the **NovelService** DB
+3. `03_extract_volumes_from_novelservice.sql` - Run against the **NovelService** DB
+4. `04_extract_chapters_from_novelservice.sql` - Run against the **NovelService** DB
+
+### Insertion (run against the UserNovelDataService database)
+5. `05_insert_users_to_usernoveldataservice.sql`
+6. `06_insert_novels_to_usernoveldataservice.sql`
+7. `07_insert_volumes_to_usernoveldataservice.sql`
+8. `08_insert_chapters_to_usernoveldataservice.sql`
+
+## Methods
+
+Each script provides three options:
+
+1. **SELECT for review** - Review the data before exporting
+2. **Generate INSERT statements** - Creates individual INSERT statements (good for small datasets)
+3. **CSV export/import** - Use PostgreSQL `\copy` for bulk operations (recommended for large datasets)
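+For reference, Option 2 emits one statement per row. A hypothetical line of its output for the Users table looks like this (the values are illustrative only):
+
+```sql
+INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime")
+VALUES ('6f1d2e3c-0000-0000-0000-000000000001', 'google|123456', '2025-06-01 00:00:00+00', '2025-06-01 00:00:00+00')
+ON CONFLICT ("Id") DO NOTHING;
+```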
+## Example Workflow
+
+### Using CSV Export/Import (Recommended)
+
+```bash
+# 1. Export from the source databases
+psql -h localhost -U postgres -d userservice -c "\copy (SELECT \"Id\", \"OAuthProviderId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Users\" WHERE \"Disabled\" = false) TO '/tmp/users_export.csv' WITH CSV HEADER"
+
+psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Novels\") TO '/tmp/novels_export.csv' WITH CSV HEADER"
+
+psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"NovelId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Volume\" ORDER BY \"NovelId\", \"Id\") TO '/tmp/volumes_export.csv' WITH CSV HEADER"
+
+psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"VolumeId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Chapter\" ORDER BY \"VolumeId\", \"Id\") TO '/tmp/chapters_export.csv' WITH CSV HEADER"
+
+# 2. Import into UserNovelDataService (order matters due to FK constraints!)
+psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Users\" (\"Id\", \"OAuthProviderId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/users_export.csv' WITH CSV HEADER"
+
+psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Novels\" (\"Id\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/novels_export.csv' WITH CSV HEADER"
+
+psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Volumes\" (\"Id\", \"NovelId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/volumes_export.csv' WITH CSV HEADER"
+
+psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Chapters\" (\"Id\", \"VolumeId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/chapters_export.csv' WITH CSV HEADER"
+```
+
+**Important**: Insert order matters because of the foreign key constraints:
+1. Users (no dependencies)
+2. Novels (no dependencies)
+3. Volumes (depends on Novels)
+4. Chapters (depends on Volumes)
+
+### Using dblink (Cross-database queries)
+
+If both databases are on the same PostgreSQL server, you can use the `dblink` extension for direct cross-database inserts. See the commented examples in each insert script, and the sketch below.
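+A minimal sketch of the pattern (the connection string is a placeholder; adjust host, credentials, and database names to your environment):
+
+```sql
+CREATE EXTENSION IF NOT EXISTS dblink;
+
+-- Count the source rows across databases before running the real inserts
+SELECT COUNT(*) AS source_novels
+FROM dblink(
+    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
+    'SELECT "Id" FROM "Novels"'
+) AS t("Id" bigint);
+```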
+
+## Verification
+
+After running the backfill, verify that the counts match:
+
+```sql
+-- Run on the UserService DB
+SELECT COUNT(*) AS user_count FROM "Users" WHERE "Disabled" = false;
+
+-- Run on the NovelService DB
+SELECT COUNT(*) AS novel_count FROM "Novels";
+SELECT COUNT(*) AS volume_count FROM "Volume";
+SELECT COUNT(*) AS chapter_count FROM "Chapter";
+
+-- Run on the UserNovelDataService DB
+SELECT COUNT(*) AS user_count FROM "Users";
+SELECT COUNT(*) AS novel_count FROM "Novels";
+SELECT COUNT(*) AS volume_count FROM "Volumes";
+SELECT COUNT(*) AS chapter_count FROM "Chapters";
+```
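+To spot-check the target-side counts without copy-pasting each query, a small shell loop works (connection flags as in the examples above):
+
+```bash
+for t in Users Novels Volumes Chapters; do
+  printf '%s: ' "$t"
+  psql -h localhost -U postgres -d usernoveldataservice -t -A -c "SELECT COUNT(*) FROM \"$t\";"
+done
+```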
"NovelId", + "CreatedTime", + "LastUpdatedTime" +FROM "Volume" +ORDER BY "NovelId", "Id"; + +-- Option 2: Generate INSERT statements +SELECT format( + 'INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") VALUES (%s, %s, %L, %L) ON CONFLICT ("Id") DO NOTHING;', + "Id", + "NovelId", + "CreatedTime", + "LastUpdatedTime" +) +FROM "Volume" +ORDER BY "NovelId", "Id"; + +-- Option 3: Export to CSV (run from psql) +-- \copy (SELECT "Id", "NovelId", "CreatedTime", "LastUpdatedTime" FROM "Volume" ORDER BY "NovelId", "Id") TO '/tmp/volumes_export.csv' WITH CSV HEADER; diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/04_extract_chapters_from_novelservice.sql b/FictionArchive.Service.UserNovelDataService/Scripts/04_extract_chapters_from_novelservice.sql new file mode 100644 index 0000000..30886b7 --- /dev/null +++ b/FictionArchive.Service.UserNovelDataService/Scripts/04_extract_chapters_from_novelservice.sql @@ -0,0 +1,26 @@ +-- Extract Chapters from NovelService database +-- Run this against: NovelService PostgreSQL database +-- Output: CSV or use COPY TO for bulk export + +-- Option 1: Simple SELECT for review/testing +SELECT + "Id", + "VolumeId", + "CreatedTime", + "LastUpdatedTime" +FROM "Chapter" +ORDER BY "VolumeId", "Id"; + +-- Option 2: Generate INSERT statements +SELECT format( + 'INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") VALUES (%s, %s, %L, %L) ON CONFLICT ("Id") DO NOTHING;', + "Id", + "VolumeId", + "CreatedTime", + "LastUpdatedTime" +) +FROM "Chapter" +ORDER BY "VolumeId", "Id"; + +-- Option 3: Export to CSV (run from psql) +-- \copy (SELECT "Id", "VolumeId", "CreatedTime", "LastUpdatedTime" FROM "Chapter" ORDER BY "VolumeId", "Id") TO '/tmp/chapters_export.csv' WITH CSV HEADER; diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql b/FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql new file mode 100644 index 0000000..fa5ae5c --- /dev/null +++ b/FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql @@ -0,0 +1,32 @@ +-- Insert Users into UserNovelDataService database +-- Run this against: UserNovelDataService PostgreSQL database +-- +-- PREREQUISITE: You must have extracted users from UserService first +-- using 01_extract_users_from_userservice.sql + +-- Option 1: If you have a CSV file from export +-- \copy "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/users_export.csv' WITH CSV HEADER; + +-- Option 2: Direct cross-database insert using dblink +-- First, install dblink extension if not already done: +-- CREATE EXTENSION IF NOT EXISTS dblink; + +-- Example using dblink (adjust connection string): +/* +INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") +SELECT + "Id"::uuid, + "OAuthProviderId", + "CreatedTime"::timestamp with time zone, + "LastUpdatedTime"::timestamp with time zone +FROM dblink( + 'host=localhost port=5432 dbname=userservice user=postgres password=yourpassword', + 'SELECT "Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime" FROM "Users" WHERE "Disabled" = false' +) AS t("Id" uuid, "OAuthProviderId" text, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone) +ON CONFLICT ("Id") DO UPDATE SET + "OAuthProviderId" = EXCLUDED."OAuthProviderId", + "LastUpdatedTime" = EXCLUDED."LastUpdatedTime"; +*/ + +-- Option 3: Paste generated INSERT statements from extraction script here 
diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql b/FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql
new file mode 100644
index 0000000..fa5ae5c
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Scripts/05_insert_users_to_usernoveldataservice.sql
@@ -0,0 +1,32 @@
+-- Insert Users into the UserNovelDataService database
+-- Run this against: UserNovelDataService PostgreSQL database
+--
+-- PREREQUISITE: You must have extracted users from UserService first
+-- using 01_extract_users_from_userservice.sql
+
+-- Option 1: If you have a CSV file from the export
+-- \copy "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/users_export.csv' WITH CSV HEADER;
+
+-- Option 2: Direct cross-database insert using dblink
+-- First, install the dblink extension if not already done:
+-- CREATE EXTENSION IF NOT EXISTS dblink;
+
+-- Example using dblink (adjust the connection string):
+/*
+INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime")
+SELECT
+    "Id"::uuid,
+    "OAuthProviderId",
+    "CreatedTime"::timestamp with time zone,
+    "LastUpdatedTime"::timestamp with time zone
+FROM dblink(
+    'host=localhost port=5432 dbname=userservice user=postgres password=yourpassword',
+    'SELECT "Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime" FROM "Users" WHERE "Disabled" = false'
+) AS t("Id" uuid, "OAuthProviderId" text, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
+ON CONFLICT ("Id") DO UPDATE SET
+    "OAuthProviderId" = EXCLUDED."OAuthProviderId",
+    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
+*/
+
+-- Option 3: Paste generated INSERT statements from the extraction script here
+-- INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/06_insert_novels_to_usernoveldataservice.sql b/FictionArchive.Service.UserNovelDataService/Scripts/06_insert_novels_to_usernoveldataservice.sql
new file mode 100644
index 0000000..bf9b307
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Scripts/06_insert_novels_to_usernoveldataservice.sql
@@ -0,0 +1,31 @@
+-- Insert Novels into the UserNovelDataService database
+-- Run this against: UserNovelDataService PostgreSQL database
+--
+-- PREREQUISITE:
+-- 1. Ensure the Novels table exists (run EF migrations first if needed)
+-- 2. Extract novels from NovelService using 02_extract_novels_from_novelservice.sql
+
+-- Option 1: If you have a CSV file from the export
+-- \copy "Novels" ("Id", "CreatedTime", "LastUpdatedTime") FROM '/tmp/novels_export.csv' WITH CSV HEADER;
+
+-- Option 2: Direct cross-database insert using dblink
+-- First, install the dblink extension if not already done:
+-- CREATE EXTENSION IF NOT EXISTS dblink;
+
+-- Example using dblink (adjust the connection string):
+/*
+INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime")
+SELECT
+    "Id"::bigint,
+    "CreatedTime"::timestamp with time zone,
+    "LastUpdatedTime"::timestamp with time zone
+FROM dblink(
+    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
+    'SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM "Novels"'
+) AS t("Id" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
+ON CONFLICT ("Id") DO UPDATE SET
+    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
+*/
+
+-- Option 3: Paste generated INSERT statements from the extraction script here
+-- INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
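+
+-- Alternative (a sketch): stage the CSV first, then upsert, so repeated runs stay idempotent
+-- CREATE TEMP TABLE novels_staging (LIKE "Novels");
+-- \copy novels_staging ("Id", "CreatedTime", "LastUpdatedTime") FROM '/tmp/novels_export.csv' WITH CSV HEADER
+-- INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime")
+--     SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM novels_staging
+--     ON CONFLICT ("Id") DO NOTHING;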
diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/07_insert_volumes_to_usernoveldataservice.sql b/FictionArchive.Service.UserNovelDataService/Scripts/07_insert_volumes_to_usernoveldataservice.sql
new file mode 100644
index 0000000..36af9f3
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Scripts/07_insert_volumes_to_usernoveldataservice.sql
@@ -0,0 +1,34 @@
+-- Insert Volumes into the UserNovelDataService database
+-- Run this against: UserNovelDataService PostgreSQL database
+--
+-- PREREQUISITE:
+-- 1. Ensure the Volumes table exists (run EF migrations first if needed)
+-- 2. Novels must be inserted first (FK constraint)
+-- 3. Extract volumes from NovelService using 03_extract_volumes_from_novelservice.sql
+
+-- Option 1: If you have a CSV file from the export
+-- \copy "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/volumes_export.csv' WITH CSV HEADER;
+
+-- Option 2: Direct cross-database insert using dblink
+-- First, install the dblink extension if not already done:
+-- CREATE EXTENSION IF NOT EXISTS dblink;
+
+-- Example using dblink (adjust the connection string):
+/*
+INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime")
+SELECT
+    "Id"::bigint,
+    "NovelId"::bigint,
+    "CreatedTime"::timestamp with time zone,
+    "LastUpdatedTime"::timestamp with time zone
+FROM dblink(
+    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
+    'SELECT "Id", "NovelId", "CreatedTime", "LastUpdatedTime" FROM "Volume"'
+) AS t("Id" bigint, "NovelId" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
+ON CONFLICT ("Id") DO UPDATE SET
+    "NovelId" = EXCLUDED."NovelId",
+    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
+*/
+
+-- Option 3: Paste generated INSERT statements from the extraction script here
+-- INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
diff --git a/FictionArchive.Service.UserNovelDataService/Scripts/08_insert_chapters_to_usernoveldataservice.sql b/FictionArchive.Service.UserNovelDataService/Scripts/08_insert_chapters_to_usernoveldataservice.sql
new file mode 100644
index 0000000..5f03100
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Scripts/08_insert_chapters_to_usernoveldataservice.sql
@@ -0,0 +1,34 @@
+-- Insert Chapters into the UserNovelDataService database
+-- Run this against: UserNovelDataService PostgreSQL database
+--
+-- PREREQUISITE:
+-- 1. Ensure the Chapters table exists (run EF migrations first if needed)
+-- 2. Volumes must be inserted first (FK constraint)
+-- 3. Extract chapters from NovelService using 04_extract_chapters_from_novelservice.sql
+
+-- Option 1: If you have a CSV file from the export
+-- \copy "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/chapters_export.csv' WITH CSV HEADER;
+
+-- Option 2: Direct cross-database insert using dblink
+-- First, install the dblink extension if not already done:
+-- CREATE EXTENSION IF NOT EXISTS dblink;
+
+-- Example using dblink (adjust the connection string):
+/*
+INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime")
+SELECT
+    "Id"::bigint,
+    "VolumeId"::bigint,
+    "CreatedTime"::timestamp with time zone,
+    "LastUpdatedTime"::timestamp with time zone
+FROM dblink(
+    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
+    'SELECT "Id", "VolumeId", "CreatedTime", "LastUpdatedTime" FROM "Chapter"'
+) AS t("Id" bigint, "VolumeId" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
+ON CONFLICT ("Id") DO UPDATE SET
+    "VolumeId" = EXCLUDED."VolumeId",
+    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
+*/
+
+-- Option 3: Paste generated INSERT statements from the extraction script here
+-- INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
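+
+-- After a bulk load with explicit IDs, consider advancing the identity sequence so any
+-- future sequence-generated inserts cannot collide (a sketch; repeat for "Novels" and
+-- "Volumes" — pg_get_serial_sequence also resolves identity-column sequences):
+-- SELECT setval(pg_get_serial_sequence('"Chapters"', 'Id'), COALESCE((SELECT MAX("Id") FROM "Chapters"), 1));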
diff --git a/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/ChapterCreatedEventHandler.cs b/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/ChapterCreatedEventHandler.cs
new file mode 100644
index 0000000..74f46f7
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/ChapterCreatedEventHandler.cs
@@ -0,0 +1,53 @@
+using FictionArchive.Service.Shared.Services.EventBus;
+using FictionArchive.Service.UserNovelDataService.Models.Database;
+using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
+using Microsoft.EntityFrameworkCore;
+
+namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
+
+public class ChapterCreatedEventHandler : IIntegrationEventHandler<ChapterCreatedEvent>
+{
+    private readonly UserNovelDataServiceDbContext _dbContext;
+    private readonly ILogger<ChapterCreatedEventHandler> _logger;
+
+    public ChapterCreatedEventHandler(
+        UserNovelDataServiceDbContext dbContext,
+        ILogger<ChapterCreatedEventHandler> logger)
+    {
+        _dbContext = dbContext;
+        _logger = logger;
+    }
+
+    public async Task Handle(ChapterCreatedEvent @event)
+    {
+        // Ensure the novel exists
+        var novelExists = await _dbContext.Novels.AnyAsync(n => n.Id == @event.NovelId);
+        if (!novelExists)
+        {
+            var novel = new Novel { Id = @event.NovelId };
+            _dbContext.Novels.Add(novel);
+        }
+
+        // Ensure the volume exists; NovelId must be set or the FK insert fails
+        var volumeExists = await _dbContext.Volumes.AnyAsync(v => v.Id == @event.VolumeId);
+        if (!volumeExists)
+        {
+            var volume = new Volume { Id = @event.VolumeId, NovelId = @event.NovelId };
+            _dbContext.Volumes.Add(volume);
+        }
+
+        // Create the chapter if it does not already exist
+        var chapterExists = await _dbContext.Chapters.AnyAsync(c => c.Id == @event.ChapterId);
+        if (chapterExists)
+        {
+            _logger.LogDebug("Chapter {ChapterId} already exists, skipping", @event.ChapterId);
+            return;
+        }
+
+        var chapter = new Chapter { Id = @event.ChapterId, VolumeId = @event.VolumeId };
+        _dbContext.Chapters.Add(chapter);
+        await _dbContext.SaveChangesAsync();
+
+        _logger.LogInformation("Created chapter stub for {ChapterId} in novel {NovelId}", @event.ChapterId, @event.NovelId);
+    }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/NovelCreatedEventHandler.cs b/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/NovelCreatedEventHandler.cs
new file mode 100644
index 0000000..1e47531
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/NovelCreatedEventHandler.cs
@@ -0,0 +1,36 @@
+using FictionArchive.Service.Shared.Services.EventBus;
+using FictionArchive.Service.UserNovelDataService.Models.Database;
+using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
+using Microsoft.EntityFrameworkCore;
+
+namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
+
+public class NovelCreatedEventHandler : IIntegrationEventHandler<NovelCreatedEvent>
+{
+    private readonly UserNovelDataServiceDbContext _dbContext;
+    private readonly ILogger<NovelCreatedEventHandler> _logger;
+
+    public NovelCreatedEventHandler(
+        UserNovelDataServiceDbContext dbContext,
+        ILogger<NovelCreatedEventHandler> logger)
+    {
+        _dbContext = dbContext;
+        _logger = logger;
+    }
+
+    public async Task Handle(NovelCreatedEvent @event)
+    {
+        var exists = await _dbContext.Novels.AnyAsync(n => n.Id == @event.NovelId);
+        if (exists)
+        {
+            _logger.LogDebug("Novel {NovelId} already exists, skipping", @event.NovelId);
+            return;
+        }
+
+        var novel = new Novel { Id = @event.NovelId };
+        _dbContext.Novels.Add(novel);
+        await _dbContext.SaveChangesAsync();
+
+        _logger.LogInformation("Created novel stub for {NovelId}", @event.NovelId);
+    }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/UserInvitedEventHandler.cs b/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/UserInvitedEventHandler.cs
new file mode 100644
index 0000000..a48a2c8
--- /dev/null
+++ b/FictionArchive.Service.UserNovelDataService/Services/EventHandlers/UserInvitedEventHandler.cs
@@ -0,0 +1,40 @@
+using FictionArchive.Service.Shared.Services.EventBus;
+using FictionArchive.Service.UserNovelDataService.Models.Database;
+using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
+using Microsoft.EntityFrameworkCore;
+
+namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
+
+public class UserInvitedEventHandler : IIntegrationEventHandler<UserInvitedEvent>
+{
+    private readonly UserNovelDataServiceDbContext _dbContext;
+    private readonly ILogger<UserInvitedEventHandler> _logger;
+
+    public UserInvitedEventHandler(
+        UserNovelDataServiceDbContext dbContext,
+        ILogger<UserInvitedEventHandler> logger)
+    {
+        _dbContext = dbContext;
+        _logger = logger;
+    }
+
+    public async Task Handle(UserInvitedEvent @event)
+    {
+        var exists = await _dbContext.Users.AnyAsync(u => u.Id == @event.InvitedUserId);
+        if (exists)
+        {
+            _logger.LogDebug("User {UserId} already exists, skipping", @event.InvitedUserId);
+            return;
+        }
+
+        var user = new User
+        {
+            Id = @event.InvitedUserId,
+            OAuthProviderId = @event.InvitedOAuthProviderId
+        };
+        _dbContext.Users.Add(user);
+        await _dbContext.SaveChangesAsync();
+
+        _logger.LogInformation("Created user stub for {UserId}", @event.InvitedUserId);
+    }
+}
diff --git a/FictionArchive.Service.UserNovelDataService/Services/UserNovelDataServiceDbContext.cs b/FictionArchive.Service.UserNovelDataService/Services/UserNovelDataServiceDbContext.cs
index 38eb08c..289eb48 100644
--- a/FictionArchive.Service.UserNovelDataService/Services/UserNovelDataServiceDbContext.cs
+++ b/FictionArchive.Service.UserNovelDataService/Services/UserNovelDataServiceDbContext.cs
@@ -8,6 +8,9 @@ public class UserNovelDataServiceDbContext : FictionArchiveDbContext
 {
     public DbSet<User> Users { get; set; }
     public DbSet<Bookmark> Bookmarks { get; set; }
+    public DbSet<Novel> Novels { get; set; }
+    public DbSet<Volume> Volumes { get; set; }
+    public DbSet<Chapter> Chapters { get; set; }
 
     public UserNovelDataServiceDbContext(DbContextOptions<UserNovelDataServiceDbContext> options, ILogger<UserNovelDataServiceDbContext> logger) : base(options, logger)
     {