[FA-27] Need to test events, but it seems to mostly work

This commit is contained in:
gamer147
2026-01-19 15:13:14 -05:00
parent 19ae4a8089
commit 1ecfd9cc99
26 changed files with 967 additions and 4 deletions

View File

@@ -0,0 +1,198 @@
// <auto-generated />
using System;
using FictionArchive.Service.UserNovelDataService.Services;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.UserNovelDataService.Migrations
{
[DbContext(typeof(UserNovelDataServiceDbContext))]
[Migration("20260119184741_AddNovelVolumeChapter")]
partial class AddNovelVolumeChapter
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.11")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("integer");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("Id"));
b.Property<long>("ChapterId")
.HasColumnType("bigint");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("Description")
.HasColumnType("text");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.Property<Guid>("UserId")
.HasColumnType("uuid");
b.HasKey("Id");
b.HasIndex("UserId", "ChapterId")
.IsUnique();
b.HasIndex("UserId", "NovelId");
b.ToTable("Bookmarks");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("VolumeId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapters");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.HasKey("Id");
b.ToTable("Novels");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid");
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<string>("OAuthProviderId")
.IsRequired()
.HasColumnType("text");
b.HasKey("Id");
b.ToTable("Users");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("NovelId");
b.ToTable("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
{
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("User");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
{
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
{
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", "Novel")
.WithMany("Volumes")
.HasForeignKey("NovelId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Novel");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,95 @@
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace FictionArchive.Service.UserNovelDataService.Migrations
{
/// <inheritdoc />
public partial class AddNovelVolumeChapter : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "Novels",
columns: table => new
{
Id = table.Column<long>(type: "bigint", nullable: false)
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Novels", x => x.Id);
});
migrationBuilder.CreateTable(
name: "Volumes",
columns: table => new
{
Id = table.Column<long>(type: "bigint", nullable: false)
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
NovelId = table.Column<long>(type: "bigint", nullable: false),
CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Volumes", x => x.Id);
table.ForeignKey(
name: "FK_Volumes_Novels_NovelId",
column: x => x.NovelId,
principalTable: "Novels",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Chapters",
columns: table => new
{
Id = table.Column<long>(type: "bigint", nullable: false)
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
VolumeId = table.Column<long>(type: "bigint", nullable: false),
CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Chapters", x => x.Id);
table.ForeignKey(
name: "FK_Chapters_Volumes_VolumeId",
column: x => x.VolumeId,
principalTable: "Volumes",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_Chapters_VolumeId",
table: "Chapters",
column: "VolumeId");
migrationBuilder.CreateIndex(
name: "IX_Volumes_NovelId",
table: "Volumes",
column: "NovelId");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "Chapters");
migrationBuilder.DropTable(
name: "Volumes");
migrationBuilder.DropTable(
name: "Novels");
}
}
}

View File

@@ -59,6 +59,49 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
b.ToTable("Bookmarks");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("VolumeId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapters");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.HasKey("Id");
b.ToTable("Novels");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
{
b.Property<Guid>("Id")
@@ -80,6 +123,30 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
b.ToTable("Users");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
b.Property<Instant>("CreatedTime")
.HasColumnType("timestamp with time zone");
b.Property<Instant>("LastUpdatedTime")
.HasColumnType("timestamp with time zone");
b.Property<long>("NovelId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("NovelId");
b.ToTable("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
{
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
@@ -90,6 +157,38 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
b.Navigation("User");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
{
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
{
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", "Novel")
.WithMany("Volumes")
.HasForeignKey("NovelId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Novel");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
{
b.Navigation("Volumes");
});
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}

View File

@@ -4,5 +4,6 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
public class Chapter : BaseEntity<uint>
{
public uint VolumeId { get; set; }
public virtual Volume Volume { get; set; } = null!;
}

View File

@@ -4,5 +4,5 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
public class Novel : BaseEntity<uint>
{
public virtual ICollection<Volume> Volumes { get; set; }
public virtual ICollection<Volume> Volumes { get; set; } = new List<Volume>();
}

View File

@@ -4,5 +4,7 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
public class Volume : BaseEntity<uint>
{
public virtual ICollection<Chapter> Chapters { get; set; }
public uint NovelId { get; set; }
public virtual Novel Novel { get; set; } = null!;
public virtual ICollection<Chapter> Chapters { get; set; } = new List<Chapter>();
}

View File

@@ -0,0 +1,13 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
public class ChapterCreatedEvent : IIntegrationEvent
{
public required uint ChapterId { get; init; }
public required uint NovelId { get; init; }
public required uint VolumeId { get; init; }
public required int VolumeOrder { get; init; }
public required uint ChapterOrder { get; init; }
public required string ChapterTitle { get; init; }
}

View File

@@ -0,0 +1,13 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
public class NovelCreatedEvent : IIntegrationEvent
{
public required uint NovelId { get; init; }
public required string Title { get; init; }
public required Language OriginalLanguage { get; init; }
public required string Source { get; init; }
public required string AuthorName { get; init; }
}

View File

@@ -0,0 +1,15 @@
using FictionArchive.Service.Shared.Services.EventBus;
namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
public class UserInvitedEvent : IIntegrationEvent
{
public Guid InvitedUserId { get; set; }
public required string InvitedUsername { get; set; }
public required string InvitedEmail { get; set; }
public required string InvitedOAuthProviderId { get; set; }
public Guid InviterId { get; set; }
public required string InviterUsername { get; set; }
public required string InviterOAuthProviderId { get; set; }
}

View File

@@ -3,7 +3,9 @@ using FictionArchive.Service.Shared;
using FictionArchive.Service.Shared.Extensions;
using FictionArchive.Service.Shared.Services.EventBus.Implementations;
using FictionArchive.Service.UserNovelDataService.GraphQL;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using FictionArchive.Service.UserNovelDataService.Services;
using FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
namespace FictionArchive.Service.UserNovelDataService;
@@ -27,7 +29,10 @@ public class Program
builder.Services.AddRabbitMQ(opt =>
{
builder.Configuration.GetSection("RabbitMQ").Bind(opt);
});
})
.Subscribe<NovelCreatedEvent, NovelCreatedEventHandler>()
.Subscribe<ChapterCreatedEvent, ChapterCreatedEventHandler>()
.Subscribe<UserInvitedEvent, UserInvitedEventHandler>();
}
#endregion
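Note: the `Subscribe<TEvent, THandler>()` calls above bind each integration event to its handler type. The handler contract itself is not part of this diff; judging from the implementations added below, its shape is presumably something like the following sketch (the marker interface and the generic constraint are assumptions — the real definitions live in FictionArchive.Service.Shared.Services.EventBus):

```csharp
using System.Threading.Tasks;

// Presumed shapes only -- inferred from this commit's events and handlers,
// not confirmed by the diff.
public interface IIntegrationEvent { }

public interface IIntegrationEventHandler<TEvent> where TEvent : IIntegrationEvent
{
    Task Handle(TEvent @event);
}
```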

View File

@@ -0,0 +1,93 @@
# UserNovelDataService Backfill Scripts
SQL scripts for backfilling data from UserService and NovelService into UserNovelDataService.
## Prerequisites
1. **Run EF migrations** on the UserNovelDataService database to ensure all tables exist:
```bash
dotnet ef database update --project FictionArchive.Service.UserNovelDataService
```
This will apply the `AddNovelVolumeChapter` migration, which creates:
- `Novels` table (Id, CreatedTime, LastUpdatedTime)
- `Volumes` table (Id, NovelId FK, CreatedTime, LastUpdatedTime)
- `Chapters` table (Id, VolumeId FK, CreatedTime, LastUpdatedTime)
## Execution Order
Run scripts in numeric order:
### Extraction (run against source databases)
1. `01_extract_users_from_userservice.sql` - Run against **UserService** DB
2. `02_extract_novels_from_novelservice.sql` - Run against **NovelService** DB
3. `03_extract_volumes_from_novelservice.sql` - Run against **NovelService** DB
4. `04_extract_chapters_from_novelservice.sql` - Run against **NovelService** DB
### Insertion (run against UserNovelDataService database)
5. `05_insert_users_to_usernoveldataservice.sql`
6. `06_insert_novels_to_usernoveldataservice.sql`
7. `07_insert_volumes_to_usernoveldataservice.sql`
8. `08_insert_chapters_to_usernoveldataservice.sql`
## Methods
Each script provides three options:
1. **SELECT for review** - Review data before export
2. **Generate INSERT statements** - Creates individual INSERT statements (good for small datasets)
3. **CSV export/import** - Use psql's `\copy` for bulk operations (recommended for large datasets)
## Example Workflow
### Using CSV Export/Import (Recommended)
```bash
# 1. Export from source databases
psql -h localhost -U postgres -d userservice -c "\copy (SELECT \"Id\", \"OAuthProviderId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Users\" WHERE \"Disabled\" = false) TO '/tmp/users_export.csv' WITH CSV HEADER"
psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Novels\") TO '/tmp/novels_export.csv' WITH CSV HEADER"
psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"NovelId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Volume\" ORDER BY \"NovelId\", \"Id\") TO '/tmp/volumes_export.csv' WITH CSV HEADER"
psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"VolumeId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Chapter\" ORDER BY \"VolumeId\", \"Id\") TO '/tmp/chapters_export.csv' WITH CSV HEADER"
# 2. Import into UserNovelDataService (order matters due to FK constraints!)
psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Users\" (\"Id\", \"OAuthProviderId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/users_export.csv' WITH CSV HEADER"
psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Novels\" (\"Id\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/novels_export.csv' WITH CSV HEADER"
psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Volumes\" (\"Id\", \"NovelId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/volumes_export.csv' WITH CSV HEADER"
psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Chapters\" (\"Id\", \"VolumeId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/chapters_export.csv' WITH CSV HEADER"
```
**Important**: Insert order matters due to foreign key constraints:
1. Users (no dependencies)
2. Novels (no dependencies)
3. Volumes (depends on Novels)
4. Chapters (depends on Volumes)

Running the imports with `psql -v ON_ERROR_STOP=1` makes a failed `\copy` abort the session instead of silently continuing past the error.
### Using dblink (Cross-database queries)
If both databases are on the same PostgreSQL server, you can use the `dblink` extension for direct cross-database inserts. See the commented examples in each insert script.
## Verification
After running the backfill, verify that the row counts match across the source and target databases:
```sql
-- Run on UserService DB
SELECT COUNT(*) as user_count FROM "Users" WHERE "Disabled" = false;
-- Run on NovelService DB
SELECT COUNT(*) as novel_count FROM "Novels";
SELECT COUNT(*) as volume_count FROM "Volume";
SELECT COUNT(*) as chapter_count FROM "Chapter";
-- Run on UserNovelDataService DB
SELECT COUNT(*) as user_count FROM "Users";
SELECT COUNT(*) as novel_count FROM "Novels";
SELECT COUNT(*) as volume_count FROM "Volumes";
SELECT COUNT(*) as chapter_count FROM "Chapters";
```

View File

@@ -0,0 +1,28 @@
-- Extract Users from UserService database
-- Run this against: UserService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export
-- Option 1: Simple SELECT for review/testing
SELECT
"Id",
"OAuthProviderId",
"CreatedTime",
"LastUpdatedTime"
FROM "Users"
WHERE "Disabled" = false
ORDER BY "CreatedTime";
-- Option 2: Generate INSERT statements (useful for small datasets)
SELECT format(
'INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") VALUES (%L, %L, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
"Id",
"OAuthProviderId",
"CreatedTime",
"LastUpdatedTime"
)
FROM "Users"
WHERE "Disabled" = false
ORDER BY "CreatedTime";
-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime" FROM "Users" WHERE "Disabled" = false ORDER BY "CreatedTime") TO '/tmp/users_export.csv' WITH CSV HEADER;

View File

@@ -0,0 +1,24 @@
-- Extract Novels from NovelService database
-- Run this against: NovelService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export
-- Option 1: Simple SELECT for review/testing
SELECT
"Id",
"CreatedTime",
"LastUpdatedTime"
FROM "Novels"
ORDER BY "Id";
-- Option 2: Generate INSERT statements
SELECT format(
'INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime") VALUES (%s, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
"Id",
"CreatedTime",
"LastUpdatedTime"
)
FROM "Novels"
ORDER BY "Id";
-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM "Novels" ORDER BY "Id") TO '/tmp/novels_export.csv' WITH CSV HEADER;

View File

@@ -0,0 +1,26 @@
-- Extract Volumes from NovelService database
-- Run this against: NovelService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export
-- Option 1: Simple SELECT for review/testing
SELECT
"Id",
"NovelId",
"CreatedTime",
"LastUpdatedTime"
FROM "Volume"
ORDER BY "NovelId", "Id";
-- Option 2: Generate INSERT statements
SELECT format(
'INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") VALUES (%s, %s, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
"Id",
"NovelId",
"CreatedTime",
"LastUpdatedTime"
)
FROM "Volume"
ORDER BY "NovelId", "Id";
-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "NovelId", "CreatedTime", "LastUpdatedTime" FROM "Volume" ORDER BY "NovelId", "Id") TO '/tmp/volumes_export.csv' WITH CSV HEADER;

View File

@@ -0,0 +1,26 @@
-- Extract Chapters from NovelService database
-- Run this against: NovelService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export
-- Option 1: Simple SELECT for review/testing
SELECT
"Id",
"VolumeId",
"CreatedTime",
"LastUpdatedTime"
FROM "Chapter"
ORDER BY "VolumeId", "Id";
-- Option 2: Generate INSERT statements
SELECT format(
'INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") VALUES (%s, %s, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
"Id",
"VolumeId",
"CreatedTime",
"LastUpdatedTime"
)
FROM "Chapter"
ORDER BY "VolumeId", "Id";
-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "VolumeId", "CreatedTime", "LastUpdatedTime" FROM "Chapter" ORDER BY "VolumeId", "Id") TO '/tmp/chapters_export.csv' WITH CSV HEADER;

View File

@@ -0,0 +1,32 @@
-- Insert Users into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE: You must have extracted users from UserService first
-- using 01_extract_users_from_userservice.sql
-- Option 1: If you have a CSV file from export
-- \copy "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/users_export.csv' WITH CSV HEADER;
-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;
-- Example using dblink (adjust connection string):
/*
INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime")
SELECT
"Id"::uuid,
"OAuthProviderId",
"CreatedTime"::timestamp with time zone,
"LastUpdatedTime"::timestamp with time zone
FROM dblink(
'host=localhost port=5432 dbname=userservice user=postgres password=yourpassword',
'SELECT "Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime" FROM "Users" WHERE "Disabled" = false'
) AS t("Id" uuid, "OAuthProviderId" text, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
"OAuthProviderId" = EXCLUDED."OAuthProviderId",
"LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/
-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;

View File

@@ -0,0 +1,31 @@
-- Insert Novels into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE:
-- 1. Ensure the Novels table exists (run EF migrations first if needed)
-- 2. Extract novels from NovelService using 02_extract_novels_from_novelservice.sql
-- Option 1: If you have a CSV file from export
-- \copy "Novels" ("Id", "CreatedTime", "LastUpdatedTime") FROM '/tmp/novels_export.csv' WITH CSV HEADER;
-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;
-- Example using dblink (adjust connection string):
/*
INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime")
SELECT
"Id"::bigint,
"CreatedTime"::timestamp with time zone,
"LastUpdatedTime"::timestamp with time zone
FROM dblink(
'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
'SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM "Novels"'
) AS t("Id" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
"LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/
-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;

View File

@@ -0,0 +1,34 @@
-- Insert Volumes into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE:
-- 1. Ensure the Volumes table exists (run EF migrations first if needed)
-- 2. Novels must be inserted first (FK constraint)
-- 3. Extract volumes from NovelService using 03_extract_volumes_from_novelservice.sql
-- Option 1: If you have a CSV file from export
-- \copy "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/volumes_export.csv' WITH CSV HEADER;
-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;
-- Example using dblink (adjust connection string):
/*
INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime")
SELECT
"Id"::bigint,
"NovelId"::bigint,
"CreatedTime"::timestamp with time zone,
"LastUpdatedTime"::timestamp with time zone
FROM dblink(
'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
'SELECT "Id", "NovelId", "CreatedTime", "LastUpdatedTime" FROM "Volume"'
) AS t("Id" bigint, "NovelId" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
"NovelId" = EXCLUDED."NovelId",
"LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/
-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;

View File

@@ -0,0 +1,34 @@
-- Insert Chapters into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE:
-- 1. Ensure the Chapters table exists (run EF migrations first if needed)
-- 2. Volumes must be inserted first (FK constraint)
-- 3. Extract chapters from NovelService using 04_extract_chapters_from_novelservice.sql
-- Option 1: If you have a CSV file from export
-- \copy "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/chapters_export.csv' WITH CSV HEADER;
-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;
-- Example using dblink (adjust connection string):
/*
INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime")
SELECT
"Id"::bigint,
"VolumeId"::bigint,
"CreatedTime"::timestamp with time zone,
"LastUpdatedTime"::timestamp with time zone
FROM dblink(
'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
'SELECT "Id", "VolumeId", "CreatedTime", "LastUpdatedTime" FROM "Chapter"'
) AS t("Id" bigint, "VolumeId" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
"VolumeId" = EXCLUDED."VolumeId",
"LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/
-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;

View File

@@ -0,0 +1,53 @@
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
public class ChapterCreatedEventHandler : IIntegrationEventHandler<ChapterCreatedEvent>
{
private readonly UserNovelDataServiceDbContext _dbContext;
private readonly ILogger<ChapterCreatedEventHandler> _logger;
public ChapterCreatedEventHandler(
UserNovelDataServiceDbContext dbContext,
ILogger<ChapterCreatedEventHandler> logger)
{
_dbContext = dbContext;
_logger = logger;
}
public async Task Handle(ChapterCreatedEvent @event)
{
// Ensure novel exists
var novelExists = await _dbContext.Novels.AnyAsync(n => n.Id == @event.NovelId);
if (!novelExists)
{
var novel = new Novel { Id = @event.NovelId };
_dbContext.Novels.Add(novel);
}
// Ensure volume exists
var volumeExists = await _dbContext.Volumes.AnyAsync(v => v.Id == @event.VolumeId);
if (!volumeExists)
{
// Set NovelId so the stub satisfies the FK to Novels
var volume = new Volume { Id = @event.VolumeId, NovelId = @event.NovelId };
_dbContext.Volumes.Add(volume);
}
// Create chapter if not exists
var chapterExists = await _dbContext.Chapters.AnyAsync(c => c.Id == @event.ChapterId);
if (chapterExists)
{
_logger.LogDebug("Chapter {ChapterId} already exists, skipping", @event.ChapterId);
return;
}
// Set VolumeId so the stub satisfies the FK to Volumes
var chapter = new Chapter { Id = @event.ChapterId, VolumeId = @event.VolumeId };
_dbContext.Chapters.Add(chapter);
await _dbContext.SaveChangesAsync();
_logger.LogInformation("Created chapter stub for {ChapterId} in novel {NovelId}", @event.ChapterId, @event.NovelId);
}
}
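Since the commit message flags that the events still need testing, here is a minimal xUnit sketch for the handler above. It assumes the `Microsoft.EntityFrameworkCore.InMemory` provider can stand in for Postgres against the shared base context; if the NodaTime/Npgsql mappings are wired up at runtime, a Postgres test container would be needed instead. Test name and values are hypothetical.

```csharp
using System;
using System.Threading.Tasks;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using FictionArchive.Service.UserNovelDataService.Services;
using FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging.Abstractions;
using Xunit;

public class ChapterCreatedEventHandlerTests
{
    [Fact]
    public async Task Handle_CreatesStubs_WithForeignKeysSet()
    {
        // InMemory provider assumed; FK constraints are not enforced by it,
        // so the FK values are asserted explicitly below.
        var options = new DbContextOptionsBuilder<UserNovelDataServiceDbContext>()
            .UseInMemoryDatabase(Guid.NewGuid().ToString())
            .Options;
        await using var db = new UserNovelDataServiceDbContext(
            options, NullLogger<UserNovelDataServiceDbContext>.Instance);
        var handler = new ChapterCreatedEventHandler(
            db, NullLogger<ChapterCreatedEventHandler>.Instance);

        await handler.Handle(new ChapterCreatedEvent
        {
            ChapterId = 10,
            NovelId = 1,
            VolumeId = 5,
            VolumeOrder = 1,
            ChapterOrder = 1,
            ChapterTitle = "Prologue"
        });

        // Novel and volume stubs plus the chapter should exist, with FKs set.
        Assert.True(await db.Novels.AnyAsync(n => n.Id == 1));
        Assert.True(await db.Volumes.AnyAsync(v => v.Id == 5 && v.NovelId == 1));
        Assert.True(await db.Chapters.AnyAsync(c => c.Id == 10 && c.VolumeId == 5));
    }
}
```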

View File

@@ -0,0 +1,36 @@
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
public class NovelCreatedEventHandler : IIntegrationEventHandler<NovelCreatedEvent>
{
private readonly UserNovelDataServiceDbContext _dbContext;
private readonly ILogger<NovelCreatedEventHandler> _logger;
public NovelCreatedEventHandler(
UserNovelDataServiceDbContext dbContext,
ILogger<NovelCreatedEventHandler> logger)
{
_dbContext = dbContext;
_logger = logger;
}
public async Task Handle(NovelCreatedEvent @event)
{
var exists = await _dbContext.Novels.AnyAsync(n => n.Id == @event.NovelId);
if (exists)
{
_logger.LogDebug("Novel {NovelId} already exists, skipping", @event.NovelId);
return;
}
var novel = new Novel { Id = @event.NovelId };
_dbContext.Novels.Add(novel);
await _dbContext.SaveChangesAsync();
_logger.LogInformation("Created novel stub for {NovelId}", @event.NovelId);
}
}

View File

@@ -0,0 +1,40 @@
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using Microsoft.EntityFrameworkCore;
namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
public class UserInvitedEventHandler : IIntegrationEventHandler<UserInvitedEvent>
{
private readonly UserNovelDataServiceDbContext _dbContext;
private readonly ILogger<UserInvitedEventHandler> _logger;
public UserInvitedEventHandler(
UserNovelDataServiceDbContext dbContext,
ILogger<UserInvitedEventHandler> logger)
{
_dbContext = dbContext;
_logger = logger;
}
public async Task Handle(UserInvitedEvent @event)
{
var exists = await _dbContext.Users.AnyAsync(u => u.Id == @event.InvitedUserId);
if (exists)
{
_logger.LogDebug("User {UserId} already exists, skipping", @event.InvitedUserId);
return;
}
var user = new User
{
Id = @event.InvitedUserId,
OAuthProviderId = @event.InvitedOAuthProviderId
};
_dbContext.Users.Add(user);
await _dbContext.SaveChangesAsync();
_logger.LogInformation("Created user stub for {UserId}", @event.InvitedUserId);
}
}

View File

@@ -8,6 +8,9 @@ public class UserNovelDataServiceDbContext : FictionArchiveDbContext
{
public DbSet<User> Users { get; set; }
public DbSet<Bookmark> Bookmarks { get; set; }
public DbSet<Novel> Novels { get; set; }
public DbSet<Volume> Volumes { get; set; }
public DbSet<Chapter> Chapters { get; set; }
public UserNovelDataServiceDbContext(DbContextOptions options, ILogger<UserNovelDataServiceDbContext> logger) : base(options, logger)
{
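For reference, a hypothetical read-side helper (not part of this commit) showing how the new `DbSet`s and navigations compose; `NovelQueries`, `GetNovelTreeAsync`, and its signature are illustrative only:

```csharp
using System.Threading.Tasks;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Services;
using Microsoft.EntityFrameworkCore;

public static class NovelQueries
{
    // Eager-loads a novel's volume/chapter tree through the navigations
    // added in this commit.
    public static Task<Novel?> GetNovelTreeAsync(
        UserNovelDataServiceDbContext db, uint novelId)
    {
        return db.Novels
            .Include(n => n.Volumes)
            .ThenInclude(v => v.Chapters)
            .FirstOrDefaultAsync(n => n.Id == novelId);
    }
}
```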