Compare commits
3 Commits: f8a45ad891 ... c97654631b

| Author | SHA1 | Date |
|---|---|---|
| | c97654631b | |
| | 1ecfd9cc99 | |
| | 19ae4a8089 | |
.gitignore (vendored): 3 additions

@@ -140,3 +140,6 @@ appsettings.Local.json
 schema.graphql
 *.fsp
 gateway.fgp
+
+# Git worktrees
+.worktrees/
@@ -0,0 +1,13 @@
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;

public class ChapterCreatedEvent : IIntegrationEvent
{
    public required uint ChapterId { get; init; }
    public required uint NovelId { get; init; }
    public required uint VolumeId { get; init; }
    public required int VolumeOrder { get; init; }
    public required uint ChapterOrder { get; init; }
    public required string ChapterTitle { get; init; }
}
@@ -0,0 +1,13 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.NovelService.Models.IntegrationEvents;

public class NovelCreatedEvent : IIntegrationEvent
{
    public required uint NovelId { get; init; }
    public required string Title { get; init; }
    public required Language OriginalLanguage { get; init; }
    public required string Source { get; init; }
    public required string AuthorName { get; init; }
}
@@ -343,6 +343,12 @@ public class NovelUpdateService
         Novel novel;
         bool shouldPublishCoverEvent;
 
+        // Capture existing chapter IDs to detect new chapters later
+        var existingChapterIds = existingNovel?.Volumes
+            .SelectMany(v => v.Chapters)
+            .Select(c => c.Id)
+            .ToHashSet() ?? new HashSet<uint>();
+
         if (existingNovel == null)
         {
             // CREATE PATH: New novel
@@ -384,6 +390,36 @@ public class NovelUpdateService
 
         await _dbContext.SaveChangesAsync();
 
+        // Publish novel created event for new novels
+        if (existingNovel == null)
+        {
+            await _eventBus.Publish(new NovelCreatedEvent
+            {
+                NovelId = novel.Id,
+                Title = novel.Name.Texts.First(t => t.Language == novel.RawLanguage).Text,
+                OriginalLanguage = novel.RawLanguage,
+                Source = novel.Source.Key,
+                AuthorName = novel.Author.Name.Texts.First(t => t.Language == novel.RawLanguage).Text
+            });
+        }
+
+        // Publish chapter created events for new chapters
+        foreach (var volume in novel.Volumes)
+        {
+            foreach (var chapter in volume.Chapters.Where(c => !existingChapterIds.Contains(c.Id)))
+            {
+                await _eventBus.Publish(new ChapterCreatedEvent
+                {
+                    ChapterId = chapter.Id,
+                    NovelId = novel.Id,
+                    VolumeId = volume.Id,
+                    VolumeOrder = volume.Order,
+                    ChapterOrder = chapter.Order,
+                    ChapterTitle = chapter.Name.Texts.First(t => t.Language == novel.RawLanguage).Text
+                });
+            }
+        }
+
         // Publish cover image event if needed
         if (shouldPublishCoverEvent && novel.CoverImage != null && metadata.CoverImage != null)
         {
|||||||
198
FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.Designer.cs
generated
Normal file
198
FictionArchive.Service.UserNovelDataService/Migrations/20260119184741_AddNovelVolumeChapter.Designer.cs
generated
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
// <auto-generated />
|
||||||
|
using System;
|
||||||
|
using FictionArchive.Service.UserNovelDataService.Services;
|
||||||
|
using Microsoft.EntityFrameworkCore;
|
||||||
|
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||||
|
using Microsoft.EntityFrameworkCore.Migrations;
|
||||||
|
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||||
|
using NodaTime;
|
||||||
|
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||||
|
|
||||||
|
#nullable disable
|
||||||
|
|
||||||
|
namespace FictionArchive.Service.UserNovelDataService.Migrations
|
||||||
|
{
|
||||||
|
[DbContext(typeof(UserNovelDataServiceDbContext))]
|
||||||
|
[Migration("20260119184741_AddNovelVolumeChapter")]
|
||||||
|
partial class AddNovelVolumeChapter
|
||||||
|
{
|
||||||
|
/// <inheritdoc />
|
||||||
|
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||||
|
{
|
||||||
|
#pragma warning disable 612, 618
|
||||||
|
modelBuilder
|
||||||
|
.HasAnnotation("ProductVersion", "9.0.11")
|
||||||
|
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||||
|
|
||||||
|
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
|
||||||
|
{
|
||||||
|
b.Property<int>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("integer");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("Id"));
|
||||||
|
|
||||||
|
b.Property<long>("ChapterId")
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<string>("Description")
|
||||||
|
.HasColumnType("text");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<long>("NovelId")
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
b.Property<Guid>("UserId")
|
||||||
|
.HasColumnType("uuid");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.HasIndex("UserId", "ChapterId")
|
||||||
|
.IsUnique();
|
||||||
|
|
||||||
|
b.HasIndex("UserId", "NovelId");
|
||||||
|
|
||||||
|
b.ToTable("Bookmarks");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
|
||||||
|
{
|
||||||
|
b.Property<long>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<long>("VolumeId")
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.HasIndex("VolumeId");
|
||||||
|
|
||||||
|
b.ToTable("Chapters");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
|
||||||
|
{
|
||||||
|
b.Property<long>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.ToTable("Novels");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
|
||||||
|
{
|
||||||
|
b.Property<Guid>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("uuid");
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<string>("OAuthProviderId")
|
||||||
|
.IsRequired()
|
||||||
|
.HasColumnType("text");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.ToTable("Users");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
|
||||||
|
{
|
||||||
|
b.Property<long>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<long>("NovelId")
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.HasIndex("NovelId");
|
||||||
|
|
||||||
|
b.ToTable("Volumes");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
|
||||||
|
{
|
||||||
|
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
|
||||||
|
.WithMany()
|
||||||
|
.HasForeignKey("UserId")
|
||||||
|
.OnDelete(DeleteBehavior.Cascade)
|
||||||
|
.IsRequired();
|
||||||
|
|
||||||
|
b.Navigation("User");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
|
||||||
|
{
|
||||||
|
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", "Volume")
|
||||||
|
.WithMany("Chapters")
|
||||||
|
.HasForeignKey("VolumeId")
|
||||||
|
.OnDelete(DeleteBehavior.Cascade)
|
||||||
|
.IsRequired();
|
||||||
|
|
||||||
|
b.Navigation("Volume");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
|
||||||
|
{
|
||||||
|
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", "Novel")
|
||||||
|
.WithMany("Volumes")
|
||||||
|
.HasForeignKey("NovelId")
|
||||||
|
.OnDelete(DeleteBehavior.Cascade)
|
||||||
|
.IsRequired();
|
||||||
|
|
||||||
|
b.Navigation("Novel");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
|
||||||
|
{
|
||||||
|
b.Navigation("Volumes");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
|
||||||
|
{
|
||||||
|
b.Navigation("Chapters");
|
||||||
|
});
|
||||||
|
#pragma warning restore 612, 618
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,95 @@
using Microsoft.EntityFrameworkCore.Migrations;
using NodaTime;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;

#nullable disable

namespace FictionArchive.Service.UserNovelDataService.Migrations
{
    /// <inheritdoc />
    public partial class AddNovelVolumeChapter : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.CreateTable(
                name: "Novels",
                columns: table => new
                {
                    Id = table.Column<long>(type: "bigint", nullable: false)
                        .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
                    CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Novels", x => x.Id);
                });

            migrationBuilder.CreateTable(
                name: "Volumes",
                columns: table => new
                {
                    Id = table.Column<long>(type: "bigint", nullable: false)
                        .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
                    NovelId = table.Column<long>(type: "bigint", nullable: false),
                    CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Volumes", x => x.Id);
                    table.ForeignKey(
                        name: "FK_Volumes_Novels_NovelId",
                        column: x => x.NovelId,
                        principalTable: "Novels",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });

            migrationBuilder.CreateTable(
                name: "Chapters",
                columns: table => new
                {
                    Id = table.Column<long>(type: "bigint", nullable: false)
                        .Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
                    VolumeId = table.Column<long>(type: "bigint", nullable: false),
                    CreatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false),
                    LastUpdatedTime = table.Column<Instant>(type: "timestamp with time zone", nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Chapters", x => x.Id);
                    table.ForeignKey(
                        name: "FK_Chapters_Volumes_VolumeId",
                        column: x => x.VolumeId,
                        principalTable: "Volumes",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });

            migrationBuilder.CreateIndex(
                name: "IX_Chapters_VolumeId",
                table: "Chapters",
                column: "VolumeId");

            migrationBuilder.CreateIndex(
                name: "IX_Volumes_NovelId",
                table: "Volumes",
                column: "NovelId");
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable(
                name: "Chapters");

            migrationBuilder.DropTable(
                name: "Volumes");

            migrationBuilder.DropTable(
                name: "Novels");
        }
    }
}
@@ -59,6 +59,49 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
|
|||||||
b.ToTable("Bookmarks");
|
b.ToTable("Bookmarks");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
|
||||||
|
{
|
||||||
|
b.Property<long>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<long>("VolumeId")
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.HasIndex("VolumeId");
|
||||||
|
|
||||||
|
b.ToTable("Chapters");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
|
||||||
|
{
|
||||||
|
b.Property<long>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.ToTable("Novels");
|
||||||
|
});
|
||||||
|
|
||||||
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.User", b =>
|
||||||
{
|
{
|
||||||
b.Property<Guid>("Id")
|
b.Property<Guid>("Id")
|
||||||
@@ -80,6 +123,30 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
|
|||||||
b.ToTable("Users");
|
b.ToTable("Users");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
|
||||||
|
{
|
||||||
|
b.Property<long>("Id")
|
||||||
|
.ValueGeneratedOnAdd()
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<long>("Id"));
|
||||||
|
|
||||||
|
b.Property<Instant>("CreatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<Instant>("LastUpdatedTime")
|
||||||
|
.HasColumnType("timestamp with time zone");
|
||||||
|
|
||||||
|
b.Property<long>("NovelId")
|
||||||
|
.HasColumnType("bigint");
|
||||||
|
|
||||||
|
b.HasKey("Id");
|
||||||
|
|
||||||
|
b.HasIndex("NovelId");
|
||||||
|
|
||||||
|
b.ToTable("Volumes");
|
||||||
|
});
|
||||||
|
|
||||||
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Bookmark", b =>
|
||||||
{
|
{
|
||||||
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
|
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.User", "User")
|
||||||
@@ -90,6 +157,38 @@ namespace FictionArchive.Service.UserNovelDataService.Migrations
|
|||||||
|
|
||||||
b.Navigation("User");
|
b.Navigation("User");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Chapter", b =>
|
||||||
|
{
|
||||||
|
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", "Volume")
|
||||||
|
.WithMany("Chapters")
|
||||||
|
.HasForeignKey("VolumeId")
|
||||||
|
.OnDelete(DeleteBehavior.Cascade)
|
||||||
|
.IsRequired();
|
||||||
|
|
||||||
|
b.Navigation("Volume");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
|
||||||
|
{
|
||||||
|
b.HasOne("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", "Novel")
|
||||||
|
.WithMany("Volumes")
|
||||||
|
.HasForeignKey("NovelId")
|
||||||
|
.OnDelete(DeleteBehavior.Cascade)
|
||||||
|
.IsRequired();
|
||||||
|
|
||||||
|
b.Navigation("Novel");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Novel", b =>
|
||||||
|
{
|
||||||
|
b.Navigation("Volumes");
|
||||||
|
});
|
||||||
|
|
||||||
|
modelBuilder.Entity("FictionArchive.Service.UserNovelDataService.Models.Database.Volume", b =>
|
||||||
|
{
|
||||||
|
b.Navigation("Chapters");
|
||||||
|
});
|
||||||
#pragma warning restore 612, 618
|
#pragma warning restore 612, 618
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,5 +4,6 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
 
 public class Chapter : BaseEntity<uint>
 {
+    public uint VolumeId { get; set; }
+    public virtual Volume Volume { get; set; } = null!;
 }
@@ -4,5 +4,5 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
 
 public class Novel : BaseEntity<uint>
 {
-    public virtual ICollection<Volume> Volumes { get; set; }
+    public virtual ICollection<Volume> Volumes { get; set; } = new List<Volume>();
 }
@@ -4,5 +4,7 @@ namespace FictionArchive.Service.UserNovelDataService.Models.Database;
 
 public class Volume : BaseEntity<uint>
 {
-    public virtual ICollection<Chapter> Chapters { get; set; }
+    public uint NovelId { get; set; }
+    public virtual Novel Novel { get; set; } = null!;
+    public virtual ICollection<Chapter> Chapters { get; set; } = new List<Chapter>();
 }
@@ -0,0 +1,13 @@
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;

public class ChapterCreatedEvent : IIntegrationEvent
{
    public required uint ChapterId { get; init; }
    public required uint NovelId { get; init; }
    public required uint VolumeId { get; init; }
    public required int VolumeOrder { get; init; }
    public required uint ChapterOrder { get; init; }
    public required string ChapterTitle { get; init; }
}
@@ -0,0 +1,13 @@
using FictionArchive.Common.Enums;
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;

public class NovelCreatedEvent : IIntegrationEvent
{
    public required uint NovelId { get; init; }
    public required string Title { get; init; }
    public required Language OriginalLanguage { get; init; }
    public required string Source { get; init; }
    public required string AuthorName { get; init; }
}
@@ -0,0 +1,15 @@
using FictionArchive.Service.Shared.Services.EventBus;

namespace FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;

public class UserInvitedEvent : IIntegrationEvent
{
    public Guid InvitedUserId { get; set; }
    public required string InvitedUsername { get; set; }
    public required string InvitedEmail { get; set; }
    public required string InvitedOAuthProviderId { get; set; }

    public Guid InviterId { get; set; }
    public required string InviterUsername { get; set; }
    public required string InviterOAuthProviderId { get; set; }
}
@@ -3,7 +3,9 @@ using FictionArchive.Service.Shared;
 using FictionArchive.Service.Shared.Extensions;
 using FictionArchive.Service.Shared.Services.EventBus.Implementations;
 using FictionArchive.Service.UserNovelDataService.GraphQL;
+using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
 using FictionArchive.Service.UserNovelDataService.Services;
+using FictionArchive.Service.UserNovelDataService.Services.EventHandlers;
 
 namespace FictionArchive.Service.UserNovelDataService;
 
@@ -27,7 +29,10 @@ public class Program
         builder.Services.AddRabbitMQ(opt =>
         {
            builder.Configuration.GetSection("RabbitMQ").Bind(opt);
-        });
+        })
+            .Subscribe<NovelCreatedEvent, NovelCreatedEventHandler>()
+            .Subscribe<ChapterCreatedEvent, ChapterCreatedEventHandler>()
+            .Subscribe<UserInvitedEvent, UserInvitedEventHandler>();
     }
 
     #endregion
@@ -0,0 +1,93 @@
# UserNovelDataService Backfill Scripts

SQL scripts for backfilling data from UserService and NovelService into UserNovelDataService.

## Prerequisites

1. **Run EF migrations** on the UserNovelDataService database to ensure all tables exist:

```bash
dotnet ef database update --project FictionArchive.Service.UserNovelDataService
```

This will apply the `AddNovelVolumeChapter` migration which creates:
- `Novels` table (Id, CreatedTime, LastUpdatedTime)
- `Volumes` table (Id, NovelId FK, CreatedTime, LastUpdatedTime)
- `Chapters` table (Id, VolumeId FK, CreatedTime, LastUpdatedTime)

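For reference, the resulting tables look roughly like this (a minimal SQL sketch inferred from the migration above; EF's actual generated DDL may differ in identity and constraint syntax):

```sql
-- Approximate shape of the tables created by AddNovelVolumeChapter (sketch only)
CREATE TABLE "Novels" (
    "Id" bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    "CreatedTime" timestamp with time zone NOT NULL,
    "LastUpdatedTime" timestamp with time zone NOT NULL
);

CREATE TABLE "Volumes" (
    "Id" bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    "NovelId" bigint NOT NULL REFERENCES "Novels" ("Id") ON DELETE CASCADE,
    "CreatedTime" timestamp with time zone NOT NULL,
    "LastUpdatedTime" timestamp with time zone NOT NULL
);

CREATE TABLE "Chapters" (
    "Id" bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
    "VolumeId" bigint NOT NULL REFERENCES "Volumes" ("Id") ON DELETE CASCADE,
    "CreatedTime" timestamp with time zone NOT NULL,
    "LastUpdatedTime" timestamp with time zone NOT NULL
);

CREATE INDEX "IX_Volumes_NovelId" ON "Volumes" ("NovelId");
CREATE INDEX "IX_Chapters_VolumeId" ON "Chapters" ("VolumeId");
```
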
## Execution Order

Run scripts in numeric order:

### Extraction (run against source databases)
1. `01_extract_users_from_userservice.sql` - Run against **UserService** DB
2. `02_extract_novels_from_novelservice.sql` - Run against **NovelService** DB
3. `03_extract_volumes_from_novelservice.sql` - Run against **NovelService** DB
4. `04_extract_chapters_from_novelservice.sql` - Run against **NovelService** DB

### Insertion (run against UserNovelDataService database)
5. `05_insert_users_to_usernoveldataservice.sql`
6. `06_insert_novels_to_usernoveldataservice.sql`
7. `07_insert_volumes_to_usernoveldataservice.sql`
8. `08_insert_chapters_to_usernoveldataservice.sql`

## Methods

Each script provides three options:

1. **SELECT for review** - Review data before export
2. **Generate INSERT statements** - Creates individual INSERT statements (good for small datasets)
3. **CSV export/import** - Use PostgreSQL `\copy` for bulk operations (recommended for large datasets)

## Example Workflow

### Using CSV Export/Import (Recommended)

```bash
# 1. Export from source databases
psql -h localhost -U postgres -d userservice -c "\copy (SELECT \"Id\", \"OAuthProviderId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Users\" WHERE \"Disabled\" = false) TO '/tmp/users_export.csv' WITH CSV HEADER"

psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Novels\") TO '/tmp/novels_export.csv' WITH CSV HEADER"

psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"NovelId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Volume\" ORDER BY \"NovelId\", \"Id\") TO '/tmp/volumes_export.csv' WITH CSV HEADER"

psql -h localhost -U postgres -d novelservice -c "\copy (SELECT \"Id\", \"VolumeId\", \"CreatedTime\", \"LastUpdatedTime\" FROM \"Chapter\" ORDER BY \"VolumeId\", \"Id\") TO '/tmp/chapters_export.csv' WITH CSV HEADER"

# 2. Import into UserNovelDataService (order matters due to FK constraints!)
psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Users\" (\"Id\", \"OAuthProviderId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/users_export.csv' WITH CSV HEADER"

psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Novels\" (\"Id\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/novels_export.csv' WITH CSV HEADER"

psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Volumes\" (\"Id\", \"NovelId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/volumes_export.csv' WITH CSV HEADER"

psql -h localhost -U postgres -d usernoveldataservice -c "\copy \"Chapters\" (\"Id\", \"VolumeId\", \"CreatedTime\", \"LastUpdatedTime\") FROM '/tmp/chapters_export.csv' WITH CSV HEADER"
```

**Important**: Insert order matters due to foreign key constraints:
1. Users (no dependencies)
2. Novels (no dependencies)
3. Volumes (depends on Novels)
4. Chapters (depends on Volumes)

### Using dblink (Cross-database queries)

If both databases are on the same PostgreSQL server, you can use the `dblink` extension for direct cross-database inserts. See the commented examples in each insert script.

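For convenience, here is a minimal `dblink` sketch for the `Novels` table (connection parameters are placeholders; the insert scripts carry the full commented versions, which also resolve conflicts with DO UPDATE):

```sql
-- Run against the UserNovelDataService database (sketch only; adjust the connection string)
CREATE EXTENSION IF NOT EXISTS dblink;

INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime")
SELECT "Id", "CreatedTime", "LastUpdatedTime"
FROM dblink(
    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
    'SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM "Novels"'
) AS t("Id" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO NOTHING;
```
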
## Verification

After running the backfill, verify counts match:

```sql
-- Run on UserService DB
SELECT COUNT(*) as user_count FROM "Users" WHERE "Disabled" = false;

-- Run on NovelService DB
SELECT COUNT(*) as novel_count FROM "Novels";
SELECT COUNT(*) as volume_count FROM "Volume";
SELECT COUNT(*) as chapter_count FROM "Chapter";

-- Run on UserNovelDataService DB
SELECT COUNT(*) as user_count FROM "Users";
SELECT COUNT(*) as novel_count FROM "Novels";
SELECT COUNT(*) as volume_count FROM "Volumes";
SELECT COUNT(*) as chapter_count FROM "Chapters";
```
@@ -0,0 +1,28 @@
-- Extract Users from UserService database
-- Run this against: UserService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export

-- Option 1: Simple SELECT for review/testing
SELECT
    "Id",
    "OAuthProviderId",
    "CreatedTime",
    "LastUpdatedTime"
FROM "Users"
WHERE "Disabled" = false
ORDER BY "CreatedTime";

-- Option 2: Generate INSERT statements (useful for small datasets)
SELECT format(
    'INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") VALUES (%L, %L, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
    "Id",
    "OAuthProviderId",
    "CreatedTime",
    "LastUpdatedTime"
)
FROM "Users"
WHERE "Disabled" = false
ORDER BY "CreatedTime";

-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime" FROM "Users" WHERE "Disabled" = false ORDER BY "CreatedTime") TO '/tmp/users_export.csv' WITH CSV HEADER;
@@ -0,0 +1,24 @@
-- Extract Novels from NovelService database
-- Run this against: NovelService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export

-- Option 1: Simple SELECT for review/testing
SELECT
    "Id",
    "CreatedTime",
    "LastUpdatedTime"
FROM "Novels"
ORDER BY "Id";

-- Option 2: Generate INSERT statements
SELECT format(
    'INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime") VALUES (%s, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
    "Id",
    "CreatedTime",
    "LastUpdatedTime"
)
FROM "Novels"
ORDER BY "Id";

-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM "Novels" ORDER BY "Id") TO '/tmp/novels_export.csv' WITH CSV HEADER;
@@ -0,0 +1,26 @@
-- Extract Volumes from NovelService database
-- Run this against: NovelService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export

-- Option 1: Simple SELECT for review/testing
SELECT
    "Id",
    "NovelId",
    "CreatedTime",
    "LastUpdatedTime"
FROM "Volume"
ORDER BY "NovelId", "Id";

-- Option 2: Generate INSERT statements
SELECT format(
    'INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") VALUES (%s, %s, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
    "Id",
    "NovelId",
    "CreatedTime",
    "LastUpdatedTime"
)
FROM "Volume"
ORDER BY "NovelId", "Id";

-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "NovelId", "CreatedTime", "LastUpdatedTime" FROM "Volume" ORDER BY "NovelId", "Id") TO '/tmp/volumes_export.csv' WITH CSV HEADER;
@@ -0,0 +1,26 @@
-- Extract Chapters from NovelService database
-- Run this against: NovelService PostgreSQL database
-- Output: CSV or use COPY TO for bulk export

-- Option 1: Simple SELECT for review/testing
SELECT
    "Id",
    "VolumeId",
    "CreatedTime",
    "LastUpdatedTime"
FROM "Chapter"
ORDER BY "VolumeId", "Id";

-- Option 2: Generate INSERT statements
SELECT format(
    'INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") VALUES (%s, %s, %L, %L) ON CONFLICT ("Id") DO NOTHING;',
    "Id",
    "VolumeId",
    "CreatedTime",
    "LastUpdatedTime"
)
FROM "Chapter"
ORDER BY "VolumeId", "Id";

-- Option 3: Export to CSV (run from psql)
-- \copy (SELECT "Id", "VolumeId", "CreatedTime", "LastUpdatedTime" FROM "Chapter" ORDER BY "VolumeId", "Id") TO '/tmp/chapters_export.csv' WITH CSV HEADER;
@@ -0,0 +1,32 @@
-- Insert Users into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE: You must have extracted users from UserService first
-- using 01_extract_users_from_userservice.sql

-- Option 1: If you have a CSV file from export
-- \copy "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/users_export.csv' WITH CSV HEADER;

-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;

-- Example using dblink (adjust connection string):
/*
INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime")
SELECT
    "Id"::uuid,
    "OAuthProviderId",
    "CreatedTime"::timestamp with time zone,
    "LastUpdatedTime"::timestamp with time zone
FROM dblink(
    'host=localhost port=5432 dbname=userservice user=postgres password=yourpassword',
    'SELECT "Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime" FROM "Users" WHERE "Disabled" = false'
) AS t("Id" uuid, "OAuthProviderId" text, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
    "OAuthProviderId" = EXCLUDED."OAuthProviderId",
    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/

-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Users" ("Id", "OAuthProviderId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
@@ -0,0 +1,31 @@
-- Insert Novels into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE:
-- 1. Ensure the Novels table exists (run EF migrations first if needed)
-- 2. Extract novels from NovelService using 02_extract_novels_from_novelservice.sql

-- Option 1: If you have a CSV file from export
-- \copy "Novels" ("Id", "CreatedTime", "LastUpdatedTime") FROM '/tmp/novels_export.csv' WITH CSV HEADER;

-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;

-- Example using dblink (adjust connection string):
/*
INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime")
SELECT
    "Id"::bigint,
    "CreatedTime"::timestamp with time zone,
    "LastUpdatedTime"::timestamp with time zone
FROM dblink(
    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
    'SELECT "Id", "CreatedTime", "LastUpdatedTime" FROM "Novels"'
) AS t("Id" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/

-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Novels" ("Id", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
@@ -0,0 +1,34 @@
-- Insert Volumes into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE:
-- 1. Ensure the Volumes table exists (run EF migrations first if needed)
-- 2. Novels must be inserted first (FK constraint)
-- 3. Extract volumes from NovelService using 03_extract_volumes_from_novelservice.sql

-- Option 1: If you have a CSV file from export
-- \copy "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/volumes_export.csv' WITH CSV HEADER;

-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;

-- Example using dblink (adjust connection string):
/*
INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime")
SELECT
    "Id"::bigint,
    "NovelId"::bigint,
    "CreatedTime"::timestamp with time zone,
    "LastUpdatedTime"::timestamp with time zone
FROM dblink(
    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
    'SELECT "Id", "NovelId", "CreatedTime", "LastUpdatedTime" FROM "Volume"'
) AS t("Id" bigint, "NovelId" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
    "NovelId" = EXCLUDED."NovelId",
    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/

-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Volumes" ("Id", "NovelId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
@@ -0,0 +1,34 @@
-- Insert Chapters into UserNovelDataService database
-- Run this against: UserNovelDataService PostgreSQL database
--
-- PREREQUISITE:
-- 1. Ensure the Chapters table exists (run EF migrations first if needed)
-- 2. Volumes must be inserted first (FK constraint)
-- 3. Extract chapters from NovelService using 04_extract_chapters_from_novelservice.sql

-- Option 1: If you have a CSV file from export
-- \copy "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") FROM '/tmp/chapters_export.csv' WITH CSV HEADER;

-- Option 2: Direct cross-database insert using dblink
-- First, install dblink extension if not already done:
-- CREATE EXTENSION IF NOT EXISTS dblink;

-- Example using dblink (adjust connection string):
/*
INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime")
SELECT
    "Id"::bigint,
    "VolumeId"::bigint,
    "CreatedTime"::timestamp with time zone,
    "LastUpdatedTime"::timestamp with time zone
FROM dblink(
    'host=localhost port=5432 dbname=novelservice user=postgres password=yourpassword',
    'SELECT "Id", "VolumeId", "CreatedTime", "LastUpdatedTime" FROM "Chapter"'
) AS t("Id" bigint, "VolumeId" bigint, "CreatedTime" timestamp with time zone, "LastUpdatedTime" timestamp with time zone)
ON CONFLICT ("Id") DO UPDATE SET
    "VolumeId" = EXCLUDED."VolumeId",
    "LastUpdatedTime" = EXCLUDED."LastUpdatedTime";
*/

-- Option 3: Paste generated INSERT statements from extraction script here
-- INSERT INTO "Chapters" ("Id", "VolumeId", "CreatedTime", "LastUpdatedTime") VALUES (...) ON CONFLICT ("Id") DO NOTHING;
@@ -0,0 +1,53 @@
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using Microsoft.EntityFrameworkCore;

namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;

public class ChapterCreatedEventHandler : IIntegrationEventHandler<ChapterCreatedEvent>
{
    private readonly UserNovelDataServiceDbContext _dbContext;
    private readonly ILogger<ChapterCreatedEventHandler> _logger;

    public ChapterCreatedEventHandler(
        UserNovelDataServiceDbContext dbContext,
        ILogger<ChapterCreatedEventHandler> logger)
    {
        _dbContext = dbContext;
        _logger = logger;
    }

    public async Task Handle(ChapterCreatedEvent @event)
    {
        // Ensure novel exists
        var novelExists = await _dbContext.Novels.AnyAsync(n => n.Id == @event.NovelId);
        if (!novelExists)
        {
            var novel = new Novel { Id = @event.NovelId };
            _dbContext.Novels.Add(novel);
        }

        // Ensure volume exists
        var volumeExists = await _dbContext.Volumes.AnyAsync(v => v.Id == @event.VolumeId);
        if (!volumeExists)
        {
            var volume = new Volume { Id = @event.VolumeId };
            _dbContext.Volumes.Add(volume);
        }

        // Create chapter if not exists
        var chapterExists = await _dbContext.Chapters.AnyAsync(c => c.Id == @event.ChapterId);
        if (chapterExists)
        {
            _logger.LogDebug("Chapter {ChapterId} already exists, skipping", @event.ChapterId);
            return;
        }

        var chapter = new Chapter { Id = @event.ChapterId };
        _dbContext.Chapters.Add(chapter);
        await _dbContext.SaveChangesAsync();

        _logger.LogInformation("Created chapter stub for {ChapterId} in novel {NovelId}", @event.ChapterId, @event.NovelId);
    }
}
@@ -0,0 +1,36 @@
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using Microsoft.EntityFrameworkCore;

namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;

public class NovelCreatedEventHandler : IIntegrationEventHandler<NovelCreatedEvent>
{
    private readonly UserNovelDataServiceDbContext _dbContext;
    private readonly ILogger<NovelCreatedEventHandler> _logger;

    public NovelCreatedEventHandler(
        UserNovelDataServiceDbContext dbContext,
        ILogger<NovelCreatedEventHandler> logger)
    {
        _dbContext = dbContext;
        _logger = logger;
    }

    public async Task Handle(NovelCreatedEvent @event)
    {
        var exists = await _dbContext.Novels.AnyAsync(n => n.Id == @event.NovelId);
        if (exists)
        {
            _logger.LogDebug("Novel {NovelId} already exists, skipping", @event.NovelId);
            return;
        }

        var novel = new Novel { Id = @event.NovelId };
        _dbContext.Novels.Add(novel);
        await _dbContext.SaveChangesAsync();

        _logger.LogInformation("Created novel stub for {NovelId}", @event.NovelId);
    }
}
@@ -0,0 +1,40 @@
using FictionArchive.Service.Shared.Services.EventBus;
using FictionArchive.Service.UserNovelDataService.Models.Database;
using FictionArchive.Service.UserNovelDataService.Models.IntegrationEvents;
using Microsoft.EntityFrameworkCore;

namespace FictionArchive.Service.UserNovelDataService.Services.EventHandlers;

public class UserInvitedEventHandler : IIntegrationEventHandler<UserInvitedEvent>
{
    private readonly UserNovelDataServiceDbContext _dbContext;
    private readonly ILogger<UserInvitedEventHandler> _logger;

    public UserInvitedEventHandler(
        UserNovelDataServiceDbContext dbContext,
        ILogger<UserInvitedEventHandler> logger)
    {
        _dbContext = dbContext;
        _logger = logger;
    }

    public async Task Handle(UserInvitedEvent @event)
    {
        var exists = await _dbContext.Users.AnyAsync(u => u.Id == @event.InvitedUserId);
        if (exists)
        {
            _logger.LogDebug("User {UserId} already exists, skipping", @event.InvitedUserId);
            return;
        }

        var user = new User
        {
            Id = @event.InvitedUserId,
            OAuthProviderId = @event.InvitedOAuthProviderId
        };
        _dbContext.Users.Add(user);
        await _dbContext.SaveChangesAsync();

        _logger.LogInformation("Created user stub for {UserId}", @event.InvitedUserId);
    }
}
@@ -8,6 +8,9 @@ public class UserNovelDataServiceDbContext : FictionArchiveDbContext
 {
     public DbSet<User> Users { get; set; }
     public DbSet<Bookmark> Bookmarks { get; set; }
+    public DbSet<Novel> Novels { get; set; }
+    public DbSet<Volume> Volumes { get; set; }
+    public DbSet<Chapter> Chapters { get; set; }
 
     public UserNovelDataServiceDbContext(DbContextOptions options, ILogger<UserNovelDataServiceDbContext> logger) : base(options, logger)
     {
|
|||||||
@@ -0,0 +1,181 @@
|
|||||||
|
<script lang="ts">
|
||||||
|
import { Button } from '$lib/components/ui/button';
|
||||||
|
import { Popover, PopoverTrigger, PopoverContent } from '$lib/components/ui/popover';
|
||||||
|
import { Textarea } from '$lib/components/ui/textarea';
|
||||||
|
import { client } from '$lib/graphql/client';
|
||||||
|
import { UpsertBookmarkDocument, RemoveBookmarkDocument } from '$lib/graphql/__generated__/graphql';
|
||||||
|
import Bookmark from '@lucide/svelte/icons/bookmark';
|
||||||
|
import BookmarkCheck from '@lucide/svelte/icons/bookmark-check';
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
novelId: number;
|
||||||
|
chapterId: number;
|
||||||
|
isBookmarked?: boolean;
|
||||||
|
bookmarkDescription?: string | null;
|
||||||
|
size?: 'default' | 'sm' | 'icon';
|
||||||
|
onBookmarkChange?: (isBookmarked: boolean, description?: string | null) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
let {
|
||||||
|
novelId,
|
||||||
|
chapterId,
|
||||||
|
isBookmarked = false,
|
||||||
|
bookmarkDescription = null,
|
||||||
|
size = 'icon',
|
||||||
|
onBookmarkChange
|
||||||
|
}: Props = $props();
|
||||||
|
|
||||||
|
// Bookmark state
|
||||||
|
let popoverOpen = $state(false);
|
||||||
|
let description = $state(bookmarkDescription ?? '');
|
||||||
|
let saving = $state(false);
|
||||||
|
let removing = $state(false);
|
||||||
|
let error: string | null = $state(null);
|
||||||
|
|
||||||
|
// Reset description when popover opens
|
||||||
|
$effect(() => {
|
||||||
|
if (popoverOpen) {
|
||||||
|
description = bookmarkDescription ?? '';
|
||||||
|
error = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
async function saveBookmark() {
|
||||||
|
saving = true;
|
||||||
|
error = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await client
|
||||||
|
.mutation(UpsertBookmarkDocument, {
|
||||||
|
input: {
|
||||||
|
chapterId,
|
||||||
|
novelId,
|
||||||
|
description: description.trim() || null
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.toPromise();
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
error = result.error.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.data?.upsertBookmark?.errors?.length) {
|
||||||
|
error = result.data.upsertBookmark.errors[0]?.message ?? 'Failed to save bookmark';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.data?.upsertBookmark?.bookmarkPayload?.success) {
|
||||||
|
popoverOpen = false;
|
||||||
|
onBookmarkChange?.(true, description.trim() || null);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
error = e instanceof Error ? e.message : 'Failed to save bookmark';
|
||||||
|
} finally {
|
||||||
|
saving = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeBookmark() {
|
||||||
|
removing = true;
|
||||||
|
error = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await client
|
||||||
|
.mutation(RemoveBookmarkDocument, {
|
||||||
|
input: { chapterId }
|
||||||
|
})
|
||||||
|
.toPromise();
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
error = result.error.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.data?.removeBookmark?.errors?.length) {
|
||||||
|
error = result.data.removeBookmark.errors[0]?.message ?? 'Failed to remove bookmark';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.data?.removeBookmark?.bookmarkPayload?.success) {
|
||||||
|
popoverOpen = false;
|
||||||
|
description = '';
|
||||||
|
onBookmarkChange?.(false, null);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
error = e instanceof Error ? e.message : 'Failed to remove bookmark';
|
||||||
|
} finally {
|
||||||
|
removing = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleClick(e: MouseEvent) {
|
||||||
|
e.preventDefault();
|
||||||
|
e.stopPropagation();
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<!-- svelte-ignore a11y_click_events_have_key_events -->
|
||||||
|
<!-- svelte-ignore a11y_no_static_element_interactions -->
|
||||||
|
<div onclick={handleClick}>
|
||||||
|
<Popover bind:open={popoverOpen}>
|
||||||
|
<PopoverTrigger asChild>
|
||||||
|
{#snippet child({ props })}
|
||||||
|
<Button
|
||||||
|
variant={isBookmarked ? 'default' : 'ghost'}
|
||||||
|
{size}
|
||||||
|
class={size === 'icon' ? 'h-8 w-8' : 'gap-2'}
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
{#if isBookmarked}
|
||||||
|
<BookmarkCheck class="h-4 w-4" />
|
||||||
|
{:else}
|
||||||
|
<Bookmark class="h-4 w-4" />
|
||||||
|
{/if}
|
||||||
|
{#if size !== 'icon'}
|
||||||
|
<span>{isBookmarked ? 'Bookmarked' : 'Bookmark'}</span>
|
||||||
|
{/if}
|
||||||
|
</Button>
|
||||||
|
{/snippet}
|
||||||
|
</PopoverTrigger>
|
||||||
|
<PopoverContent class="w-80">
|
||||||
|
<div class="space-y-4">
|
||||||
|
<div class="space-y-2">
|
||||||
|
<h4 class="font-medium leading-none">
|
||||||
|
{isBookmarked ? 'Edit bookmark' : 'Bookmark this chapter'}
|
||||||
|
</h4>
|
||||||
|
<p class="text-sm text-muted-foreground">
|
||||||
|
{isBookmarked ? 'Update your note or remove the bookmark.' : 'Add an optional note to remember why you bookmarked this.'}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Textarea
|
||||||
|
bind:value={description}
|
||||||
|
placeholder="Add a note..."
|
||||||
|
class="min-h-[80px] resize-none"
|
||||||
|
/>
|
||||||
|
{#if error}
|
||||||
|
<p class="text-sm text-destructive">{error}</p>
|
||||||
|
{/if}
|
||||||
|
<div class="flex justify-end gap-2">
|
||||||
|
{#if isBookmarked}
|
||||||
|
<Button
|
||||||
|
variant="destructive"
|
||||||
|
size="sm"
|
||||||
|
onclick={removeBookmark}
|
||||||
|
disabled={removing || saving}
|
||||||
|
>
|
||||||
|
{removing ? 'Removing...' : 'Remove'}
|
||||||
|
</Button>
|
||||||
|
{/if}
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
onclick={saveBookmark}
|
||||||
|
disabled={saving || removing}
|
||||||
|
>
|
||||||
|
{saving ? 'Saving...' : 'Save'}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</PopoverContent>
|
||||||
|
</Popover>
|
||||||
|
</div>
|
||||||
@@ -54,6 +54,7 @@
|
|||||||
} from '$lib/components/ui/tooltip';
|
} from '$lib/components/ui/tooltip';
|
||||||
import { formatRelativeTime, formatAbsoluteTime } from '$lib/utils/time';
|
import { formatRelativeTime, formatAbsoluteTime } from '$lib/utils/time';
|
||||||
import { sanitizeHtml } from '$lib/utils/sanitize';
|
import { sanitizeHtml } from '$lib/utils/sanitize';
|
||||||
|
import ChapterBookmarkButton from './ChapterBookmarkButton.svelte';
|
||||||
// Direct imports for faster builds
|
// Direct imports for faster builds
|
||||||
import ArrowLeft from '@lucide/svelte/icons/arrow-left';
|
import ArrowLeft from '@lucide/svelte/icons/arrow-left';
|
||||||
import ExternalLink from '@lucide/svelte/icons/external-link';
|
import ExternalLink from '@lucide/svelte/icons/external-link';
|
||||||
@@ -144,6 +145,32 @@
|
|||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Bookmark lookup by chapterId for quick access in chapter list
|
||||||
|
const bookmarkLookup = $derived(
|
||||||
|
new Map(bookmarks.map((b) => [b.chapterId, b]))
|
||||||
|
);
|
||||||
|
|
||||||
|
function handleChapterBookmarkChange(chapterId: number, isBookmarked: boolean, description?: string | null) {
|
||||||
|
if (isBookmarked) {
|
||||||
|
// Add or update bookmark in local state
|
||||||
|
const existingIndex = bookmarks.findIndex((b) => b.chapterId === chapterId);
|
||||||
|
const newBookmark = {
|
||||||
|
id: existingIndex >= 0 ? bookmarks[existingIndex].id : -1, // temp id
|
||||||
|
chapterId,
|
||||||
|
description: description ?? null,
|
||||||
|
createdTime: new Date().toISOString()
|
||||||
|
};
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
bookmarks[existingIndex] = newBookmark;
|
||||||
|
} else {
|
||||||
|
bookmarks = [...bookmarks, newBookmark];
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Remove bookmark from local state
|
||||||
|
bookmarks = bookmarks.filter((b) => b.chapterId !== chapterId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const chapterCount = $derived(
|
const chapterCount = $derived(
|
||||||
sortedVolumes.reduce((sum, v) => sum + v.chapters.length, 0)
|
sortedVolumes.reduce((sum, v) => sum + v.chapters.length, 0)
|
||||||
);
|
);
|
||||||
@@ -200,9 +227,9 @@
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Load bookmarks when tab is first activated
|
// Load bookmarks when novel is loaded (for count display)
|
||||||
$effect(() => {
|
$effect(() => {
|
||||||
if (activeTab === 'bookmarks' && !bookmarksLoaded && novelId) {
|
if (novel && !bookmarksLoaded && novelId) {
|
||||||
fetchBookmarks();
|
fetchBookmarks();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -591,24 +618,36 @@
|
|||||||
<div class="max-h-96 overflow-y-auto -mx-2">
|
<div class="max-h-96 overflow-y-auto -mx-2">
|
||||||
{#each singleVolumeChapters as chapter (chapter.id)}
|
{#each singleVolumeChapters as chapter (chapter.id)}
|
||||||
{@const chapterDate = chapter.lastUpdatedTime ? new Date(chapter.lastUpdatedTime) : null}
|
{@const chapterDate = chapter.lastUpdatedTime ? new Date(chapter.lastUpdatedTime) : null}
|
||||||
<a
|
{@const chapterBookmark = bookmarkLookup.get(chapter.id)}
|
||||||
href="/novels/{novelId}/volumes/{sortedVolumes[0]?.order}/chapters/{chapter.order}"
|
<div class="flex items-center px-3 py-2.5 hover:bg-muted/50 rounded-md transition-colors group">
|
||||||
class="flex items-center justify-between px-3 py-2.5 hover:bg-muted/50 rounded-md transition-colors group"
|
<a
|
||||||
>
|
href="/novels/{novelId}/volumes/{sortedVolumes[0]?.order}/chapters/{chapter.order}"
|
||||||
<div class="flex items-center gap-3 min-w-0">
|
class="flex items-center gap-3 min-w-0 flex-1"
|
||||||
|
>
|
||||||
<span class="text-muted-foreground text-sm font-medium shrink-0 w-14">
|
<span class="text-muted-foreground text-sm font-medium shrink-0 w-14">
|
||||||
Ch. {chapter.order}
|
Ch. {chapter.order}
|
||||||
</span>
|
</span>
|
||||||
<span class="text-sm truncate group-hover:text-primary transition-colors">
|
<span class="text-sm truncate group-hover:text-primary transition-colors">
|
||||||
{chapter.name}
|
{chapter.name}
|
||||||
</span>
|
</span>
|
||||||
|
</a>
|
||||||
|
<div class="flex items-center gap-2 shrink-0 ml-2">
|
||||||
|
{#if chapterDate}
|
||||||
|
<span class="text-xs text-muted-foreground/70">
|
||||||
|
{formatRelativeTime(chapterDate)}
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
{#if novelId}
|
||||||
|
<ChapterBookmarkButton
|
||||||
|
novelId={parseInt(novelId, 10)}
|
||||||
|
chapterId={chapter.id}
|
||||||
|
isBookmarked={!!chapterBookmark}
|
||||||
|
bookmarkDescription={chapterBookmark?.description}
|
||||||
|
onBookmarkChange={(isBookmarked, description) => handleChapterBookmarkChange(chapter.id, isBookmarked, description)}
|
||||||
|
/>
|
||||||
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{#if chapterDate}
|
</div>
|
||||||
<span class="text-xs text-muted-foreground/70 shrink-0 ml-2">
|
|
||||||
{formatRelativeTime(chapterDate)}
|
|
||||||
</span>
|
|
||||||
{/if}
|
|
||||||
</a>
|
|
||||||
{/each}
|
{/each}
|
||||||
</div>
|
</div>
|
||||||
{:else}
|
{:else}
|
||||||
@@ -630,24 +669,36 @@
|
|||||||
<div class="space-y-0.5">
|
<div class="space-y-0.5">
|
||||||
{#each volumeChapters as chapter (chapter.id)}
|
{#each volumeChapters as chapter (chapter.id)}
|
||||||
{@const chapterDate = chapter.lastUpdatedTime ? new Date(chapter.lastUpdatedTime) : null}
|
{@const chapterDate = chapter.lastUpdatedTime ? new Date(chapter.lastUpdatedTime) : null}
|
||||||
<a
|
{@const chapterBookmark = bookmarkLookup.get(chapter.id)}
|
||||||
href="/novels/{novelId}/volumes/{volume.order}/chapters/{chapter.order}"
|
<div class="flex items-center px-3 py-2.5 hover:bg-muted/50 rounded-md transition-colors group">
|
||||||
class="flex items-center justify-between px-3 py-2.5 hover:bg-muted/50 rounded-md transition-colors group"
|
<a
|
||||||
>
|
href="/novels/{novelId}/volumes/{volume.order}/chapters/{chapter.order}"
|
||||||
<div class="flex items-center gap-3 min-w-0">
|
class="flex items-center gap-3 min-w-0 flex-1"
|
||||||
|
>
|
||||||
<span class="text-muted-foreground text-sm font-medium shrink-0 w-14">
|
<span class="text-muted-foreground text-sm font-medium shrink-0 w-14">
|
||||||
Ch. {chapter.order}
|
Ch. {chapter.order}
|
||||||
</span>
|
</span>
|
||||||
<span class="text-sm truncate group-hover:text-primary transition-colors">
|
<span class="text-sm truncate group-hover:text-primary transition-colors">
|
||||||
{chapter.name}
|
{chapter.name}
|
||||||
</span>
|
</span>
|
||||||
|
</a>
|
||||||
|
<div class="flex items-center gap-2 shrink-0 ml-2">
|
||||||
|
{#if chapterDate}
|
||||||
|
<span class="text-xs text-muted-foreground/70">
|
||||||
|
{formatRelativeTime(chapterDate)}
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
{#if novelId}
|
||||||
|
<ChapterBookmarkButton
|
||||||
|
novelId={parseInt(novelId, 10)}
|
||||||
|
chapterId={chapter.id}
|
||||||
|
isBookmarked={!!chapterBookmark}
|
||||||
|
bookmarkDescription={chapterBookmark?.description}
|
||||||
|
onBookmarkChange={(isBookmarked, description) => handleChapterBookmarkChange(chapter.id, isBookmarked, description)}
|
||||||
|
/>
|
||||||
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{#if chapterDate}
|
</div>
|
||||||
<span class="text-xs text-muted-foreground/70 shrink-0 ml-2">
|
|
||||||
{formatRelativeTime(chapterDate)}
|
|
||||||
</span>
|
|
||||||
{/if}
|
|
||||||
</a>
|
|
||||||
{/each}
|
{/each}
|
||||||
</div>
|
</div>
|
||||||
</AccordionContent>
|
</AccordionContent>
|
||||||
|
|||||||