Updated lots of stuff: got multi-scrape working; still need to test non-nullable chapter novel IDs against the current model; now supports SQLite and Postgres concurrently (and more can easily be added); still need to deploy and set up auth.
Some checks failed
continuous-integration/drone/push Build is failing
Some checks failed
continuous-integration/drone/push Build is failing
This commit is contained in:
@@ -3,12 +3,15 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using DBConnection;
|
||||
using DBConnection.Models;
|
||||
using DBConnection.Repositories;
|
||||
using DBConnection.Repositories.Interfaces;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Shared.Models.DTO;
|
||||
using Treestar.Shared.Models.DBDomain;
|
||||
using Treestar.Shared.Models.DTO;
|
||||
using Treestar.Shared.Models.DTO.Requests;
|
||||
using Treestar.Shared.Models.DTO.Responses;
|
||||
using WebNovelPortalAPI.Exceptions;
|
||||
using WebNovelPortalAPI.Scrapers;
|
||||
|
||||
namespace WebNovelPortalAPI.Controllers
|
||||
@@ -26,6 +29,17 @@ namespace WebNovelPortalAPI.Controllers
|
||||
_novelRepository = novelRepository;
|
||||
}
|
||||
|
||||
/// <summary>
/// Resolves the scraper that matches <paramref name="url"/> and runs it.
/// </summary>
/// <param name="url">The novel URL to scrape.</param>
/// <returns>The scraped novel; may be null if the scraper yields nothing.</returns>
/// <exception cref="NoMatchingScraperException">
/// Thrown when no configured scraper matches the URL.
/// </exception>
private async Task<Novel?> ScrapeNovel(string url)
{
    // Fail fast when the URL is not handled by any registered scraper.
    var matchedScraper = MatchScraper(url)
        ?? throw new NoMatchingScraperException(url);
    return matchedScraper.ScrapeNovel(url);
}
|
||||
|
||||
private IScraper? MatchScraper(string novelUrl)
|
||||
{
|
||||
return _scrapers.FirstOrDefault(i => i.MatchesUrl(novelUrl));
|
||||
@@ -45,27 +59,57 @@ namespace WebNovelPortalAPI.Controllers
|
||||
}
|
||||
|
||||
/// <summary>
/// Scrapes every URL in the request, upserts the successful results as one batch,
/// and reports per-URL failures alongside the stored novels.
/// </summary>
/// <param name="request">Carries the list of novel URLs to scrape.</param>
/// <returns>
/// 200 with a <see cref="ScrapeNovelsResponse"/> (including any per-URL failures),
/// or 500 if the batch upsert itself fails.
/// </returns>
[HttpPost]
[Route("scrapeNovels")]
public async Task<IActionResult> ScrapeNovels(ScrapeNovelsRequest request)
{
    var successfulScrapes = new List<Novel>();
    var failures = new Dictionary<string, Exception>();
    foreach (var novelUrl in request.NovelUrls)
    {
        try
        {
            successfulScrapes.Add(await ScrapeNovel(novelUrl));
        }
        catch (Exception e)
        {
            // Record and continue: one bad URL must not abort the whole batch.
            failures[novelUrl] = e;
        }
    }

    IEnumerable<Novel> successfulUploads;
    try
    {
        successfulUploads = await _novelRepository.UpsertMany(successfulScrapes);
    }
    catch (Exception e)
    {
        // NOTE(review): returning the raw exception leaks internals to the client;
        // consider a sanitized error payload once the API surface is locked down.
        return StatusCode(500, e);
    }
    return Ok(new ScrapeNovelsResponse
    {
        Failures = failures,
        // BUG FIX: previously returned the pre-upsert scrape results and discarded
        // successfulUploads entirely (unused-variable smell); return the stored
        // rows so callers see any DB-assigned identifiers.
        SuccessfulNovels = successfulUploads.ToList()
    });
}
/// <summary>
/// Scrapes a single novel URL and upserts the result.
/// </summary>
/// <param name="request">Carries the novel URL to scrape.</param>
/// <returns>
/// 200 with the stored novel; 400 when no scraper matches the URL;
/// 500 on any other scrape or upsert failure.
/// </returns>
[HttpPost]
[Route("scrapeNovel")]
public async Task<IActionResult> ScrapeNovel(ScrapeNovelRequest request)
{
    try
    {
        var novel = await ScrapeNovel(request.NovelUrl);
        if (novel is null)
        {
            // FIX: the helper returns Task<Novel?>; previously a null result
            // flowed into Upsert and surfaced as an opaque NRE-backed 500.
            return StatusCode(500, $"Scraper returned no data for '{request.NovelUrl}'.");
        }
        var dbNovel = await _novelRepository.Upsert(novel);
        return Ok(dbNovel);
    }
    catch (NoMatchingScraperException) // FIX: exception variable was unused (CS0168)
    {
        return BadRequest("Invalid url, no valid scraper configured");
    }
    catch (Exception e)
    {
        // NOTE(review): returning the raw exception leaks internals to the client;
        // sanitize once error contracts are settled (kept for consistency with
        // the batch endpoint for now).
        return StatusCode(500, e);
    }
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user