Mirror of https://github.com/hkalexling/Mango.git
Plugin downloader WIP
parent 7e4532fb14
commit a994c43857
@@ -2,6 +2,17 @@ require "./api"
 require "zip"
 
 module MangaDex
+  class PageJob
+    property success = false
+    property url : String
+    property filename : String
+    property writer : Zip::Writer
+    property tries_remaning : Int32
+
+    def initialize(@url, @filename, @writer, @tries_remaning)
+    end
+  end
+
   class Downloader < Queue::Downloader
     @wait_seconds : Int32 = Config.current.mangadex["download_wait_seconds"]
       .to_i32
@@ -10,31 +21,17 @@ module MangaDex
     use_default
 
     def initialize
+      super
       @api = API.default
-
-      spawn do
-        loop do
-          sleep 1.second
-          next if @stopped || @downloading
-          begin
-            job = pop
-            next if job.nil?
-            download job
-          rescue e
-            Logger.error e
-          end
-        end
-      end
-      super
     end
 
     def pop : Queue::Job?
       job = nil
       DB.open "sqlite3://#{@queue.path}" do |db|
         begin
-          db.query_one "select * from queue where id not like '%-%' and " \
-                       "(status = 0 or status = 1) order by time limit 1" \
-            do |res|
+          db.query_one "select * from queue where id not like '%-%' " \
+                       "and (status = 0 or status = 1) " \
+                       "order by time limit 1" do |res|
             job = Queue::Job.from_query_result res
           end
         rescue
@@ -72,13 +69,13 @@ module MangaDex
 
       writer = Zip::Writer.new zip_path
       # Create a buffered channel. It works as an FIFO queue
-      channel = Channel(Queue::PageJob).new chapter.pages.size
+      channel = Channel(PageJob).new chapter.pages.size
       spawn do
         chapter.pages.each_with_index do |tuple, i|
           fn, url = tuple
           ext = File.extname fn
           fn = "#{i.to_s.rjust len, '0'}#{ext}"
-          page_job = Queue::PageJob.new url, fn, writer, @retries
+          page_job = PageJob.new url, fn, writer, @retries
           Logger.debug "Downloading #{url}"
           loop do
             sleep @wait_seconds.seconds
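
For context, a minimal standalone sketch (not part of the commit) of the buffered-channel pattern the downloader relies on: one fiber per page sends its result, and the consumer receives exactly that many results in the order they were sent.

# Hypothetical example; the real code sends PageJob instances instead of strings.
pages = 3
channel = Channel(String).new pages # buffered, so senders do not block

pages.times do |i|
  spawn { channel.send "page-#{i}" }
end

pages.times do
  puts channel.receive
end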
@@ -96,7 +93,7 @@ module MangaDex
       end
 
       spawn do
-        page_jobs = [] of Queue::PageJob
+        page_jobs = [] of PageJob
         chapter.pages.size.times do
           page_job = channel.receive
           Logger.debug "[#{page_job.success ? "success" : "failed"}] " \
@@ -134,7 +131,7 @@ module MangaDex
       end
     end
 
-    private def download_page(job : Queue::PageJob)
+    private def download_page(job : PageJob)
       Logger.debug "downloading #{job.url}"
       headers = HTTP::Headers{
         "User-agent" => "Mangadex.cr",
@@ -30,6 +30,7 @@ class CLI < Clim
     run do |opts|
       Config.load(opts.config).set_current
       MangaDex::Downloader.default
+      Plugin::Downloader.default
 
       # empty ARGV so it won't be passed to Kemal
       ARGV.clear
@@ -10,9 +10,9 @@ class Plugin
       job = nil
       DB.open "sqlite3://#{@queue.path}" do |db|
         begin
-          db.query_one "select * from queue where id like '%-%' and " \
-                       "(status = 0 or status = 1) order by time limit 1" \
-            do |res|
+          db.query_one "select * from queue where id like '%-%' " \
+                       "and (status = 0 or status = 1) " \
+                       "order by time limit 1" do |res|
             job = Queue::Job.from_query_result res
           end
         rescue
@@ -20,5 +20,105 @@ class Plugin
       end
       job
     end
+
+    private def download(job : Queue::Job)
+      @downloading = true
+      @queue.set_status Queue::JobStatus::Downloading, job
+
+      begin
+        unless job.plugin_name
+          raise "Job does not have plugin name specificed"
+        end
+
+        plugin = Plugin.new job.plugin_name.not_nil!
+        info = plugin.select_chapter job.id
+
+        title = info["title"].as_s
+        pages = info["pages"].as_i
+
+        @queue.set_pages pages, job
+        lib_dir = @library_path
+        manga_dir = File.join lib_dir, title
+        unless File.exists? manga_dir
+          Dir.mkdir_p manga_dir
+        end
+
+        zip_path = File.join manga_dir, "#{job.title}.cbz.part"
+        writer = Zip::Writer.new zip_path
+      rescue e
+        @queue.set_status Queue::JobStatus::Error, job
+        unless e.message.nil?
+          @queue.add_message e.message.not_nil!, job
+        end
+        @downloading = false
+        raise e
+      end
+
+      fail_count = 0
+
+      while page = plugin.next_page
+        fn = page["filename"].as_s
+        url = page["url"].as_s
+        headers = HTTP::Headers.new
+
+        if page["headers"]?
+          page["headers"].as_h.each do |k, v|
+            headers.add k, v.as_s
+          end
+        end
+
+        page_success = false
+        tries = 4
+
+        loop do
+          sleep plugin.wait_seconds.seconds
+          Logger.debug "downloading #{url}"
+          tries -= 1
+
+          begin
+            HTTP::Client.get url, headers do |res|
+              unless res.success?
+                raise "Failed to download page #{url}. " \
+                      "[#{res.status_code}] #{res.status_message}"
+              end
+              writer.add fn, res.body_io
+            end
+          rescue e
+            @queue.add_fail job
+            fail_count += 1
+            msg = "Failed to download page #{url}. Error: #{e}"
+            @queue.add_message msg, job
+            Logger.error msg
+            Logger.debug "[failed] #{url}"
+          else
+            @queue.add_success job
+            Logger.debug "[success] #{url}"
+            page_success = true
+          end
+
+          break if page_success || tries < 0
+        end
+      end
+
+      Logger.debug "Download completed. #{fail_count}/#{pages} failed"
+      writer.close
+      filename = File.join File.dirname(zip_path), File.basename(zip_path,
+        ".part")
+      File.rename zip_path, filename
+      Logger.debug "cbz File created at #{filename}"
+
+      zip_exception = validate_archive filename
+      if !zip_exception.nil?
+        @queue.add_message "The downloaded archive is corrupted. " \
+                           "Error: #{zip_exception}", job
+        @queue.set_status Queue::JobStatus::Error, job
+      elsif fail_count > 0
+        @queue.set_status Queue::JobStatus::MissingPages, job
+      else
+        @queue.set_status Queue::JobStatus::Completed, job
+      end
+
+      @downloading = false
+    end
   end
 end
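
A rough, self-contained sketch of the per-page header handling added above, assuming each page object returned by the plugin's next_page may carry an optional "headers" map of string values. The sample JSON below is made up.

require "http/headers"
require "json"

# Hypothetical page object; only the "filename", "url" and optional "headers"
# keys are taken from the diff above.
page = JSON.parse %({
  "filename": "001.png",
  "url": "https://example.com/1.png",
  "headers": {"Referer": "https://example.com/"}
})

headers = HTTP::Headers.new
if page["headers"]?
  page["headers"].as_h.each do |k, v|
    headers.add k, v.as_s
  end
end

p headers # the collected request headers for the page download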
@@ -36,7 +36,6 @@ class Plugin
 
   def initialize(filename : String)
     dir = Config.current.plugin_path
-    pp dir
     Dir.mkdir_p dir unless Dir.exists? dir
 
     @path = File.join dir, "#{filename}.js"
src/queue.cr (35 lines changed)
@@ -10,20 +10,25 @@ class Queue
     def initialize
       @queue = Queue.default
       @queue << self
+
+      spawn do
+        loop do
+          sleep 1.second
+          next if @stopped || @downloading
+          begin
+            job = pop
+            next if job.nil?
+            download job
+          rescue e
+            Logger.error e
+            @downloading = false
+          end
+        end
+      end
     end
 
     abstract def pop : Job?
-  end
-
-  class PageJob
-    property success = false
-    property url : String
-    property filename : String
-    property writer : Zip::Writer
-    property tries_remaning : Int32
-
-    def initialize(@url, @filename, @writer, @tries_remaning)
-    end
+    private abstract def download(job : Job)
   end
 
   enum JobStatus
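
The hunk above moves the polling fiber into the abstract downloader, so each concrete downloader only implements pop and download. A simplified standalone sketch of that pattern follows; the class names are illustrative and not from the codebase.

# Hypothetical, simplified version of the shared polling loop.
abstract class Worker
  @stopped = false
  @downloading = false

  def start
    spawn do
      loop do
        sleep 0.1.seconds
        next if @stopped || @downloading
        begin
          job = pop
          next if job.nil?
          download job
        rescue e
          puts "error: #{e}"
          @downloading = false
        end
      end
    end
  end

  abstract def pop : String?
  private abstract def download(job : String)
end

class FakeWorker < Worker
  @jobs = ["a", "b"]

  def pop : String?
    @jobs.shift?
  end

  private def download(job : String)
    @downloading = true
    puts "downloaded #{job}"
    @downloading = false
  end
end

FakeWorker.new.start
sleep 1.second # let the fiber drain the fake queue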
@@ -62,8 +67,9 @@ class Queue
       @time = Time.unix_ms time
 
       ary = @id.split("-")
-      if ary.size > 1
-        plugin_name = ary[0]
+      if ary.size == 2
+        @plugin_name = ary[0]
         @id = ary[1]
       end
     end
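
A small sketch of the job-id convention implied by the '%-%' queries and the split above: plugin job ids take the form "<plugin name>-<chapter id>", while MangaDex job ids contain no dash. The sample ids below are made up.

["7139", "myplugin-ch42"].each do |id|
  plugin_name = nil
  ary = id.split("-")
  if ary.size == 2
    plugin_name = ary[0]
    id = ary[1]
  end
  puts({plugin_name, id}) # {nil, "7139"}, then {"myplugin", "ch42"}
end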
@@ -74,7 +80,8 @@ class Queue
       job
     end
 
-    def initialize(@id, @manga_id, @title, @manga_title, @status, @time)
+    def initialize(@id, @manga_id, @title, @manga_title, @status, @time,
+                   @plugin_name = nil)
     end
 
     def to_json(json)