Mirror of https://github.com/hkalexling/Mango.git (synced 2025-08-03 03:15:31 -04:00)

Commit: 8b184ed48d
Parent: 3866c81588

Project-wise code formatting
@@ -1,14 +1,14 @@
 require "./spec_helper"

 describe Config do
   it "creates config if it does not exist" do
     with_default_config do |config, logger, path|
       File.exists?(path).should be_true
     end
   end

   it "correctly loads config" do
     config = Config.load "spec/asset/test-config.yml"
     config.port.should eq 3000
   end
 end
@@ -3,103 +3,102 @@ require "./spec_helper"
 include MangaDex

 describe Queue do
   it "creates DB at given path" do
     with_queue do |queue, path|
       File.exists?(path).should be_true
     end
   end

   it "pops nil when empty" do
     with_queue do |queue|
       queue.pop.should be_nil
     end
   end

   it "inserts multiple jobs" do
     with_queue do |queue|
       j1 = Job.new "1", "1", "title", "manga_title", JobStatus::Error,
         Time.utc
       j2 = Job.new "2", "2", "title", "manga_title", JobStatus::Completed,
         Time.utc
       j3 = Job.new "3", "3", "title", "manga_title", JobStatus::Pending,
         Time.utc
       j4 = Job.new "4", "4", "title", "manga_title",
         JobStatus::Downloading, Time.utc
       count = queue.push [j1, j2, j3, j4]
       count.should eq 4
     end
   end

   it "pops pending job" do
     with_queue do |queue|
       job = queue.pop
       job.should_not be_nil
       job.not_nil!.id.should eq "3"
     end
   end

   it "correctly counts jobs" do
     with_queue do |queue|
       queue.count.should eq 4
     end
   end

   it "deletes job" do
     with_queue do |queue|
       queue.delete "4"
       queue.count.should eq 3
     end
   end

   it "sets status" do
     with_queue do |queue|
       job = queue.pop.not_nil!
       queue.set_status JobStatus::Downloading, job
       job = queue.pop
       job.should_not be_nil
       job.not_nil!.status.should eq JobStatus::Downloading
     end
   end

   it "sets number of pages" do
     with_queue do |queue|
       job = queue.pop.not_nil!
       queue.set_pages 100, job
       job = queue.pop
       job.should_not be_nil
       job.not_nil!.pages.should eq 100
     end
   end

   it "adds fail/success counts" do
     with_queue do |queue|
       job = queue.pop.not_nil!
       queue.add_success job
       queue.add_success job
       queue.add_fail job
       job = queue.pop
       job.should_not be_nil
       job.not_nil!.success_count.should eq 2
       job.not_nil!.fail_count.should eq 1
     end
   end

   it "appends status message" do
     with_queue do |queue|
       job = queue.pop.not_nil!
       queue.add_message "hello", job
       queue.add_message "world", job
       job = queue.pop
       job.should_not be_nil
       job.not_nil!.status_message.should eq "\nhello\nworld"
     end
   end

   it "cleans up" do
     with_queue do
       true
     end
     State.reset
   end
 end
@@ -3,63 +3,63 @@ require "../src/context"
 require "../src/server"

 class State
   @@hash = {} of String => String

   def self.get(key)
     @@hash[key]?
   end

   def self.get!(key)
     @@hash[key]
   end

   def self.set(key, value)
     return if value.nil?
     @@hash[key] = value
   end

   def self.reset
     @@hash.clear
   end
 end

 def get_tempfile(name)
   path = State.get name
   if path.nil? || !File.exists? path
     file = File.tempfile name
     State.set name, file.path
     return file
   else
     return File.new path
   end
 end

 def with_default_config
   temp_config = get_tempfile "mango-test-config"
   config = Config.load temp_config.path
   logger = Logger.new config.log_level
   yield config, logger, temp_config.path
   temp_config.delete
 end

 def with_storage
   with_default_config do |config, logger|
     temp_db = get_tempfile "mango-test-db"
     storage = Storage.new temp_db.path, logger
     clear = yield storage, temp_db.path
     if clear == true
       temp_db.delete
     end
   end
 end

 def with_queue
   with_default_config do |config, logger|
     temp_queue_db = get_tempfile "mango-test-queue-db"
     queue = MangaDex::Queue.new temp_queue_db.path, logger
     clear = yield queue, temp_queue_db.path
     if clear == true
       temp_queue_db.delete
     end
   end
 end
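For reference, a minimal usage sketch (a hypothetical spec, assuming only the helpers defined above): the block's return value decides whether the temporary DB file is deleted.

  # Hypothetical spec built on the with_* helpers above.
  describe "example" do
    it "gets a throw-away queue backed by a tempfile" do
      with_queue do |queue, path|
        File.exists?(path).should be_true
        true # returning true asks with_queue to delete the temp DB afterwards
      end
    end
  end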
@@ -1,91 +1,91 @@
 require "./spec_helper"

 describe Storage do
   it "creates DB at given path" do
     with_storage do |storage, path|
       File.exists?(path).should be_true
     end
   end

   it "deletes user" do
     with_storage do |storage|
       storage.delete_user "admin"
     end
   end

   it "creates new user" do
     with_storage do |storage|
       storage.new_user "user", "123456", false
       storage.new_user "admin", "123456", true
     end
   end

   it "verifies username/password combination" do
     with_storage do |storage|
       user_token = storage.verify_user "user", "123456"
       admin_token = storage.verify_user "admin", "123456"
       user_token.should_not be_nil
       admin_token.should_not be_nil
       State.set "user_token", user_token
       State.set "admin_token", admin_token
     end
   end

   it "rejects duplicate username" do
     with_storage do |storage|
       expect_raises SQLite3::Exception,
         "UNIQUE constraint failed: users.username" do
         storage.new_user "admin", "123456", true
       end
     end
   end

   it "verifies token" do
     with_storage do |storage|
       user_token = State.get! "user_token"
       user = storage.verify_token user_token
       user.should eq "user"
     end
   end

   it "verfies admin token" do
     with_storage do |storage|
       admin_token = State.get! "admin_token"
       storage.verify_admin(admin_token).should be_true
     end
   end

   it "rejects non-admin token" do
     with_storage do |storage|
       user_token = State.get! "user_token"
       storage.verify_admin(user_token).should be_false
     end
   end

   it "updates user" do
     with_storage do |storage|
       storage.update_user "admin", "admin", "654321", true
       token = storage.verify_user "admin", "654321"
       admin_token = State.get! "admin_token"
       token.should eq admin_token
     end
   end

   it "logs user out" do
     with_storage do |storage|
       user_token = State.get! "user_token"
       admin_token = State.get! "admin_token"
       storage.logout user_token
       storage.logout admin_token
       storage.verify_token(user_token).should be_nil
       storage.verify_token(admin_token).should be_nil
     end
   end

   it "cleans up" do
     with_storage do
       true
     end
     State.reset
   end
 end
@@ -1,36 +1,36 @@
 require "./spec_helper"

 describe "compare_alphanumerically" do
   it "sorts filenames with leading zeros correctly" do
     ary = ["010.jpg", "001.jpg", "002.png"]
-    ary.sort! {|a, b|
+    ary.sort! { |a, b|
       compare_alphanumerically a, b
     }
     ary.should eq ["001.jpg", "002.png", "010.jpg"]
   end

   it "sorts filenames without leading zeros correctly" do
     ary = ["10.jpg", "1.jpg", "0.png", "0100.jpg"]
-    ary.sort! {|a, b|
+    ary.sort! { |a, b|
       compare_alphanumerically a, b
     }
     ary.should eq ["0.png", "1.jpg", "10.jpg", "0100.jpg"]
   end

   # https://ux.stackexchange.com/a/95441
   it "sorts like the stack exchange post" do
     ary = ["2", "12", "200000", "1000000", "a", "a12", "b2", "text2",
            "text2a", "text2a2", "text2a12", "text2ab", "text12", "text12a"]
-    ary.reverse.sort {|a, b|
+    ary.reverse.sort { |a, b|
       compare_alphanumerically a, b
     }.should eq ary
   end

   # https://github.com/hkalexling/Mango/issues/22
   it "handles numbers larger than Int32" do
     ary = ["14410155591588.jpg", "21410155591588.png", "104410155591588.jpg"]
-    ary.reverse.sort {|a, b|
+    ary.reverse.sort { |a, b|
       compare_alphanumerically a, b
     }.should eq ary
   end
 end
@@ -3,24 +3,23 @@ require "./storage"
 require "./util"

 class AuthHandler < Kemal::Handler
   def initialize(@storage : Storage)
   end

   def call(env)
-    return call_next(env) \
-      if request_path_startswith env, ["/login", "/logout"]
+    return call_next(env) if request_path_startswith env, ["/login", "/logout"]

     cookie = env.request.cookies.find { |c| c.name == "token" }
-    if cookie.nil? || ! @storage.verify_token cookie.value
+    if cookie.nil? || !@storage.verify_token cookie.value
       return env.redirect "/login"
     end

     if request_path_startswith env, ["/admin", "/api/admin", "/download"]
       unless @storage.verify_admin cookie.value
         env.response.status_code = 403
       end
     end

     call_next env
   end
 end
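A hedged sketch of how this handler is typically wired into Kemal (the storage path and logger here are assumed, not taken from this commit; add_handler is Kemal's standard middleware hook):

  storage = Storage.new "~/mango/mango.db", logger # assumed path; the app uses config.db_path
  add_handler AuthHandler.new storage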
102 src/config.cr
@@ -1,60 +1,58 @@
 require "yaml"

 class Config
   include YAML::Serializable

   property port : Int32 = 9000
-  property library_path : String = \
-    File.expand_path "~/mango/library", home: true
-  property db_path : String = \
-    File.expand_path "~/mango/mango.db", home: true
+  property library_path : String = File.expand_path "~/mango/library", home: true
+  property db_path : String = File.expand_path "~/mango/mango.db", home: true
   @[YAML::Field(key: "scan_interval_minutes")]
   property scan_interval : Int32 = 5
   property log_level : String = "info"
-  property mangadex = Hash(String, String|Int32).new
+  property mangadex = Hash(String, String | Int32).new

   @[YAML::Field(ignore: true)]
   @mangadex_defaults = {
     "base_url" => "https://mangadex.org",
     "api_url" => "https://mangadex.org/api",
     "download_wait_seconds" => 5,
     "download_retries" => 4,
-    "download_queue_db_path" => File.expand_path "~/mango/queue.db",
-      home: true
+    "download_queue_db_path" => File.expand_path("~/mango/queue.db",
+      home: true),
   }

   def self.load(path : String?)
     path = "~/.config/mango/config.yml" if path.nil?
     cfg_path = File.expand_path path, home: true
     if File.exists? cfg_path
       config = self.from_yaml File.read cfg_path
       config.fill_defaults
       return config
     end
     puts "The config file #{cfg_path} does not exist." \
       " Do you want mango to dump the default config there? [Y/n]"
     input = gets
     if input && input.downcase == "n"
       abort "Aborting..."
     end
     default = self.allocate
     default.fill_defaults
     cfg_dir = File.dirname cfg_path
     unless Dir.exists? cfg_dir
       Dir.mkdir_p cfg_dir
     end
     File.write cfg_path, default.to_yaml
     puts "The config file has been created at #{cfg_path}."
     default
   end

   def fill_defaults
     {% for hash_name in ["mangadex"] %}
       @{{hash_name.id}}_defaults.map do |k, v|
         if @{{hash_name.id}}[k]?.nil?
           @{{hash_name.id}}[k] = v
         end
       end
     {% end %}
   end
 end
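As a usage sketch (hypothetical call site; the values reflect the defaults declared above), loading a config and reading a field looks like:

  config = Config.load nil                 # falls back to ~/.config/mango/config.yml
  puts config.port                         # 9000 unless overridden
  puts config.mangadex["download_retries"] # 4, filled in by fill_defaults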
@@ -4,18 +4,18 @@ require "./storage"
 require "./logger"

 class Context
   property config : Config
   property library : Library
   property storage : Storage
   property logger : Logger
   property queue : MangaDex::Queue

   def initialize(@config, @logger, @library, @storage, @queue)
   end

   {% for lvl in Logger::LEVELS %}
     def {{lvl.id}}(msg)
       @logger.{{lvl.id}} msg
     end
   {% end %}
 end
627 src/library.cr
@@ -5,359 +5,364 @@ require "uri"
 require "./util"

 struct Image
   property data : Bytes
   property mime : String
   property filename : String
   property size : Int32

   def initialize(@data, @mime, @filename, @size)
   end
 end

 class Entry
   property zip_path : String, book : Title, title : String,
     size : String, pages : Int32, cover_url : String, id : String,
     title_id : String, encoded_path : String, encoded_title : String,
     mtime : Time

   def initialize(path, @book, @title_id, storage)
     @zip_path = path
     @encoded_path = URI.encode path
     @title = File.basename path, File.extname path
     @encoded_title = URI.encode @title
     @size = (File.size path).humanize_bytes
     file = Zip::File.new path
     @pages = file.entries
       .select { |e|
         ["image/jpeg", "image/png"].includes? \
           MIME.from_filename? e.filename
       }
       .size
     file.close
     @id = storage.get_id @zip_path, false
     @cover_url = "/api/page/#{@title_id}/#{@id}/1"
     @mtime = File.info(@zip_path).modification_time
   end

   def to_json(json : JSON::Builder)
     json.object do
       {% for str in ["zip_path", "title", "size", "cover_url", "id",
                      "title_id", "encoded_path", "encoded_title"] %}
         json.field {{str}}, @{{str.id}}
       {% end %}
       json.field "display_name", @book.display_name @title
-      json.field "pages" {json.number @pages}
-      json.field "mtime" {json.number @mtime.to_unix}
+      json.field "pages" { json.number @pages }
+      json.field "mtime" { json.number @mtime.to_unix }
     end
   end

   def display_name
     @book.display_name @title
   end

   def encoded_display_name
     URI.encode display_name
   end

   def read_page(page_num)
     Zip::File.open @zip_path do |file|
       page = file.entries
         .select { |e|
           ["image/jpeg", "image/png"].includes? \
             MIME.from_filename? e.filename
         }
         .sort { |a, b|
           compare_alphanumerically a.filename, b.filename
         }
         .[page_num - 1]
       page.open do |io|
         slice = Bytes.new page.uncompressed_size
         bytes_read = io.read_fully? slice
         unless bytes_read
           return nil
         end
-        return Image.new slice, MIME.from_filename(page.filename),\
+        return Image.new slice, MIME.from_filename(page.filename),
           page.filename, bytes_read
       end
     end
   end
 end

 class Title
   property dir : String, parent_id : String, title_ids : Array(String),
     entries : Array(Entry), title : String, id : String,
     encoded_title : String, mtime : Time

   def initialize(@dir : String, @parent_id, storage,
                  @logger : Logger, @library : Library)
     @id = storage.get_id @dir, true
     @title = File.basename dir
     @encoded_title = URI.encode @title
     @title_ids = [] of String
     @entries = [] of Entry
     @mtime = File.info(dir).modification_time

     Dir.entries(dir).each do |fn|
       next if fn.starts_with? "."
       path = File.join dir, fn
       if File.directory? path
         title = Title.new path, @id, storage, @logger, library
         next if title.entries.size == 0 && title.titles.size == 0
         @library.title_hash[title.id] = title
         @title_ids << title.id
         next
       end
       if [".zip", ".cbz"].includes? File.extname path
         next if !valid_zip path
         entry = Entry.new path, self, @id, storage
         @entries << entry if entry.pages > 0
       end
     end

     mtimes = [@mtime]
-    mtimes += @title_ids.map{|e| @library.title_hash[e].mtime}
-    mtimes += @entries.map{|e| e.mtime}
+    mtimes += @title_ids.map { |e| @library.title_hash[e].mtime }
+    mtimes += @entries.map { |e| e.mtime }
     @mtime = mtimes.max

     @title_ids.sort! do |a, b|
       compare_alphanumerically @library.title_hash[a].title,
         @library.title_hash[b].title
     end
     @entries.sort! do |a, b|
       compare_alphanumerically a.title, b.title
     end
   end

   def to_json(json : JSON::Builder)
     json.object do
       {% for str in ["dir", "title", "id", "encoded_title"] %}
         json.field {{str}}, @{{str.id}}
       {% end %}
       json.field "display_name", display_name
-      json.field "mtime" {json.number @mtime.to_unix}
+      json.field "mtime" { json.number @mtime.to_unix }
       json.field "titles" do
         json.raw self.titles.to_json
       end
       json.field "entries" do
         json.raw @entries.to_json
       end
       json.field "parents" do
         json.array do
           self.parents.each do |title|
             json.object do
               json.field "title", title.title
               json.field "id", title.id
             end
           end
         end
       end
     end
   end

   def titles
-    @title_ids.map {|tid| @library.get_title! tid}
+    @title_ids.map { |tid| @library.get_title! tid }
   end

   def parents
     ary = [] of Title
     tid = @parent_id
     while !tid.empty?
       title = @library.get_title! tid
       ary << title
       tid = title.parent_id
     end
     ary
   end

   def size
     @entries.size + @title_ids.size
   end

   # When downloading from MangaDex, the zip/cbz file would not be valid
   # before the download is completed. If we scan the zip file,
   # Entry.new would throw, so we use this method to check before
   # constructing Entry
   private def valid_zip(path : String)
     begin
       file = Zip::File.new path
       file.close
       return true
     rescue
-      @logger.warn "File #{path} is corrupted or is not a valid zip "\
+      @logger.warn "File #{path} is corrupted or is not a valid zip " \
                    "archive. Ignoring it."
       return false
     end
   end

   def get_entry(eid)
     @entries.find { |e| e.id == eid }
   end

   def display_name
     info = TitleInfo.new @dir
     dn = info.display_name
     dn.empty? ? @title : dn
   end

   def encoded_display_name
     URI.encode display_name
   end

   def display_name(entry_name)
     info = TitleInfo.new @dir
     dn = info.entry_display_name[entry_name]?
     unless dn.nil? || dn.empty?
       return dn
     end
     entry_name
   end

   def set_display_name(dn)
     info = TitleInfo.new @dir
     info.display_name = dn
     info.save
   end

   def set_display_name(entry_name : String, dn)
     info = TitleInfo.new @dir
     info.entry_display_name[entry_name] = dn
     info.save
   end

   # For backward backward compatibility with v0.1.0, we save entry titles
   # instead of IDs in info.json
   def save_progress(username, entry, page)
     info = TitleInfo.new @dir
     if info.progress[username]?.nil?
       info.progress[username] = {entry => page}
       info.save
       return
     end
     info.progress[username][entry] = page
     info.save
   end

   def load_progress(username, entry)
     info = TitleInfo.new @dir
     if info.progress[username]?.nil?
       return 0
     end
     if info.progress[username][entry]?.nil?
       return 0
     end
     info.progress[username][entry]
   end

   def load_percetage(username, entry)
     info = TitleInfo.new @dir
     page = load_progress username, entry
-    entry_obj = @entries.find{|e| e.title == entry}
+    entry_obj = @entries.find { |e| e.title == entry }
     return 0.0 if entry_obj.nil?
     page / entry_obj.pages
   end

   def load_percetage(username)
     return 0.0 if @entries.empty?
     read_pages = total_pages = 0
     @entries.each do |e|
       read_pages += load_progress username, e.title
       total_pages += e.pages
     end
     read_pages / total_pages
   end

   def next_entry(current_entry_obj)
     idx = @entries.index current_entry_obj
     return nil if idx.nil? || idx == @entries.size - 1
     @entries[idx + 1]
   end
 end

 class TitleInfo
   include JSON::Serializable

   property comment = "Generated by Mango. DO NOT EDIT!"
   # { user1: { entry1: 10, entry2: 0 } }
   property progress = {} of String => Hash(String, Int32)
   property display_name = ""
   # { entry1 : "display name" }
   property entry_display_name = {} of String => String

   @[JSON::Field(ignore: true)]
   property dir : String = ""

   def initialize(@dir)
     json_path = File.join @dir, "info.json"
     if File.exists? json_path
       info = TitleInfo.from_json File.read json_path
       @progress = info.progress.clone
       @display_name = info.display_name
       @entry_display_name = info.entry_display_name.clone
     end
   end

   def save
     json_path = File.join @dir, "info.json"
     File.write json_path, self.to_pretty_json
   end
 end

 class Library
   property dir : String, title_ids : Array(String), scan_interval : Int32,
     logger : Logger, storage : Storage, title_hash : Hash(String, Title)

   def initialize(@dir, @scan_interval, @logger, @storage)
     # explicitly initialize @titles to bypass the compiler check. it will
     # be filled with actual Titles in the `scan` call below
     @title_ids = [] of String
     @title_hash = {} of String => Title

     return scan if @scan_interval < 1
     spawn do
       loop do
         start = Time.local
         scan
         ms = (Time.local - start).total_milliseconds
         @logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
         sleep @scan_interval * 60
       end
     end
   end
+
   def titles
-    @title_ids.map {|tid| self.get_title!(tid) }
+    @title_ids.map { |tid| self.get_title!(tid) }
   end
+
   def to_json(json : JSON::Builder)
     json.object do
       json.field "dir", @dir
       json.field "titles" do
         json.raw self.titles.to_json
       end
     end
   end
+
   def get_title(tid)
     @title_hash[tid]?
   end
+
   def get_title!(tid)
     @title_hash[tid]
   end
+
   def scan
     unless Dir.exists? @dir
       @logger.info "The library directory #{@dir} does not exist. " \
                    "Attempting to create it"
       Dir.mkdir_p @dir
     end
     @title_ids.clear
     (Dir.entries @dir)
       .select { |fn| !fn.starts_with? "." }
       .map { |fn| File.join @dir, fn }
       .select { |path| File.directory? path }
       .map { |path| Title.new path, "", @storage, @logger, self }
       .select { |title| !(title.entries.empty? && title.titles.empty?) }
       .sort { |a, b| a.title <=> b.title }
       .each do |title|
         @title_hash[title.id] = title
         @title_ids << title.id
       end
     @logger.debug "Scan completed"
   end
 end
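A brief usage sketch (hypothetical wiring, matching Library#initialize above): the library scans immediately when scan_interval is below 1 and otherwise rescans in a background fiber.

  library = Library.new config.library_path, config.scan_interval, logger, storage
  library.scan                # safe to trigger a scan manually as well
  puts library.titles.size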
@@ -2,25 +2,25 @@ require "kemal"
 require "./logger"

 class LogHandler < Kemal::BaseLogHandler
   def initialize(@logger : Logger)
   end

   def call(env)
     elapsed_time = Time.measure { call_next env }
     elapsed_text = elapsed_text elapsed_time
     msg = "#{env.response.status_code} #{env.request.method}" \
           " #{env.request.resource} #{elapsed_text}"
-    @logger.debug(msg)
+    @logger.debug msg
     env
   end

   def write(msg)
-    @logger.debug(msg)
+    @logger.debug msg
   end

   private def elapsed_text(elapsed)
     millis = elapsed.total_milliseconds
     return "#{millis.round(2)}ms" if millis >= 1
     "#{(millis * 1000).round(2)}µs"
   end
 end
@@ -2,57 +2,57 @@ require "log"
 require "colorize"

 class Logger
   LEVELS = ["debug", "error", "fatal", "info", "warn"]
   SEVERITY_IDS = [0, 4, 5, 2, 3]
   COLORS = [:light_cyan, :light_red, :red, :light_yellow, :light_magenta]

   @@severity : Log::Severity = :info

   def initialize(level : String)
     {% begin %}
       case level.downcase
       when "off"
         @@severity = :none
       {% for lvl, i in LEVELS %}
         when {{lvl}}
           @@severity = Log::Severity.new SEVERITY_IDS[{{i}}]
       {% end %}
       else
         raise "Unknown log level #{level}"
       end
     {% end %}

     @log = Log.for("")

     @backend = Log::IOBackend.new
     @backend.formatter = ->(entry : Log::Entry, io : IO) do
       color = :default
       {% begin %}
         case entry.severity.label.to_s().downcase
         {% for lvl, i in LEVELS %}
           when {{lvl}}, "#{{{lvl}}}ing"
             color = COLORS[{{i}}]
         {% end %}
         else
         end
       {% end %}

       io << "[#{entry.severity.label}]".ljust(10).colorize(color)
       io << entry.timestamp.to_s("%Y/%m/%d %H:%M:%S") << " | "
       io << entry.message
     end

     Log.builder.bind "*", @@severity, @backend
   end

   # Ignores @@severity and always log msg
   def log(msg)
     @backend.write Log::Entry.new "", Log::Severity::None, msg, nil
   end

   {% for lvl in LEVELS %}
     def {{lvl.id}}(msg)
       @log.{{lvl.id}} { msg }
     end
   {% end %}
 end
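A minimal usage sketch (assuming only the level strings accepted by initialize above):

  logger = Logger.new "info"
  logger.info "server started"       # printed through the colorized IO backend
  logger.debug "hidden at info level"
  logger.log "always printed, regardless of @@severity"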
@ -2,202 +2,200 @@ require "http/client"
|
|||||||
require "json"
|
require "json"
|
||||||
require "csv"
|
require "csv"
|
||||||
|
|
||||||
macro string_properties (names)
|
macro string_properties(names)
|
||||||
{% for name in names %}
|
{% for name in names %}
|
||||||
property {{name.id}} = ""
|
property {{name.id}} = ""
|
||||||
{% end %}
|
{% end %}
|
||||||
end
|
end
|
||||||
|
|
||||||
macro parse_strings_from_json (names)
|
macro parse_strings_from_json(names)
|
||||||
{% for name in names %}
|
{% for name in names %}
|
||||||
@{{name.id}} = obj[{{name}}].as_s
|
@{{name.id}} = obj[{{name}}].as_s
|
||||||
{% end %}
|
{% end %}
|
||||||
end
|
end
|
||||||
|
|
||||||
module MangaDex
|
module MangaDex
|
||||||
class Chapter
|
class Chapter
|
||||||
string_properties ["lang_code", "title", "volume", "chapter"]
|
string_properties ["lang_code", "title", "volume", "chapter"]
|
||||||
property manga : Manga
|
property manga : Manga
|
||||||
property time = Time.local
|
property time = Time.local
|
||||||
property id : String
|
property id : String
|
||||||
property full_title = ""
|
property full_title = ""
|
||||||
property language = ""
|
property language = ""
|
||||||
property pages = [] of {String, String} # filename, url
|
property pages = [] of {String, String} # filename, url
|
||||||
property groups = [] of {Int32, String} # group_id, group_name
|
property groups = [] of {Int32, String} # group_id, group_name
|
||||||
|
|
||||||
def initialize(@id, json_obj : JSON::Any, @manga, lang :
|
def initialize(@id, json_obj : JSON::Any, @manga,
|
||||||
Hash(String, String))
|
lang : Hash(String, String))
|
||||||
self.parse_json json_obj, lang
|
self.parse_json json_obj, lang
|
||||||
end
|
end
|
||||||
|
|
||||||
def to_info_json
|
def to_info_json
|
||||||
JSON.build do |json|
|
JSON.build do |json|
|
||||||
json.object do
|
json.object do
|
||||||
{% for name in ["id", "title", "volume", "chapter",
|
{% for name in ["id", "title", "volume", "chapter",
|
||||||
"language", "full_title"] %}
|
"language", "full_title"] %}
|
||||||
json.field {{name}}, @{{name.id}}
|
json.field {{name}}, @{{name.id}}
|
||||||
{% end %}
|
{% end %}
|
||||||
json.field "time", @time.to_unix.to_s
|
json.field "time", @time.to_unix.to_s
|
||||||
json.field "manga_title", @manga.title
|
json.field "manga_title", @manga.title
|
||||||
json.field "manga_id", @manga.id
|
json.field "manga_id", @manga.id
|
||||||
json.field "groups" do
|
json.field "groups" do
|
||||||
json.object do
|
json.object do
|
||||||
@groups.each do |gid, gname|
|
@groups.each do |gid, gname|
|
||||||
json.field gname, gid
|
json.field gname, gid
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def parse_json(obj, lang)
|
def parse_json(obj, lang)
|
||||||
begin
|
begin
|
||||||
parse_strings_from_json ["lang_code", "title", "volume",
|
parse_strings_from_json ["lang_code", "title", "volume",
|
||||||
"chapter"]
|
"chapter"]
|
||||||
language = lang[@lang_code]?
|
language = lang[@lang_code]?
|
||||||
@language = language if language
|
@language = language if language
|
||||||
@time = Time.unix obj["timestamp"].as_i
|
@time = Time.unix obj["timestamp"].as_i
|
||||||
suffixes = ["", "_2", "_3"]
|
suffixes = ["", "_2", "_3"]
|
||||||
suffixes.each do |s|
|
suffixes.each do |s|
|
||||||
gid = obj["group_id#{s}"].as_i
|
gid = obj["group_id#{s}"].as_i
|
||||||
next if gid == 0
|
next if gid == 0
|
||||||
gname = obj["group_name#{s}"].as_s
|
gname = obj["group_name#{s}"].as_s
|
||||||
@groups << {gid, gname}
|
@groups << {gid, gname}
|
||||||
end
|
end
|
||||||
@full_title = @title
|
@full_title = @title
|
||||||
unless @chapter.empty?
|
unless @chapter.empty?
|
||||||
@full_title = "Ch.#{@chapter} " + @full_title
|
@full_title = "Ch.#{@chapter} " + @full_title
|
||||||
end
|
end
|
||||||
unless @volume.empty?
|
unless @volume.empty?
|
||||||
@full_title = "Vol.#{@volume} " + @full_title
|
@full_title = "Vol.#{@volume} " + @full_title
|
||||||
end
|
end
|
||||||
rescue e
|
rescue e
|
||||||
raise "failed to parse json: #{e}"
|
raise "failed to parse json: #{e}"
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
class Manga
|
|
||||||
string_properties ["cover_url", "description", "title", "author",
|
|
||||||
"artist"]
|
|
||||||
property chapters = [] of Chapter
|
|
||||||
property id : String
|
|
||||||
|
|
||||||
def initialize(@id, json_obj : JSON::Any)
|
class Manga
|
||||||
self.parse_json json_obj
|
string_properties ["cover_url", "description", "title", "author", "artist"]
|
||||||
end
|
property chapters = [] of Chapter
|
||||||
|
property id : String
|
||||||
|
|
||||||
def to_info_json(with_chapters = true)
|
def initialize(@id, json_obj : JSON::Any)
|
||||||
JSON.build do |json|
|
self.parse_json json_obj
|
||||||
json.object do
|
end
|
||||||
{% for name in ["id", "title", "description",
|
|
||||||
"author", "artist", "cover_url"] %}
|
|
||||||
json.field {{name}}, @{{name.id}}
|
|
||||||
{% end %}
|
|
||||||
if with_chapters
|
|
||||||
json.field "chapters" do
|
|
||||||
json.array do
|
|
||||||
@chapters.each do |c|
|
|
||||||
json.raw c.to_info_json
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def parse_json(obj)
|
def to_info_json(with_chapters = true)
|
||||||
begin
|
JSON.build do |json|
|
||||||
parse_strings_from_json ["cover_url", "description", "title",
|
json.object do
|
||||||
"author", "artist"]
|
{% for name in ["id", "title", "description", "author", "artist",
|
||||||
rescue e
|
"cover_url"] %}
|
||||||
raise "failed to parse json: #{e}"
|
json.field {{name}}, @{{name.id}}
|
||||||
end
|
{% end %}
|
||||||
end
|
if with_chapters
|
||||||
end
|
json.field "chapters" do
|
||||||
class API
|
json.array do
|
||||||
def initialize(@base_url = "https://mangadex.org/api/")
|
@chapters.each do |c|
|
||||||
@lang = {} of String => String
|
json.raw c.to_info_json
|
||||||
CSV.each_row {{read_file "src/assets/lang_codes.csv"}} do |row|
|
end
|
||||||
@lang[row[1]] = row[0]
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def get(url)
|
def parse_json(obj)
|
||||||
headers = HTTP::Headers {
|
begin
|
||||||
"User-agent" => "Mangadex.cr"
|
parse_strings_from_json ["cover_url", "description", "title", "author",
|
||||||
}
|
"artist"]
|
||||||
res = HTTP::Client.get url, headers
|
rescue e
|
||||||
raise "Failed to get #{url}. [#{res.status_code}] "\
|
raise "failed to parse json: #{e}"
|
||||||
"#{res.status_message}" if !res.success?
|
end
|
||||||
JSON.parse res.body
|
end
|
  end

  class API
    def initialize(@base_url = "https://mangadex.org/api/")
      @lang = {} of String => String
      CSV.each_row {{read_file "src/assets/lang_codes.csv"}} do |row|
        @lang[row[1]] = row[0]
      end
    end

    def get(url)
      headers = HTTP::Headers{
        "User-agent" => "Mangadex.cr",
      }
      res = HTTP::Client.get url, headers
      raise "Failed to get #{url}. [#{res.status_code}] " \
            "#{res.status_message}" if !res.success?
      JSON.parse res.body
    end

    def get_manga(id)
      obj = self.get File.join @base_url, "manga/#{id}"
      if obj["status"]? != "OK"
        raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
      end
      begin
        manga = Manga.new id, obj["manga"]
        obj["chapter"].as_h.map do |k, v|
          chapter = Chapter.new k, v, manga, @lang
          manga.chapters << chapter
        end
        return manga
      rescue
        raise "Failed to parse JSON"
      end
    end

    def get_chapter(chapter : Chapter)
      obj = self.get File.join @base_url, "chapter/#{chapter.id}"
      if obj["status"]? == "external"
        raise "This chapter is hosted on an external site " \
              "#{obj["external"]?}, and Mango does not support " \
              "external chapters."
      end
      if obj["status"]? != "OK"
        raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
      end
      begin
        server = obj["server"].as_s
        hash = obj["hash"].as_s
        chapter.pages = obj["page_array"].as_a.map do |fn|
          {
            fn.as_s,
            "#{server}#{hash}/#{fn.as_s}",
          }
        end
      rescue
        raise "Failed to parse JSON"
      end
    end

    def get_chapter(id : String)
      obj = self.get File.join @base_url, "chapter/#{id}"
      if obj["status"]? == "external"
        raise "This chapter is hosted on an external site " \
              "#{obj["external"]?}, and Mango does not support " \
              "external chapters."
      end
      if obj["status"]? != "OK"
        raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`"
      end
      manga_id = ""
      begin
        manga_id = obj["manga_id"].as_i.to_s
      rescue
        raise "Failed to parse JSON"
      end
      manga = self.get_manga manga_id
      chapter = manga.chapters.find { |c| c.id == id }.not_nil!
      self.get_chapter chapter
      return chapter
    end
  end
end
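For reference, a minimal usage sketch of the client above (not part of the commit). The manga ID is a placeholder, and `get_chapter` is called on a `Chapter` taken from `Manga#chapters`, as the code above expects:

api = MangaDex::API.new
manga = api.get_manga "7139"   # placeholder MangaDex manga ID
chapter = manga.chapters.first
api.get_chapter chapter        # fills chapter.pages with {filename, url} tuples
puts chapter.pages.size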
@@ -2,373 +2,374 @@ require "./api"
require "sqlite3"

module MangaDex
  class PageJob
    property success = false
    property url : String
    property filename : String
    property writer : Zip::Writer
    property tries_remaning : Int32

    def initialize(@url, @filename, @writer, @tries_remaning)
    end
  end

  enum JobStatus
    Pending # 0
    Downloading # 1
    Error # 2
    Completed # 3
    MissingPages # 4
  end

  struct Job
    property id : String
    property manga_id : String
    property title : String
    property manga_title : String
    property status : JobStatus
    property status_message : String = ""
    property pages : Int32 = 0
    property success_count : Int32 = 0
    property fail_count : Int32 = 0
    property time : Time

    def parse_query_result(res : DB::ResultSet)
      @id = res.read String
      @manga_id = res.read String
      @title = res.read String
      @manga_title = res.read String
      status = res.read Int32
      @status_message = res.read String
      @pages = res.read Int32
      @success_count = res.read Int32
      @fail_count = res.read Int32
      time = res.read Int64
      @status = JobStatus.new status
      @time = Time.unix_ms time
    end

    # Raises if the result set does not contain the correct set of columns
    def self.from_query_result(res : DB::ResultSet)
      job = Job.allocate
      job.parse_query_result res
      return job
    end

    def initialize(@id, @manga_id, @title, @manga_title, @status, @time)
    end

    def to_json(json)
      json.object do
        {% for name in ["id", "manga_id", "title", "manga_title",
                        "status_message"] %}
          json.field {{name}}, @{{name.id}}
        {% end %}
        {% for name in ["pages", "success_count", "fail_count"] %}
          json.field {{name}} do
            json.number @{{name.id}}
          end
        {% end %}
        json.field "status", @status.to_s
        json.field "time" do
          json.number @time.to_unix_ms
        end
      end
    end
  end
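A small sketch (not from the repo) of the JSON that `Job#to_json` emits, using placeholder IDs and titles; it assumes `require "json"` is in effect so the zero-argument `to_json` overload drives the builder method above:

job = MangaDex::Job.new "350621", "7139", "Vol. 1 Ch. 1", "Some Manga",
  MangaDex::JobStatus::Pending, Time.utc
puts job.to_json
# => {"id":"350621","manga_id":"7139","title":"Vol. 1 Ch. 1",
#     "manga_title":"Some Manga","status_message":"","pages":0,
#     "success_count":0,"fail_count":0,"status":"Pending",
#     "time":<unix milliseconds>}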
def initialize(@path : String, @logger : Logger)
|
class Queue
|
||||||
dir = File.dirname path
|
property downloader : Downloader?
|
||||||
unless Dir.exists? dir
|
|
||||||
@logger.info "The queue DB directory #{dir} does not exist. " \
|
|
||||||
"Attepmting to create it"
|
|
||||||
Dir.mkdir_p dir
|
|
||||||
end
|
|
||||||
DB.open "sqlite3://#{@path}" do |db|
|
|
||||||
begin
|
|
||||||
db.exec "create table if not exists queue " \
|
|
||||||
"(id text, manga_id text, title text, manga_title " \
|
|
||||||
"text, status integer, status_message text, " \
|
|
||||||
"pages integer, success_count integer, " \
|
|
||||||
"fail_count integer, time integer)"
|
|
||||||
db.exec "create unique index if not exists id_idx " \
|
|
||||||
"on queue (id)"
|
|
||||||
db.exec "create index if not exists manga_id_idx " \
|
|
||||||
"on queue (manga_id)"
|
|
||||||
db.exec "create index if not exists status_idx " \
|
|
||||||
"on queue (status)"
|
|
||||||
rescue e
|
|
||||||
@logger.error "Error when checking tables in DB: #{e}"
|
|
||||||
raise e
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# Returns the earliest job in queue or nil if the job cannot be parsed.
|
def initialize(@path : String, @logger : Logger)
|
||||||
# Returns nil if queue is empty
|
dir = File.dirname path
|
||||||
def pop
|
unless Dir.exists? dir
|
||||||
job = nil
|
@logger.info "The queue DB directory #{dir} does not exist. " \
|
||||||
DB.open "sqlite3://#{@path}" do |db|
|
"Attepmting to create it"
|
||||||
begin
|
Dir.mkdir_p dir
|
||||||
db.query_one "select * from queue where status = 0 "\
|
end
|
||||||
"or status = 1 order by time limit 1" do |res|
|
DB.open "sqlite3://#{@path}" do |db|
|
||||||
job = Job.from_query_result res
|
begin
|
||||||
end
|
db.exec "create table if not exists queue " \
|
||||||
rescue
|
"(id text, manga_id text, title text, manga_title " \
|
||||||
end
|
"text, status integer, status_message text, " \
|
||||||
end
|
"pages integer, success_count integer, " \
|
||||||
return job
|
"fail_count integer, time integer)"
|
||||||
end
|
db.exec "create unique index if not exists id_idx " \
|
||||||
|
"on queue (id)"
|
||||||
|
db.exec "create index if not exists manga_id_idx " \
|
||||||
|
"on queue (manga_id)"
|
||||||
|
db.exec "create index if not exists status_idx " \
|
||||||
|
"on queue (status)"
|
||||||
|
rescue e
|
||||||
|
@logger.error "Error when checking tables in DB: #{e}"
|
||||||
|
raise e
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
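Spelled out in one piece for readability, the concatenated statement fragments above amount to the following SQLite DDL:

create table if not exists queue
  (id text, manga_id text, title text, manga_title text,
   status integer, status_message text, pages integer,
   success_count integer, fail_count integer, time integer);
create unique index if not exists id_idx on queue (id);
create index if not exists manga_id_idx on queue (manga_id);
create index if not exists status_idx on queue (status);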
    # Returns the earliest job in queue or nil if the job cannot be parsed.
    # Returns nil if queue is empty
    def pop
      job = nil
      DB.open "sqlite3://#{@path}" do |db|
        begin
          db.query_one "select * from queue where status = 0 " \
                       "or status = 1 order by time limit 1" do |res|
            job = Job.from_query_result res
          end
        rescue
        end
      end
      return job
    end

    # Push an array of jobs into the queue, and return the number of jobs
    # inserted. Any job already exists in the queue will be ignored.
    def push(jobs : Array(Job))
      start_count = self.count
      DB.open "sqlite3://#{@path}" do |db|
        jobs.each do |job|
          db.exec "insert or ignore into queue values " \
                  "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            job.id, job.manga_id, job.title, job.manga_title,
            job.status.to_i, job.status_message, job.pages,
            job.success_count, job.fail_count, job.time.to_unix_ms
        end
      end
      self.count - start_count
    end

    def reset(id : String)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set status = 0, status_message = '', " \
                "pages = 0, success_count = 0, fail_count = 0 " \
                "where id = (?)", id
      end
    end

    def reset(job : Job)
      self.reset job.id
    end

    # Reset all failed tasks (missing pages and error)
    def reset
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set status = 0, status_message = '', " \
                "pages = 0, success_count = 0, fail_count = 0 " \
                "where status = 2 or status = 4"
      end
    end

    def delete(id : String)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "delete from queue where id = (?)", id
      end
    end

    def delete(job : Job)
      self.delete job.id
    end

    def delete_status(status : JobStatus)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "delete from queue where status = (?)", status.to_i
      end
    end

    def count_status(status : JobStatus)
      DB.open "sqlite3://#{@path}" do |db|
        return db.query_one "select count(*) from queue where " \
                            "status = (?)", status.to_i, as: Int32
      end
    end

    def count
      DB.open "sqlite3://#{@path}" do |db|
        return db.query_one "select count(*) from queue", as: Int32
      end
    end

    def set_status(status : JobStatus, job : Job)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set status = (?) where id = (?)",
          status.to_i, job.id
      end
    end

    def get_all
      jobs = [] of Job
      DB.open "sqlite3://#{@path}" do |db|
        jobs = db.query_all "select * from queue order by time" do |rs|
          Job.from_query_result rs
        end
      end
      return jobs
    end

    def add_success(job : Job)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set success_count = success_count + 1 " \
                "where id = (?)", job.id
      end
    end

    def add_fail(job : Job)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set fail_count = fail_count + 1 " \
                "where id = (?)", job.id
      end
    end

    def set_pages(pages : Int32, job : Job)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set pages = (?), success_count = 0, " \
                "fail_count = 0 where id = (?)", pages, job.id
      end
    end

    def add_message(msg : String, job : Job)
      DB.open "sqlite3://#{@path}" do |db|
        db.exec "update queue set status_message = " \
                "status_message || (?) || (?) where id = (?)",
          "\n", msg, job.id
      end
    end

    def pause
      @downloader.not_nil!.stopped = true
    end

    def resume
      @downloader.not_nil!.stopped = false
    end

    def paused?
      @downloader.not_nil!.stopped
    end
  end
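A hypothetical round trip through the queue, assuming `queue` is the `MangaDex::Queue` instance built in src/mango.cr; the IDs and titles are placeholders. `push` relies on the unique `id` index above, so re-pushing the same job is a no-op:

job = MangaDex::Job.new "350621", "7139", "Vol. 1 Ch. 1", "Some Manga",
  MangaDex::JobStatus::Pending, Time.utc
queue.push [job]              # => 1 (0 if that id was already queued)
queue.count                   # => 1
next_job = queue.pop.not_nil! # earliest Pending/Downloading job
queue.set_status MangaDex::JobStatus::Completed, next_job
queue.delete_status MangaDex::JobStatus::Completed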
  class Downloader
    property stopped = false
    @downloading = false

    def initialize(@queue : Queue, @api : API, @library_path : String,
                   @wait_seconds : Int32, @retries : Int32,
                   @logger : Logger)
      @queue.downloader = self

      spawn do
        loop do
          sleep 1.second
          next if @stopped || @downloading
          begin
            job = @queue.pop
            next if job.nil?
            download job
          rescue e
            @logger.error e
          end
        end
      end
    end

    private def download(job : Job)
      @downloading = true
      @queue.set_status JobStatus::Downloading, job
      begin
        chapter = @api.get_chapter(job.id)
      rescue e
        @logger.error e
        @queue.set_status JobStatus::Error, job
        unless e.message.nil?
          @queue.add_message e.message.not_nil!, job
        end
        @downloading = false
        return
      end
      @queue.set_pages chapter.pages.size, job
      lib_dir = @library_path
      manga_dir = File.join lib_dir, chapter.manga.title
      unless File.exists? manga_dir
        Dir.mkdir_p manga_dir
      end
      zip_path = File.join manga_dir, "#{job.title}.cbz"

      # Find the number of digits needed to store the number of pages
      len = Math.log10(chapter.pages.size).to_i + 1

      writer = Zip::Writer.new zip_path
      # Create a buffered channel. It works as an FIFO queue
      channel = Channel(PageJob).new chapter.pages.size
      spawn do
        chapter.pages.each_with_index do |tuple, i|
          fn, url = tuple
          ext = File.extname fn
          fn = "#{i.to_s.rjust len, '0'}#{ext}"
          page_job = PageJob.new url, fn, writer, @retries
          @logger.debug "Downloading #{url}"
          loop do
            sleep @wait_seconds.seconds
            download_page page_job
            break if page_job.success ||
                     page_job.tries_remaning <= 0
            page_job.tries_remaning -= 1
            @logger.warn "Failed to download page #{url}. " \
                         "Retrying... Remaining retries: " \
                         "#{page_job.tries_remaning}"
          end

          channel.send page_job
        end
      end

      spawn do
        page_jobs = [] of PageJob
        chapter.pages.size.times do
          page_job = channel.receive
          @logger.debug "[#{page_job.success ? "success" : "failed"}] " \
                        "#{page_job.url}"
          page_jobs << page_job
          if page_job.success
            @queue.add_success job
          else
            @queue.add_fail job
            msg = "Failed to download page #{page_job.url}"
            @queue.add_message msg, job
            @logger.error msg
          end
        end
        fail_count = page_jobs.select { |j| !j.success }.size
        @logger.debug "Download completed. " \
                      "#{fail_count}/#{page_jobs.size} failed"
        writer.close
        @logger.debug "cbz File created at #{zip_path}"
        if fail_count == 0
          @queue.set_status JobStatus::Completed, job
        else
          @queue.set_status JobStatus::MissingPages, job
        end
        @downloading = false
      end
    end

    private def download_page(job : PageJob)
      @logger.debug "downloading #{job.url}"
      headers = HTTP::Headers{
        "User-agent" => "Mangadex.cr",
      }
      begin
        HTTP::Client.get job.url, headers do |res|
          unless res.success?
            raise "Failed to download page #{job.url}. " \
                  "[#{res.status_code}] #{res.status_message}"
          end
          job.writer.add job.filename, res.body_io
        end
        job.success = true
      rescue e
        @logger.error e
        job.success = false
      end
    end
  end
end
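The zero-padded page filenames produced by the `Math.log10`/`rjust` combination in `Downloader#download` above, worked through with a hypothetical 12-page chapter:

pages = 12
len = Math.log10(pages).to_i + 1 # => 2
(0...pages).map { |i| "#{i.to_s.rjust len, '0'}.png" }
# => ["00.png", "01.png", ..., "11.png"]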
 src/mango.cr | 34
@@ -8,20 +8,20 @@ VERSION = "0.2.5"
config_path = nil

parser = OptionParser.parse do |parser|
  parser.banner = "Mango e-manga server/reader. Version #{VERSION}\n"

  parser.on "-v", "--version", "Show version" do
    puts "Version #{VERSION}"
    exit
  end
  parser.on "-h", "--help", "Show help" do
    puts parser
    exit
  end
  parser.on "-c PATH", "--config=PATH",
    "Path to the config file. Default is `~/.config/mango/config.yml`" do |path|
    config_path = path
  end
end

config = Config.load config_path
@@ -29,11 +29,11 @@ logger = Logger.new config.log_level
storage = Storage.new config.db_path, logger
library = Library.new config.library_path, config.scan_interval, logger, storage
queue = MangaDex::Queue.new config.mangadex["download_queue_db_path"].to_s,
  logger
api = MangaDex::API.new config.mangadex["api_url"].to_s
downloader = MangaDex::Downloader.new queue, api, config.library_path,
  config.mangadex["download_wait_seconds"].to_i,
  config.mangadex["download_retries"].to_i, logger

context = Context.new config, logger, library, storage, queue
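For context, a hypothetical config.yml covering the keys read above (port, log_level, db_path, library_path, scan_interval, and the mangadex section); the values shown are placeholders rather than the project defaults:

port: 9000
log_level: info
db_path: ~/mango/mango.db
library_path: ~/mango/library
scan_interval: 5
mangadex:
  base_url: https://mangadex.org
  api_url: https://mangadex.org/api
  download_queue_db_path: ~/mango/queue.db
  download_wait_seconds: 5
  download_retries: 4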
@@ -1,108 +1,107 @@
require "./router"

class AdminRouter < Router
  def setup
    get "/admin" do |env|
      layout "admin"
    end

    get "/admin/user" do |env|
      users = @context.storage.list_users
      username = get_username env
      layout "user"
    end

    get "/admin/user/edit" do |env|
      username = env.params.query["username"]?
      admin = env.params.query["admin"]?
      if admin
        admin = admin == "true"
      end
      error = env.params.query["error"]?
      current_user = get_username env
      new_user = username.nil? && admin.nil?
      layout "user-edit"
    end

    post "/admin/user/edit" do |env|
      # creating new user
      begin
        username = env.params.body["username"]
        password = env.params.body["password"]
        # if `admin` is unchecked, the body hash
        # would not contain `admin`
        admin = !env.params.body["admin"]?.nil?

        if username.size < 3
          raise "Username should contain at least 3 characters"
        end
        if (username =~ /^[A-Za-z0-9_]+$/).nil?
          raise "Username should contain alphanumeric characters " \
                "and underscores only"
        end
        if password.size < 6
          raise "Password should contain at least 6 characters"
        end
        if (password =~ /^[[:ascii:]]+$/).nil?
          raise "password should contain ASCII characters only"
        end

        @context.storage.new_user username, password, admin

        env.redirect "/admin/user"
      rescue e
        @context.error e
        redirect_url = URI.new \
          path: "/admin/user/edit",
          query: hash_to_query({"error" => e.message})
        env.redirect redirect_url.to_s
      end
    end

    post "/admin/user/edit/:original_username" do |env|
      # editing existing user
      begin
        username = env.params.body["username"]
        password = env.params.body["password"]
        # if `admin` is unchecked, the body hash would not contain `admin`
        admin = !env.params.body["admin"]?.nil?
        original_username = env.params.url["original_username"]

        if username.size < 3
          raise "Username should contain at least 3 characters"
        end
        if (username =~ /^[A-Za-z0-9_]+$/).nil?
          raise "Username should contain alphanumeric characters " \
                "and underscores only"
        end

        if password.size != 0
          if password.size < 6
            raise "Password should contain at least 6 characters"
          end
          if (password =~ /^[[:ascii:]]+$/).nil?
            raise "password should contain ASCII characters only"
          end
        end

        @context.storage.update_user \
          original_username, username, password, admin

        env.redirect "/admin/user"
      rescue e
        @context.error e
        redirect_url = URI.new \
          path: "/admin/user/edit",
          query: hash_to_query({"username" => original_username, \
                                "admin" => admin, "error" => e.message})
        env.redirect redirect_url.to_s
      end
    end

    get "/admin/downloads" do |env|
      base_url = @context.config.mangadex["base_url"]
      layout "download-manager"
    end
  end
end
|
|||||||
require "../mangadex/*"
|
require "../mangadex/*"
|
||||||
|
|
||||||
class APIRouter < Router
|
class APIRouter < Router
|
||||||
def setup
|
def setup
|
||||||
get "/api/page/:tid/:eid/:page" do |env|
|
get "/api/page/:tid/:eid/:page" do |env|
|
||||||
begin
|
begin
|
||||||
tid = env.params.url["tid"]
|
tid = env.params.url["tid"]
|
||||||
eid = env.params.url["eid"]
|
eid = env.params.url["eid"]
|
||||||
page = env.params.url["page"].to_i
|
page = env.params.url["page"].to_i
|
||||||
|
|
||||||
title = @context.library.get_title tid
|
title = @context.library.get_title tid
|
||||||
raise "Title ID `#{tid}` not found" if title.nil?
|
raise "Title ID `#{tid}` not found" if title.nil?
|
||||||
entry = title.get_entry eid
|
entry = title.get_entry eid
|
||||||
raise "Entry ID `#{eid}` of `#{title.title}` not found" if \
|
raise "Entry ID `#{eid}` of `#{title.title}` not found" if entry.nil?
|
||||||
entry.nil?
|
img = entry.read_page page
|
||||||
img = entry.read_page page
|
raise "Failed to load page #{page} of " \
|
||||||
raise "Failed to load page #{page} of " \
|
"`#{title.title}/#{entry.title}`" if img.nil?
|
||||||
"`#{title.title}/#{entry.title}`" if img.nil?
|
|
||||||
|
|
||||||
send_img env, img
|
send_img env, img
|
||||||
rescue e
|
rescue e
|
||||||
@context.error e
|
@context.error e
|
||||||
env.response.status_code = 500
|
env.response.status_code = 500
|
||||||
e.message
|
e.message
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
get "/api/book/:tid" do |env|
|
get "/api/book/:tid" do |env|
|
||||||
begin
|
begin
|
||||||
tid = env.params.url["tid"]
|
tid = env.params.url["tid"]
|
||||||
title = @context.library.get_title tid
|
title = @context.library.get_title tid
|
||||||
raise "Title ID `#{tid}` not found" if title.nil?
|
raise "Title ID `#{tid}` not found" if title.nil?
|
||||||
|
|
||||||
send_json env, title.to_json
|
send_json env, title.to_json
|
||||||
rescue e
|
rescue e
|
||||||
@context.error e
|
@context.error e
|
||||||
env.response.status_code = 500
|
env.response.status_code = 500
|
||||||
e.message
|
e.message
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
get "/api/book" do |env|
|
get "/api/book" do |env|
|
||||||
send_json env, @context.library.to_json
|
send_json env, @context.library.to_json
|
||||||
end
|
end
|
||||||
|
|
||||||
post "/api/admin/scan" do |env|
|
post "/api/admin/scan" do |env|
|
||||||
start = Time.utc
|
start = Time.utc
|
||||||
@context.library.scan
|
@context.library.scan
|
||||||
ms = (Time.utc - start).total_milliseconds
|
ms = (Time.utc - start).total_milliseconds
|
||||||
send_json env, {
|
send_json env, {
|
||||||
"milliseconds" => ms,
|
"milliseconds" => ms,
|
||||||
"titles" => @context.library.titles.size
|
"titles" => @context.library.titles.size,
|
||||||
}.to_json
|
}.to_json
|
||||||
end
|
end
|
||||||
|
|
||||||
post "/api/admin/user/delete/:username" do |env|
|
post "/api/admin/user/delete/:username" do |env|
|
||||||
begin
|
begin
|
||||||
username = env.params.url["username"]
|
username = env.params.url["username"]
|
||||||
@context.storage.delete_user username
|
@context.storage.delete_user username
|
||||||
rescue e
|
rescue e
|
||||||
@context.error e
|
@context.error e
|
||||||
send_json env, {
|
send_json env, {
|
||||||
"success" => false,
|
"success" => false,
|
||||||
"error" => e.message
|
"error" => e.message,
|
||||||
}.to_json
|
}.to_json
|
||||||
else
|
else
|
||||||
send_json env, {"success" => true}.to_json
|
send_json env, {"success" => true}.to_json
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
post "/api/progress/:title/:entry/:page" do |env|
|
post "/api/progress/:title/:entry/:page" do |env|
|
||||||
begin
|
begin
|
||||||
username = get_username env
|
username = get_username env
|
||||||
title = (@context.library.get_title env.params.url["title"])
|
title = (@context.library.get_title env.params.url["title"])
|
||||||
.not_nil!
|
.not_nil!
|
||||||
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
entry = (title.get_entry env.params.url["entry"]).not_nil!
|
||||||
page = env.params.url["page"].to_i
|
page = env.params.url["page"].to_i
|
||||||
|
|
||||||
raise "incorrect page value" if page < 0 || page > entry.pages
|
raise "incorrect page value" if page < 0 || page > entry.pages
|
||||||
title.save_progress username, entry.title, page
|
title.save_progress username, entry.title, page
|
||||||
rescue e
|
rescue e
|
||||||
@context.error e
|
@context.error e
|
||||||
send_json env, {
|
send_json env, {
|
||||||
"success" => false,
|
"success" => false,
|
||||||
"error" => e.message
|
"error" => e.message,
|
||||||
}.to_json
|
}.to_json
|
||||||
else
|
else
|
||||||
send_json env, {"success" => true}.to_json
|
send_json env, {"success" => true}.to_json
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
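For example, triggering a library scan through the endpoint above; the host, port, and token are placeholders:

require "http/client"

res = HTTP::Client.post "http://localhost:9000/api/admin/scan",
  headers: HTTP::Headers{"Cookie" => "token=<session token>"}
puts res.body # e.g. {"milliseconds":12.5,"titles":42}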
    post "/api/admin/display_name/:title/:name" do |env|
      begin
        title = (@context.library.get_title env.params.url["title"])
          .not_nil!
        name = env.params.url["name"]
        entry = env.params.query["entry"]?
        if entry.nil?
          title.set_display_name name
        else
          eobj = title.get_entry entry
          title.set_display_name eobj.not_nil!.title, name
        end
      rescue e
        @context.error e
        send_json env, {
          "success" => false,
          "error" => e.message,
        }.to_json
      else
        send_json env, {"success" => true}.to_json
      end
    end

    get "/api/admin/mangadex/manga/:id" do |env|
      begin
        id = env.params.url["id"]
        api = MangaDex::API.new @context.config.mangadex["api_url"].to_s
        manga = api.get_manga id
        send_json env, manga.to_info_json
      rescue e
        @context.error e
        send_json env, {"error" => e.message}.to_json
      end
    end

    post "/api/admin/mangadex/download" do |env|
      begin
        chapters = env.params.json["chapters"].as(Array).map { |c| c.as_h }
        jobs = chapters.map { |chapter|
          MangaDex::Job.new(
            chapter["id"].as_s,
            chapter["manga_id"].as_s,
            chapter["full_title"].as_s,
            chapter["manga_title"].as_s,
            MangaDex::JobStatus::Pending,
            Time.unix chapter["time"].as_s.to_i
          )
        }
        inserted_count = @context.queue.push jobs
        send_json env, {
          "success": inserted_count,
          "fail": jobs.size - inserted_count,
        }.to_json
      rescue e
        @context.error e
        send_json env, {"error" => e.message}.to_json
      end
    end

    get "/api/admin/mangadex/queue" do |env|
      begin
        jobs = @context.queue.get_all
        send_json env, {
          "jobs" => jobs,
          "paused" => @context.queue.paused?,
          "success" => true,
        }.to_json
      rescue e
        send_json env, {
          "success" => false,
          "error" => e.message,
        }.to_json
      end
    end

    post "/api/admin/mangadex/queue/:action" do |env|
      begin
        action = env.params.url["action"]
        id = env.params.query["id"]?
        case action
        when "delete"
          if id.nil?
            @context.queue.delete_status MangaDex::JobStatus::Completed
          else
            @context.queue.delete id
          end
        when "retry"
          if id.nil?
            @context.queue.reset
          else
            @context.queue.reset id
          end
        when "pause"
          @context.queue.pause
        when "resume"
          @context.queue.resume
        else
          raise "Unknown queue action #{action}"
        end

        send_json env, {"success" => true}.to_json
      rescue e
        send_json env, {
          "success" => false,
          "error" => e.message,
        }.to_json
      end
    end
  end
end
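A sketch of queueing a chapter through the download endpoint above. The field names match what the handler reads (`id`, `manga_id`, `full_title`, `manga_title`, and `time` as a Unix-seconds string); the values, host, and token are placeholders:

require "http/client"
require "json"

payload = {
  "chapters" => [{
    "id" => "350621",
    "manga_id" => "7139",
    "full_title" => "Vol. 1 Ch. 1",
    "manga_title" => "Some Manga",
    "time" => Time.utc.to_unix.to_s,
  }],
}
res = HTTP::Client.post "http://localhost:9000/api/admin/mangadex/download",
  headers: HTTP::Headers{"Content-Type" => "application/json",
                         "Cookie" => "token=<session token>"},
  body: payload.to_json
puts res.body # {"success":1,"fail":0}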
@@ -1,63 +1,61 @@
require "./router"

class MainRouter < Router
  def setup
    get "/login" do |env|
      render "src/views/login.ecr"
    end

    get "/logout" do |env|
      begin
        cookie = env.request.cookies.find { |c| c.name == "token" }.not_nil!
        @context.storage.logout cookie.value
      rescue e
        @context.error "Error when attempting to log out: #{e}"
      ensure
        env.redirect "/login"
      end
    end

    post "/login" do |env|
      begin
        username = env.params.body["username"]
        password = env.params.body["password"]
        token = @context.storage.verify_user(username, password).not_nil!

        cookie = HTTP::Cookie.new "token", token
        cookie.expires = Time.local.shift years: 1
        env.response.cookies << cookie
        env.redirect "/"
      rescue
        env.redirect "/login"
      end
    end

    get "/" do |env|
      titles = @context.library.titles
      username = get_username env
      percentage = titles.map &.load_percetage username
      layout "index"
    end

    get "/book/:title" do |env|
      begin
        title = (@context.library.get_title env.params.url["title"]).not_nil!
        username = get_username env
        percentage = title.entries.map { |e|
          title.load_percetage username, e.title
        }
        layout "title"
      rescue e
        @context.error e
        env.response.status_code = 404
      end
    end

    get "/download" do |env|
      base_url = @context.config.mangadex["base_url"]
      layout "download"
    end
  end
end
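A sketch of the login round trip handled above: posting the form fields and reading back the `token` cookie that later requests present. The address and credentials are placeholders:

require "http/client"

res = HTTP::Client.post "http://localhost:9000/login",
  form: {"username" => "admin", "password" => "<password printed on first run>"}
puts res.headers["Set-Cookie"]? # contains the `token` cookie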
@@ -1,58 +1,61 @@
require "./router"

class ReaderRouter < Router
  def setup
    get "/reader/:title/:entry" do |env|
      begin
        title = (@context.library.get_title env.params.url["title"]).not_nil!
        entry = (title.get_entry env.params.url["entry"]).not_nil!

        # load progress
        username = get_username env
        page = title.load_progress username, entry.title
        # we go back 2 * `IMGS_PER_PAGE` pages. the infinite scroll
        # library perloads a few pages in advance, and the user
        # might not have actually read them
        page = [page - 2 * IMGS_PER_PAGE, 1].max

        env.redirect "/reader/#{title.id}/#{entry.id}/#{page}"
      rescue e
        @context.error e
        env.response.status_code = 404
      end
    end

    get "/reader/:title/:entry/:page" do |env|
      begin
        title = (@context.library.get_title env.params.url["title"]).not_nil!
        entry = (title.get_entry env.params.url["entry"]).not_nil!
        page = env.params.url["page"].to_i
        raise "" if page > entry.pages || page <= 0

        # save progress
        username = get_username env
        title.save_progress username, entry.title, page

        pages = (page...[entry.pages + 1, page + IMGS_PER_PAGE].min)
        urls = pages.map { |idx|
          "/api/page/#{title.id}/#{entry.id}/#{idx}"
        }
        reader_urls = pages.map { |idx|
          "/reader/#{title.id}/#{entry.id}/#{idx}"
        }
        next_page = page + IMGS_PER_PAGE
        next_url = next_entry_url = nil
        exit_url = "/book/#{title.id}"
        next_entry = title.next_entry entry
        unless next_page > entry.pages
          next_url = "/reader/#{title.id}/#{entry.id}/#{next_page}"
        end
        unless next_entry.nil?
          next_entry_url = "/reader/#{title.id}/#{next_entry.id}"
        end

        render "src/views/reader.ecr"
      rescue e
        @context.error e
        env.response.status_code = 404
      end
    end
  end
end
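The page-window arithmetic above, worked through with hypothetical numbers (a 30-page entry, `IMGS_PER_PAGE` assumed to be 5, reader opened at page 28):

imgs_per_page = 5 # stand-in for IMGS_PER_PAGE
entry_pages = 30
page = 28
pages = (page...[entry_pages + 1, page + imgs_per_page].min) # 28...31
pages.map { |idx| "/api/page/<tid>/<eid>/#{idx}" }
# => ["/api/page/<tid>/<eid>/28", ".../29", ".../30"]
next_page = page + imgs_per_page # 33 > 30, so no next_url is set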
@@ -1,6 +1,6 @@
require "../context"

class Router
  def initialize(@context : Context)
  end
end
@@ -7,44 +7,42 @@ require "./util"
require "./routes/*"

class Server
  def initialize(@context : Context)
    error 403 do |env|
      message = "HTTP 403: You are not authorized to visit #{env.request.path}"
      layout "message"
    end
    error 404 do |env|
      message = "HTTP 404: Mango cannot find the page #{env.request.path}"
      layout "message"
    end
    error 500 do |env|
      message = "HTTP 500: Internal server error. Please try again later."
      layout "message"
    end

    MainRouter.new(@context).setup
    AdminRouter.new(@context).setup
    ReaderRouter.new(@context).setup
    APIRouter.new(@context).setup

    Kemal.config.logging = false
    add_handler LogHandler.new @context.logger
    add_handler AuthHandler.new @context.storage
    {% if flag?(:release) %}
      # when building for relase, embed the static files in binary
      @context.debug "We are in release mode. Using embedded static files."
      serve_static false
      add_handler StaticHandler.new
    {% end %}
  end

  def start
    @context.debug "Starting Kemal server"
    {% if flag?(:release) %}
      Kemal.config.env = "production"
    {% end %}
    Kemal.config.port = @context.config.port
    Kemal.run
  end
end
@@ -3,30 +3,30 @@ require "kemal"
require "./util"

class FS
  extend BakedFileSystem
  {% if flag?(:release) %}
    {% if read_file? "#{__DIR__}/../dist/favicon.ico" %}
      {% puts "baking ../dist" %}
      bake_folder "../dist"
    {% else %}
      {% puts "baking ../public" %}
      bake_folder "../public"
    {% end %}
  {% end %}
end

class StaticHandler < Kemal::Handler
  @dirs = ["/css", "/js", "/img", "/favicon.ico"]

  def call(env)
    if request_path_startswith env, @dirs
      file = FS.get? env.request.path
      return call_next env if file.nil?

      slice = Bytes.new file.size
      file.read slice
      return send_file env, slice, file.mime_type
    end
    call_next env
  end
end
 src/storage.cr | 298
@@ -4,174 +4,172 @@ require "uuid"
require "base64"

def hash_password(pw)
  Crypto::Bcrypt::Password.create(pw).to_s
end

def verify_password(hash, pw)
  (Crypto::Bcrypt::Password.new hash).verify pw
end

def random_str
  UUID.random.to_s.gsub "-", ""
end
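These helpers wrap Crystal's standard bcrypt implementation; a quick sketch of how they behave, assuming the file's own requires and the definitions above are loaded (the password is a placeholder):

hash = hash_password "secret123"       # bcrypt digest, safe to store
verify_password hash, "secret123"      # => true
verify_password hash, "wrong-password" # => false
random_str                             # => 32-character hex token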
||||||
class Storage
  def initialize(@path : String, @logger : Logger)
    dir = File.dirname path
    unless Dir.exists? dir
      @logger.info "The DB directory #{dir} does not exist. " \
                   "Attempting to create it"
      Dir.mkdir_p dir
    end
    DB.open "sqlite3://#{path}" do |db|
      begin
        # We create the `ids` table first, so that even if the user has an
        #   early version installed and has the `user` table only,
        #   we will still be able to create `ids`
        db.exec "create table ids" \
                "(path text, id text, is_title integer)"
        db.exec "create unique index path_idx on ids (path)"
        db.exec "create unique index id_idx on ids (id)"

        db.exec "create table users" \
                "(username text, password text, token text, admin integer)"
      rescue e
        unless e.message.not_nil!.ends_with? "already exists"
          @logger.fatal "Error when checking tables in DB: #{e}"
          raise e
        end
      else
        @logger.debug "Creating DB file at #{@path}"
        db.exec "create unique index username_idx on users (username)"
        db.exec "create unique index token_idx on users (token)"
        random_pw = random_str
        hash = hash_password random_pw
        db.exec "insert into users values (?, ?, ?, ?)",
          "admin", hash, nil, 1
        @logger.log "Initial user created. You can log in with " \
                    "#{{"username" => "admin", "password" => random_pw}}"
      end
    end
  end

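Constructing the class runs the bootstrap above; a hedged sketch, where the path and `logger` are placeholders rather than values from this commit:

# On a fresh database this creates the SQLite file, the `ids` and `users`
# tables, and an "admin" account with a random password written to the log.
storage = Storage.new "./mango.db", logger
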
  def verify_user(username, password)
    DB.open "sqlite3://#{@path}" do |db|
      begin
-        hash, token = db.query_one "select password, token from "\
-                                   "users where username = (?)", \
+        hash, token = db.query_one "select password, token from " \
+                                   "users where username = (?)",
          username, as: {String, String?}
        unless verify_password hash, password
          @logger.debug "Password does not match the hash"
          return nil
        end
        @logger.debug "User #{username} verified"
        return token if token
        token = random_str
        @logger.debug "Updating token for #{username}"
        db.exec "update users set token = (?) where username = (?)",
          token, username
        return token
      rescue e
        @logger.error "Error when verifying user #{username}: #{e}"
        return nil
      end
    end
  end

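Together with `verify_token` just below, this gives the login flow: exchange credentials for a token once, then map the token back to a username on later requests. A sketch with placeholder values:

if token = storage.verify_user "admin", "s3cret"
  # Subsequent requests carry the token (e.g. in a cookie) and resolve it back.
  username = storage.verify_token token # => "admin", or nil for an unknown token
end
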
  def verify_token(token)
    DB.open "sqlite3://#{@path}" do |db|
      begin
        username = db.query_one "select username from users where " \
                                "token = (?)", token, as: String
        return username
      rescue e
        @logger.debug "Unable to verify token"
        return nil
      end
    end
  end

  def verify_admin(token)
    DB.open "sqlite3://#{@path}" do |db|
      begin
        return db.query_one "select admin from users where " \
                            "token = (?)", token, as: Bool
      rescue e
        @logger.debug "Unable to verify user as admin"
        return false
      end
    end
  end

  def list_users
    results = Array(Tuple(String, Bool)).new
    DB.open "sqlite3://#{@path}" do |db|
      db.query "select username, admin from users" do |rs|
        rs.each do
          results << {rs.read(String), rs.read(Bool)}
        end
      end
    end
    results
  end

  def new_user(username, password, admin)
    admin = (admin ? 1 : 0)
    DB.open "sqlite3://#{@path}" do |db|
      hash = hash_password password
      db.exec "insert into users values (?, ?, ?, ?)",
        username, hash, nil, admin
    end
  end

  def update_user(original_username, username, password, admin)
    admin = (admin ? 1 : 0)
    DB.open "sqlite3://#{@path}" do |db|
      if password.size == 0
-        db.exec "update users set username = (?), admin = (?) "\
-                "where username = (?)",\
+        db.exec "update users set username = (?), admin = (?) " \
+                "where username = (?)",
          username, admin, original_username
      else
        hash = hash_password password
-        db.exec "update users set username = (?), admin = (?),"\
-                "password = (?) where username = (?)",\
+        db.exec "update users set username = (?), admin = (?)," \
+                "password = (?) where username = (?)",
          username, admin, hash, original_username
      end
    end
  end

  def delete_user(username)
    DB.open "sqlite3://#{@path}" do |db|
      db.exec "delete from users where username = (?)", username
    end
  end

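`new_user`, `update_user` and `delete_user` above cover the user CRUD; a brief sketch with made-up arguments:

storage.new_user "alice", "password123", false # create a non-admin user
storage.update_user "alice", "alice", "", true # empty password leaves the stored hash untouched
storage.delete_user "alice"
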
  def logout(token)
    DB.open "sqlite3://#{@path}" do |db|
      begin
-        db.exec "update users set token = (?) where token = (?)", \
-          nil, token
+        db.exec "update users set token = (?) where token = (?)", nil, token
      rescue
      end
    end
  end

  def get_id(path, is_title)
    DB.open "sqlite3://#{@path}" do |db|
      begin
-        id = db.query_one "select id from ids where path = (?)",
-          path, as: {String}
+        id = db.query_one "select id from ids where path = (?)", path,
+          as: {String}
        return id
      rescue
        id = random_str
-        db.exec "insert into ids values (?, ?, ?)", path, id,
-          is_title ? 1 : 0
+        db.exec "insert into ids values (?, ?, ?)", path, id, is_title ? 1 : 0
        return id
      end
    end
  end

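`get_id` is a get-or-create lookup: the first call for a path inserts a fresh random ID, and later calls return the stored one. A sketch with a made-up path:

id      = storage.get_id "/manga/One Piece", true # inserts a new random ID
same_id = storage.get_id "/manga/One Piece", true # returns the same ID from the `ids` table
id == same_id                                     # => true
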
  def to_json(json : JSON::Builder)
    json.string self
  end
end

src/util.cr
@@ -3,81 +3,81 @@ require "big"
IMGS_PER_PAGE = 5

macro layout(name)
  begin
    cookie = env.request.cookies.find { |c| c.name == "token" }
    is_admin = false
    unless cookie.nil?
      is_admin = @context.storage.verify_admin cookie.value
    end
    render "src/views/#{{{name}}}.ecr", "src/views/layout.ecr"
  rescue e
    message = e.to_s
    render "message"
  end
end

macro send_img(env, img)
  send_file {{env}}, {{img}}.data, {{img}}.mime
end

macro get_username(env)
  # if the request gets here, it has gone through the auth handler, and
  # we can be sure that a valid token exists, so we can use not_nil! here
  cookie = {{env}}.request.cookies.find { |c| c.name == "token" }.not_nil!
  (@context.storage.verify_token cookie.value).not_nil!
end

macro send_json(env, json)
  {{env}}.response.content_type = "application/json"
  {{json}}
end

def hash_to_query(hash)
  hash.map { |k, v| "#{k}=#{v}" }.join("&")
end

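Note that `hash_to_query` performs no URL escaping, so it is only meant for values that are already URL-safe, for example:

hash_to_query({"page" => 2, "depth" => 1}) # => "page=2&depth=1"
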
def request_path_startswith(env, ary)
  ary.each do |prefix|
    if env.request.path.starts_with? prefix
      return true
    end
  end
  return false
end

def is_numeric(str)
  /^\d+/.match(str) != nil
end

def split_by_alphanumeric(str)
  arr = [] of String
  str.scan(/([^\d\n\r]*)(\d*)([^\d\n\r]*)/) do |match|
-    arr += match.captures.select{|s| s != ""}
+    arr += match.captures.select { |s| s != "" }
  end
  arr
end

def compare_alphanumerically(c, d)
  is_c_bigger = c.size <=> d.size
  if c.size > d.size
    d += [nil] * (c.size - d.size)
  elsif c.size < d.size
    c += [nil] * (d.size - c.size)
  end
  c.zip(d) do |a, b|
    return -1 if a.nil?
    return 1 if b.nil?
    if is_numeric(a) && is_numeric(b)
      compare = a.to_big_i <=> b.to_big_i
      return compare if compare != 0
    else
      compare = a <=> b
      return compare if compare != 0
    end
  end
  is_c_bigger
end

def compare_alphanumerically(a : String, b : String)
  compare_alphanumerically split_by_alphanumeric(a), split_by_alphanumeric(b)
end

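The String overload is what callers use for natural sorting, where numeric runs compare as numbers rather than as text. For example:

chapters = ["Ch.10", "Ch.2", "Ch.1"]
chapters.sort! { |a, b| compare_alphanumerically a, b }
# => ["Ch.1", "Ch.2", "Ch.10"]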