Mirror of https://github.com/hkalexling/Mango.git
Commit 8a83c0df4e (parent 87dea01917)
Dockerfile.arm32v7 (new file, 13 lines added)
@@ -0,0 +1,13 @@
+FROM arm32v7/ubuntu:18.04
+
+RUN apt-get update && apt-get install -y wget git make llvm-8 llvm-8-dev g++ libsqlite3-dev libyaml-dev libgc-dev libssl-dev libcrypto++-dev libevent-dev libgmp-dev zlib1g-dev libpcre++-dev pkg-config libarchive-dev libxml2-dev libacl1-dev nettle-dev liblzo2-dev liblzma-dev libbz2-dev
+
+RUN git clone https://github.com/crystal-lang/crystal && cd crystal && git checkout 0.32.1 && make deps && cd ..
+RUN git clone https://github.com/kostya/myhtml && cd myhtml/src/ext && make && cd ..
+RUN git clone https://github.com/jessedoyle/duktape.cr && cd duktape.cr/ext && make && cd ..
+
+COPY mango.o .
+
+RUN cc 'mango.o' -o 'mango' -rdynamic -lxml2 /myhtml/src/ext/modest-c/lib/libmodest_static.a -L/duktape.cr/src/.build/lib -L/duktape.cr/src/.build/include -lduktape -lm `pkg-config libarchive --libs` -lz `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libssl || printf %s '-lssl -lcrypto'` `command -v pkg-config > /dev/null && pkg-config --libs --silence-errors libcrypto || printf %s '-lcrypto'` -lgmp -lsqlite3 -lyaml -lpcre -lm /usr/lib/arm-linux-gnueabihf/libgc.so -lpthread /crystal/src/ext/libcrystal.a -levent -lrt -ldl -L/usr/bin/../lib/crystal/lib -L/usr/bin/../lib/crystal/lib
+
+CMD ["./mango"]
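Note: the mango.o object copied into the image is presumably produced ahead of time with Crystal's cross-compilation support (roughly `crystal build <main source file> --cross-compile --target arm-linux-gnueabihf`, a hypothetical invocation not shown in this commit), which emits an object file together with a suggested cc link command much like the long RUN cc line above. The Crystal, myhtml, and duktape.cr checkouts exist only to supply the static libraries and runtime objects that this link step references on the ARM image.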
src/main_fiber.cr (new file, 29 lines added)
@@ -0,0 +1,29 @@
+# On ARM, connecting to the SQLite DB from a spawned fiber would crash
+# https://github.com/crystal-lang/crystal-sqlite3/issues/30
+# This is a temporary workaround that forces the relevant code to run in the
+# main fiber
+
+class MainFiber
+  @@channel = Channel(-> Nil).new
+  @@done = Channel(Bool).new
+
+  def self.start_and_block
+    loop do
+      if proc = @@channel.receive
+        begin
+          proc.call
+        ensure
+          @@done.send true
+        end
+      end
+      Fiber.yield
+    end
+  end
+
+  def self.run(&block : -> Nil)
+    @@channel.send block
+    until @@done.receive
+      Fiber.yield
+    end
+  end
+end
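For context, a minimal usage sketch of the class above (hypothetical file name and values, not part of this commit; it assumes the file sits next to main_fiber.cr so the relative require resolves). Work is submitted from a spawned fiber via MainFiber.run, results are handed back through a captured local rather than `return` (the block runs as a proc on the main fiber), and the program parks the main fiber in start_and_block:

# usage_sketch.cr -- hypothetical example, not part of this commit
require "./main_fiber"

# Values cannot be returned out of the block, so they are captured in a local.
def query_on_main_fiber : Int32
  result = 0
  MainFiber.run do
    # A DB.open / db.query_one call would go here in the real code.
    result = 42
  end
  result
end

spawn do
  puts query_on_main_fiber # => 42
  exit 0
end

# Park the main fiber so it can execute the procs queued by MainFiber.run.
MainFiber.start_and_block

The entry-point change further below follows the same shape: CLI.start(ARGV) moves into a spawn block and MainFiber.start_and_block becomes the program's final statement.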
@@ -1,6 +1,7 @@
 require "./config"
 require "./queue"
 require "./server"
+require "./main_fiber"
 require "./mangadex/*"
 require "option_parser"
 require "clim"
@@ -54,8 +55,7 @@ class CLI < Clim
 
       # empty ARGV so it won't be passed to Kemal
       ARGV.clear
-      server = Server.new
-      server.start
+      Server.new.start
     end
 
     sub "admin" do
@@ -123,4 +123,8 @@ class CLI < Clim
   end
 end
 
+spawn do
 CLI.start(ARGV)
+end
+
+MainFiber.start_and_block
src/queue.cr (28 lines changed)
@@ -119,6 +119,7 @@ class Queue
         "Attepmting to create it"
       Dir.mkdir_p dir
     end
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       begin
         db.exec "create table if not exists queue " \
@@ -138,11 +139,13 @@ class Queue
         end
       end
     end
+    end
 
   # Push an array of jobs into the queue, and return the number of jobs
   # inserted. Any job already exists in the queue will be ignored.
   def push(jobs : Array(Job))
     start_count = self.count
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       jobs.each do |job|
         db.exec "insert or ignore into queue values " \
@@ -152,16 +155,19 @@ class Queue
           job.success_count, job.fail_count, job.time.to_unix_ms
       end
     end
+    end
     self.count - start_count
   end
 
   def reset(id : String)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set status = 0, status_message = '', " \
               "pages = 0, success_count = 0, fail_count = 0 " \
               "where id = (?)", id
     end
   end
+    end
 
   def reset(job : Job)
     self.reset job.id
@@ -169,91 +175,113 @@ class Queue
 
   # Reset all failed tasks (missing pages and error)
   def reset
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set status = 0, status_message = '', " \
               "pages = 0, success_count = 0, fail_count = 0 " \
               "where status = 2 or status = 4"
     end
   end
+    end
 
   def delete(id : String)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "delete from queue where id = (?)", id
     end
   end
+    end
 
   def delete(job : Job)
     self.delete job.id
   end
 
   def delete_status(status : JobStatus)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "delete from queue where status = (?)", status.to_i
     end
   end
+    end
 
   def count_status(status : JobStatus)
     num = 0
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       num = db.query_one "select count(*) from queue where " \
                          "status = (?)", status.to_i, as: Int32
     end
+    end
     num
   end
 
   def count
     num = 0
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       num = db.query_one "select count(*) from queue", as: Int32
     end
+    end
     num
   end
 
   def set_status(status : JobStatus, job : Job)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set status = (?) where id = (?)",
         status.to_i, job.id
     end
   end
+    end
 
   def get_all
     jobs = [] of Job
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       jobs = db.query_all "select * from queue order by time" do |rs|
         Job.from_query_result rs
       end
     end
+    end
     jobs
   end
 
   def add_success(job : Job)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set success_count = success_count + 1 " \
               "where id = (?)", job.id
     end
   end
+    end
 
   def add_fail(job : Job)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set fail_count = fail_count + 1 " \
               "where id = (?)", job.id
     end
   end
+    end
 
   def set_pages(pages : Int32, job : Job)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set pages = (?), success_count = 0, " \
               "fail_count = 0 where id = (?)", pages, job.id
     end
   end
+    end
 
   def add_message(msg : String, job : Job)
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       db.exec "update queue set status_message = " \
               "status_message || (?) || (?) where id = (?)",
         "\n", msg, job.id
     end
   end
+    end
 
   def <<(downloader : Downloader)
     @downloaders << downloader
@@ -32,6 +32,7 @@ class Storage
         "Attepmting to create it"
       Dir.mkdir_p dir
     end
+    MainFiber.run do
     DB.open "sqlite3://#{@path}" do |db|
       begin
         # We create the `ids` table first. even if the uses has an
@@ -66,6 +67,7 @@ class Storage
         @db = DB.open "sqlite3://#{@path}"
       end
     end
+    end
 
   macro init_admin
     random_pw = random_str
@@ -87,6 +89,8 @@ class Storage
   end
 
   def verify_user(username, password)
+    out_token = nil
+    MainFiber.run do
     get_db do |db|
       begin
         hash, token = db.query_one "select password, token from " \
@@ -94,24 +98,29 @@ class Storage
           username, as: {String, String?}
         unless verify_password hash, password
           Logger.debug "Password does not match the hash"
-          return nil
+          next
         end
         Logger.debug "User #{username} verified"
-        return token if token
+        if token
+          out_token = token
+          next
+        end
         token = random_str
         Logger.debug "Updating token for #{username}"
         db.exec "update users set token = (?) where username = (?)",
           token, username
-        return token
+        out_token = token
       rescue e
         Logger.error "Error when verifying user #{username}: #{e}"
-        return nil
       end
     end
   end
+    out_token
+  end
 
   def verify_token(token)
     username = nil
+    MainFiber.run do
     get_db do |db|
       begin
         username = db.query_one "select username from users where " \
@@ -120,11 +129,13 @@ class Storage
           Logger.debug "Unable to verify token"
       end
     end
+    end
     username
   end
 
   def verify_admin(token)
     is_admin = false
+    MainFiber.run do
     get_db do |db|
       begin
         is_admin = db.query_one "select admin from users where " \
@@ -133,11 +144,13 @@ class Storage
           Logger.debug "Unable to verify user as admin"
       end
     end
+    end
     is_admin
   end
 
   def list_users
     results = Array(Tuple(String, Bool)).new
+    MainFiber.run do
     get_db do |db|
       db.query "select username, admin from users" do |rs|
         rs.each do
@@ -145,6 +158,7 @@ class Storage
         end
       end
     end
+    end
     results
   end
 
@@ -152,17 +166,20 @@ class Storage
     validate_username username
     validate_password password
     admin = (admin ? 1 : 0)
+    MainFiber.run do
     get_db do |db|
       hash = hash_password password
       db.exec "insert into users values (?, ?, ?, ?)",
         username, hash, nil, admin
     end
   end
+    end
 
   def update_user(original_username, username, password, admin)
     admin = (admin ? 1 : 0)
     validate_username username
     validate_password password unless password.empty?
+    MainFiber.run do
     get_db do |db|
       if password.empty?
         db.exec "update users set username = (?), admin = (?) " \
@@ -176,14 +193,18 @@ class Storage
         end
       end
     end
+    end
 
   def delete_user(username)
+    MainFiber.run do
     get_db do |db|
       db.exec "delete from users where username = (?)", username
     end
   end
+    end
 
   def logout(token)
+    MainFiber.run do
     get_db do |db|
       begin
         db.exec "update users set token = (?) where token = (?)", nil, token
@@ -191,13 +212,16 @@ class Storage
         end
       end
     end
+    end
 
   def get_id(path, is_title)
     id = nil
+    MainFiber.run do
     get_db do |db|
       id = db.query_one? "select id from ids where path = (?)", path,
         as: {String}
     end
+    end
     id
   end
 
@@ -206,6 +230,7 @@ class Storage
   end
 
   def bulk_insert_ids
+    MainFiber.run do
     get_db do |db|
       db.transaction do |tx|
         @insert_ids.each do |tp|
@@ -216,12 +241,15 @@ class Storage
         end
       @insert_ids.clear
     end
+    end
 
   def close
+    MainFiber.run do
     unless @db.nil?
       @db.not_nil!.close
     end
   end
+    end
 
   def to_json(json : JSON::Builder)
     json.string self