Merge branch 'dev' into feature/right-to-left

commit b6c8386caf

@@ -104,6 +104,15 @@
       "contributions": [
         "infra"
       ]
+    },
+    {
+      "login": "lincolnthedev",
+      "name": "i use arch btw",
+      "avatar_url": "https://avatars.githubusercontent.com/u/41193328?v=4",
+      "profile": "https://lncn.dev",
+      "contributions": [
+        "infra"
+      ]
     }
   ],
   "contributorsPerLine": 7,
@@ -1,2 +1,9 @@
 node_modules
 lib
+Dockerfile
+Dockerfile.arm32v7
+Dockerfile.arm64v8
+README.md
+.all-contributorsrc
+env.example
+.github/
@@ -51,7 +51,7 @@ The official docker images are available on [Dockerhub](https://hub.docker.com/r
 ### CLI
 
 ```
-Mango - Manga Server and Web Reader. Version 0.23.0
+Mango - Manga Server and Web Reader. Version 0.24.0
 
 Usage:
 
@@ -86,6 +86,10 @@ log_level: info
 upload_path: ~/mango/uploads
 plugin_path: ~/mango/plugins
 download_timeout_seconds: 30
+library_cache_path: ~/mango/library.yml.gz
+cache_enabled: false
+cache_size_mbs: 50
+cache_log_enabled: true
 disable_login: false
 default_username: ""
 auth_proxy_header_name: ""
@@ -97,12 +101,12 @@ mangadex:
 download_queue_db_path: ~/mango/queue.db
 chapter_rename_rule: '[Vol.{volume} ][Ch.{chapter} ]{title|id}'
 manga_rename_rule: '{title}'
-subscription_update_interval_hours: 24
 ```
 
 - `scan_interval_minutes`, `thumbnail_generation_interval_hours` and `db_optimization_interval_hours` can be any non-negative integer. Setting them to `0` disables the periodic tasks
 - `log_level` can be `debug`, `info`, `warn`, `error`, `fatal` or `off`. Setting it to `off` disables the logging
 - You can disable authentication by setting `disable_login` to true. Note that `default_username` must be set to an existing username for this to work.
+- By setting `cache_enabled` to `true`, you can enable an experimental feature where Mango caches library metadata to improve page load time. You can further fine-tune the feature with `cache_size_mbs` and `cache_log_enabled`.
 
 ### Library Structure
 
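A minimal sketch (not part of the diff) of how the three new cache options are consumed at startup; it mirrors `LRUCache.init` from `src/library/cache.cr`, added later in this commit:

```
# cache_enabled gates every cache operation; cache_size_mbs bounds the
# cache's estimated in-memory size; cache_log_enabled toggles hit/miss logs.
cache_size = Config.current.cache_size_mbs
limit = Int128.new cache_size * 1024 * 1024 if Config.current.cache_enabled
should_log = Config.current.cache_log_enabled
```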
@@ -174,6 +178,7 @@ Please check the [development guideline](https://github.com/hkalexling/Mango/wik
       <td align="center"><a href="https://github.com/Leeingnyo"><img src="https://avatars0.githubusercontent.com/u/6760150?v=4?s=100" width="100px;" alt=""/><br /><sub><b>이인용</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=Leeingnyo" title="Code">💻</a></td>
       <td align="center"><a href="http://h45h74x.eu.org"><img src="https://avatars1.githubusercontent.com/u/27204033?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simon</b></sub></a><br /><a href="https://github.com/hkalexling/Mango/commits?author=h45h74x" title="Code">💻</a></td>
       <td align="center"><a href="https://github.com/davidkna"><img src="https://avatars.githubusercontent.com/u/835177?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Knaack</b></sub></a><br /><a href="#infra-davidkna" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
+      <td align="center"><a href="https://lncn.dev"><img src="https://avatars.githubusercontent.com/u/41193328?v=4?s=100" width="100px;" alt=""/><br /><sub><b>i use arch btw</b></sub></a><br /><a href="#infra-lincolnthedev" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
     </tr>
   </table>
 
@@ -68,7 +68,12 @@ const buildTable = (chapters) => {
 	$('table').append(thead);
 
 	const rows = chapters.map(ch => {
-		const tds = Object.values(ch).map(v => `<td>${v}</td>`).join('');
+		const tds = Object.values(ch).map(v => {
+			const maxLength = 40;
+			const shouldShrink = v.length > maxLength;
+			const content = shouldShrink ? `<span title="${v}">${v.substring(0, maxLength)}...</span><div uk-dropdown><span>${v}</span></div>` : v;
+			return `<td>${content}</td>`;
+		}).join('');
 		return `<tr data-id="${ch.id}" data-title="${ch.title}">${tds}</tr>`;
 	});
 	const tbody = `<tbody id="selectable">${rows}</tbody>`;
@@ -1,5 +1,5 @@
 name: mango
-version: 0.23.0
+version: 0.24.0
 
 authors:
   - Alex Ling <hkalexling@gmail.com>
@@ -61,3 +61,13 @@ describe "chapter_sort" do
     end.should eq ary
   end
 end
+
+describe "sanitize_filename" do
+  it "returns a random string for empty sanitized string" do
+    sanitize_filename("..").should_not eq sanitize_filename("..")
+  end
+  it "sanitizes correctly" do
+    sanitize_filename(".. \n\v.\rマンゴー/|*()<[1/2] 3.14 hello world ")
+      .should eq "マンゴー_()[1_2] 3.14 hello world"
+  end
+end
@@ -11,6 +11,8 @@ class Config
   property session_secret : String = "mango-session-secret"
   property library_path : String = File.expand_path "~/mango/library",
     home: true
+  property library_cache_path = File.expand_path "~/mango/library.yml.gz",
+    home: true
   property db_path : String = File.expand_path "~/mango/mango.db", home: true
   property scan_interval_minutes : Int32 = 5
   property thumbnail_generation_interval_hours : Int32 = 24
@@ -20,6 +22,9 @@ class Config
   property plugin_path : String = File.expand_path "~/mango/plugins",
     home: true
   property download_timeout_seconds : Int32 = 30
+  property cache_enabled = false
+  property cache_size_mbs = 50
+  property cache_log_enabled = true
   property disable_login = false
   property default_username = ""
   property auth_proxy_header_name = ""
@@ -54,8 +54,9 @@ class AuthHandler < Kemal::Handler
   end
 
   def call(env)
-    # Skip all authentication if requesting /login, /logout, or a static file
-    if request_path_startswith(env, ["/login", "/logout"]) ||
+    # Skip all authentication if requesting /login, /logout, /api/login,
+    # or a static file
+    if request_path_startswith(env, ["/login", "/logout", "/api/login"]) ||
        requesting_static_file env
      return call_next(env)
     end
src/library/cache.cr (new file, 188 lines)
@@ -0,0 +1,188 @@
+require "digest"
+
+require "./entry"
+require "./types"
+
+# Base class for an entry in the LRU cache.
+# There are two ways to use it:
+# 1. Use it as it is by instantiating with the appropriate `SaveT` and
+#    `ReturnT`. Note that in this case, `SaveT` and `ReturnT` must be the
+#    same type. That is, the input value will be stored as it is without
+#    any transformation.
+# 2. You can also subclass it and provide custom implementations for
+#    `to_save_t` and `to_return_t`. This allows you to transform and store
+#    the input value to a different type. See `SortedEntriesCacheEntry` as
+#    an example.
+private class CacheEntry(SaveT, ReturnT)
+  getter key : String, atime : Time
+
+  @value : SaveT
+
+  def initialize(@key : String, value : ReturnT)
+    @atime = @ctime = Time.utc
+    @value = self.class.to_save_t value
+  end
+
+  def value
+    @atime = Time.utc
+    self.class.to_return_t @value
+  end
+
+  def self.to_save_t(value : ReturnT)
+    value
+  end
+
+  def self.to_return_t(value : SaveT)
+    value
+  end
+
+  def instance_size
+    instance_sizeof(CacheEntry(SaveT, ReturnT)) + # sizeof itself
+      instance_sizeof(String) + @key.bytesize +   # allocated memory for @key
+      @value.instance_size
+  end
+end
+
+class SortedEntriesCacheEntry < CacheEntry(Array(String), Array(Entry))
+  def self.to_save_t(value : Array(Entry))
+    value.map &.id
+  end
+
+  def self.to_return_t(value : Array(String))
+    ids_to_entries value
+  end
+
+  private def self.ids_to_entries(ids : Array(String))
+    e_map = Library.default.deep_entries.to_h { |entry| {entry.id, entry} }
+    entries = [] of Entry
+    begin
+      ids.each do |id|
+        entries << e_map[id]
+      end
+      return entries if ids.size == entries.size
+    rescue
+    end
+  end
+
+  def instance_size
+    instance_sizeof(SortedEntriesCacheEntry) +  # sizeof itself
+      instance_sizeof(String) + @key.bytesize + # allocated memory for @key
+      @value.size * (instance_sizeof(String) + sizeof(String)) +
+      @value.sum(&.bytesize) # elements in Array(String)
+  end
+
+  def self.gen_key(book_id : String, username : String,
+                  entries : Array(Entry), opt : SortOptions?)
+    entries_sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
+    user_context = opt && opt.method == SortMethod::Progress ? username : ""
+    sig = Digest::SHA1.hexdigest (book_id + entries_sig + user_context +
+                                  (opt ? opt.to_tuple.to_s : "nil"))
+    "#{sig}:sorted_entries"
+  end
+end
+
+class String
+  def instance_size
+    instance_sizeof(String) + bytesize
+  end
+end
+
+struct Tuple(*T)
+  def instance_size
+    sizeof(T) + # total size of non-reference types
+      self.sum do |e|
+        next 0 unless e.is_a? Reference
+        if e.responds_to? :instance_size
+          e.instance_size
+        else
+          instance_sizeof(typeof(e))
+        end
+      end
+  end
+end
+
+alias CacheableType = Array(Entry) | String | Tuple(String, Int32)
+alias CacheEntryType = SortedEntriesCacheEntry |
+                       CacheEntry(String, String) |
+                       CacheEntry(Tuple(String, Int32), Tuple(String, Int32))
+
+def generate_cache_entry(key : String, value : CacheableType)
+  if value.is_a? Array(Entry)
+    SortedEntriesCacheEntry.new key, value
+  else
+    CacheEntry(typeof(value), typeof(value)).new key, value
+  end
+end
+
+# LRU Cache
+class LRUCache
+  @@limit : Int128 = Int128.new 0
+  @@should_log = true
+  # key => entry
+  @@cache = {} of String => CacheEntryType
+
+  def self.enabled
+    Config.current.cache_enabled
+  end
+
+  def self.init
+    cache_size = Config.current.cache_size_mbs
+    @@limit = Int128.new cache_size * 1024 * 1024 if enabled
+    @@should_log = Config.current.cache_log_enabled
+  end
+
+  def self.get(key : String)
+    return unless enabled
+    entry = @@cache[key]?
+    if @@should_log
+      Logger.debug "LRUCache #{entry.nil? ? "miss" : "hit"} #{key}"
+    end
+    return entry.value unless entry.nil?
+  end
+
+  def self.set(cache_entry : CacheEntryType)
+    return unless enabled
+    key = cache_entry.key
+    @@cache[key] = cache_entry
+    Logger.debug "LRUCache cached #{key}" if @@should_log
+    remove_least_recent_access
+  end
+
+  def self.invalidate(key : String)
+    return unless enabled
+    @@cache.delete key
+  end
+
+  def self.print
+    return unless @@should_log
+    sum = @@cache.sum { |_, entry| entry.instance_size }
+    Logger.debug "---- LRU Cache ----"
+    Logger.debug "Size: #{sum} Bytes"
+    Logger.debug "List:"
+    @@cache.each do |k, v|
+      Logger.debug "#{k} | #{v.atime} | #{v.instance_size}"
+    end
+    Logger.debug "-------------------"
+  end
+
+  private def self.is_cache_full
+    sum = @@cache.sum { |_, entry| entry.instance_size }
+    sum > @@limit
+  end
+
+  private def self.remove_least_recent_access
+    if @@should_log && is_cache_full
+      Logger.debug "Removing entries from LRUCache"
+    end
+    while is_cache_full && @@cache.size > 0
+      min_tuple = @@cache.min_by { |_, entry| entry.atime }
+      min_key = min_tuple[0]
+      min_entry = min_tuple[1]
+
+      Logger.debug " \
+        Target: #{min_key}, \
+        Last Access Time: #{min_entry.atime}" if @@should_log
+      invalidate min_key
+    end
+  end
+end
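A brief usage sketch for the cache above (hypothetical key and value, not part of the commit). `generate_cache_entry` picks the right `CacheEntry` variant for the value type; `get` refreshes the entry's access time and returns `nil` on a miss or when the cache is disabled:

```
LRUCache.init # reads cache_enabled, cache_size_mbs, cache_log_enabled

LRUCache.set generate_cache_entry("demo:key", "hello") # hypothetical key
LRUCache.get "demo:key"      # => "hello"
LRUCache.invalidate "demo:key"
LRUCache.get "demo:key"      # => nil
```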
@@ -1,6 +1,9 @@
 require "image_size"
+require "yaml"
 
 class Entry
+  include YAML::Serializable
+
   getter zip_path : String, book : Title, title : String,
     size : String, pages : Int32, id : String, encoded_path : String,
     encoded_title : String, mtime : Time, err_msg : String?
@@ -46,7 +49,7 @@ class Entry
     file.close
   end
 
-  def to_slim_json : String
+  def build_json(*, slim = false)
     JSON.build do |json|
       json.object do
         {% for str in ["zip_path", "title", "size", "id"] %}
@@ -54,22 +57,14 @@ class Entry
         {% end %}
         json.field "title_id", @book.id
         json.field "pages" { json.number @pages }
-      end
-    end
-  end
-
-  def to_json(json : JSON::Builder)
-    json.object do
-      {% for str in ["zip_path", "title", "size", "id"] %}
-        json.field {{str}}, @{{str.id}}
-      {% end %}
-      json.field "title_id", @book.id
-      json.field "display_name", @book.display_name @title
-      json.field "cover_url", cover_url
-      json.field "pages" { json.number @pages }
-      json.field "mtime" { json.number @mtime.to_unix }
+        unless slim
+          json.field "display_name", @book.display_name @title
+          json.field "cover_url", cover_url
+          json.field "mtime" { json.number @mtime.to_unix }
+        end
       end
     end
+  end
 
   def display_name
     @book.display_name @title
@@ -81,9 +76,17 @@ class Entry
 
   def cover_url
     return "#{Config.current.base_url}img/icon.png" if @err_msg
-    url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
-    TitleInfo.new @book.dir do |info|
-      info_url = info.entry_cover_url[@title]?
+
+    unless @book.entry_cover_url_cache
+      TitleInfo.new @book.dir do |info|
+        @book.entry_cover_url_cache = info.entry_cover_url
+      end
+    end
+    entry_cover_url = @book.entry_cover_url_cache
+
+    url = "#{Config.current.base_url}api/cover/#{@book.id}/#{@id}"
+    if entry_cover_url
+      info_url = entry_cover_url[@title]?
       unless info_url.nil? || info_url.empty?
         url = File.join Config.current.base_url, info_url
       end
@@ -170,6 +173,16 @@ class Entry
   # For backward compatibility with v0.1.0, we save entry titles
   # instead of IDs in info.json
   def save_progress(username, page)
+    LRUCache.invalidate "#{@book.id}:#{username}:progress_sum"
+    @book.parents.each do |parent|
+      LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
+    end
+    [false, true].each do |ascend|
+      sorted_entries_cache_key = SortedEntriesCacheEntry.gen_key @book.id,
+        username, @book.entries, SortOptions.new(SortMethod::Progress, ascend)
+      LRUCache.invalidate sorted_entries_cache_key
+    end
+
     TitleInfo.new @book.dir do |info|
       if info.progress[username]?.nil?
         info.progress[username] = {@title => page}
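To make the invalidation above concrete, with a hypothetical book ID `b1`, parent title ID `p1`, and user `alice`, a progress update drops these keys (the sorted-entries names are SHA-1 digests produced by `SortedEntriesCacheEntry.gen_key` for ascending and descending `SortMethod::Progress`):

```
LRUCache.invalidate "b1:alice:progress_sum" # the book's own progress sum
LRUCache.invalidate "p1:alice:progress_sum" # one key per parent title
# plus two "#{sig}:sorted_entries" keys, one per sort direction
```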
@@ -1,12 +1,38 @@
 class Library
+  include YAML::Serializable
+
   getter dir : String, title_ids : Array(String),
     title_hash : Hash(String, Title)
 
   use_default
 
-  def initialize
-    register_mime_types
+  def save_instance
+    path = Config.current.library_cache_path
+    Logger.debug "Caching library to #{path}"
+
+    writer = Compress::Gzip::Writer.new path,
+      Compress::Gzip::BEST_COMPRESSION
+    writer.write self.to_yaml.to_slice
+    writer.close
+  end
+
+  def self.load_instance
+    path = Config.current.library_cache_path
+    return unless File.exists? path
+
+    Logger.debug "Loading cached library from #{path}"
+
+    begin
+      Compress::Gzip::Reader.open path do |content|
+        @@default = Library.from_yaml content
+      end
+      Library.default.register_jobs
+    rescue e
+      Logger.error e
+    end
+  end
+
+  def initialize
     @dir = Config.current.library_path
     # explicitly initialize @titles to bypass the compiler check. it will
     # be filled with actual Titles in the `scan` call below
@@ -16,6 +42,12 @@ class Library
     @entries_count = 0
     @thumbnails_count = 0
+
+    register_jobs
+  end
+
+  protected def register_jobs
+    register_mime_types
 
     scan_interval = Config.current.scan_interval_minutes
     if scan_interval < 1
       scan
@@ -25,7 +57,7 @@ class Library
         start = Time.local
         scan
         ms = (Time.local - start).total_milliseconds
-        Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
+        Logger.debug "Library initialized in #{ms}ms"
         sleep scan_interval.minutes
       end
     end
@@ -51,11 +83,6 @@ class Library
   def sorted_titles(username, opt : SortOptions? = nil)
     if opt.nil?
       opt = SortOptions.from_info_json @dir, username
-    else
-      TitleInfo.new @dir do |info|
-        info.sort_by[username] = opt.to_tuple
-        info.save
-      end
     end
 
     # Helper function from src/util/util.cr
@@ -66,14 +93,18 @@ class Library
     titles + titles.flat_map &.deep_titles
   end
 
-  def to_slim_json : String
+  def deep_entries
+    titles.flat_map &.deep_entries
+  end
+
+  def build_json(*, slim = false, depth = -1)
     JSON.build do |json|
       json.object do
         json.field "dir", @dir
         json.field "titles" do
           json.array do
             self.titles.each do |title|
-              json.raw title.to_slim_json
+              json.raw title.build_json(slim: slim, depth: depth)
             end
           end
         end
@@ -81,15 +112,6 @@ class Library
       end
     end
   end
-
-  def to_json(json : JSON::Builder)
-    json.object do
-      json.field "dir", @dir
-      json.field "titles" do
-        json.raw self.titles.to_json
-      end
-    end
-  end
 
   def get_title(tid)
     @title_hash[tid]?
   end
@@ -99,6 +121,7 @@ class Library
   end
 
   def scan
+    start = Time.local
     unless Dir.exists? @dir
       Logger.info "The library directory #{@dir} does not exist. " \
                   "Attempting to create it"
|
|||||||
|
|
||||||
storage = Storage.new auto_close: false
|
storage = Storage.new auto_close: false
|
||||||
|
|
||||||
(Dir.entries @dir)
|
examine_context : ExamineContext = {
|
||||||
|
cached_contents_signature: {} of String => String,
|
||||||
|
deleted_title_ids: [] of String,
|
||||||
|
deleted_entry_ids: [] of String,
|
||||||
|
}
|
||||||
|
|
||||||
|
library_paths = (Dir.entries @dir)
|
||||||
.select { |fn| !fn.starts_with? "." }
|
.select { |fn| !fn.starts_with? "." }
|
||||||
.map { |fn| File.join @dir, fn }
|
.map { |fn| File.join @dir, fn }
|
||||||
|
@title_ids.select! do |title_id|
|
||||||
|
title = @title_hash[title_id]
|
||||||
|
next false unless library_paths.includes? title.dir
|
||||||
|
existence = title.examine examine_context
|
||||||
|
unless existence
|
||||||
|
examine_context["deleted_title_ids"].concat [title_id] +
|
||||||
|
title.deep_titles.map &.id
|
||||||
|
examine_context["deleted_entry_ids"].concat title.deep_entries.map &.id
|
||||||
|
end
|
||||||
|
existence
|
||||||
|
end
|
||||||
|
remained_title_dirs = @title_ids.map { |id| title_hash[id].dir }
|
||||||
|
examine_context["deleted_title_ids"].each do |title_id|
|
||||||
|
@title_hash.delete title_id
|
||||||
|
end
|
||||||
|
|
||||||
|
cache = examine_context["cached_contents_signature"]
|
||||||
|
library_paths
|
||||||
|
.select { |path| !(remained_title_dirs.includes? path) }
|
||||||
.select { |path| File.directory? path }
|
.select { |path| File.directory? path }
|
||||||
.map { |path| Title.new path, "" }
|
.map { |path| Title.new path, "", cache }
|
||||||
.select { |title| !(title.entries.empty? && title.titles.empty?) }
|
.select { |title| !(title.entries.empty? && title.titles.empty?) }
|
||||||
.sort! { |a, b| a.title <=> b.title }
|
.sort! { |a, b| a.title <=> b.title }
|
||||||
.tap { |_| @title_ids.clear }
|
|
||||||
.each do |title|
|
.each do |title|
|
||||||
@title_hash[title.id] = title
|
@title_hash[title.id] = title
|
||||||
@title_ids << title.id
|
@title_ids << title.id
|
||||||
@@ -123,8 +170,15 @@ class Library
     storage.bulk_insert_ids
     storage.close
 
-    Logger.debug "Scan completed"
-    Storage.default.mark_unavailable
+    ms = (Time.local - start).total_milliseconds
+    Logger.info "Scanned #{@title_ids.size} titles in #{ms}ms"
+
+    Storage.default.mark_unavailable examine_context["deleted_entry_ids"],
+      examine_context["deleted_title_ids"]
+
+    spawn do
+      save_instance
+    end
   end
 
   def get_continue_reading_entries(username)
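To summarize the rescan flow above in one sketch (simplified, not part of the diff): a single `ExamineContext` is threaded through every `Title#examine` call, and whatever remains marked as deleted is handed to `Storage#mark_unavailable`:

```
context : ExamineContext = {
  cached_contents_signature: {} of String => String,
  deleted_title_ids:         [] of String,
  deleted_entry_ids:         [] of String,
}
kept = title.examine context # false means the title vanished from disk
Storage.default.mark_unavailable context["deleted_entry_ids"],
  context["deleted_title_ids"]
```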
@@ -1,13 +1,25 @@
+require "digest"
 require "../archive"
 
 class Title
+  include YAML::Serializable
+
   getter dir : String, parent_id : String, title_ids : Array(String),
     entries : Array(Entry), title : String, id : String,
-    encoded_title : String, mtime : Time, signature : UInt64
+    encoded_title : String, mtime : Time, signature : UInt64,
+    entry_cover_url_cache : Hash(String, String)?
+  setter entry_cover_url_cache : Hash(String, String)?
 
+  @[YAML::Field(ignore: true)]
   @entry_display_name_cache : Hash(String, String)?
+  @[YAML::Field(ignore: true)]
+  @entry_cover_url_cache : Hash(String, String)?
+  @[YAML::Field(ignore: true)]
+  @cached_display_name : String?
+  @[YAML::Field(ignore: true)]
+  @cached_cover_url : String?
 
-  def initialize(@dir : String, @parent_id)
+  def initialize(@dir : String, @parent_id, cache = {} of String => String)
     storage = Storage.default
     @signature = Dir.signature dir
     id = storage.get_title_id dir, signature
@@ -20,6 +32,7 @@ class Title
       })
     end
     @id = id
+    @contents_signature = Dir.contents_signature dir, cache
     @title = File.basename dir
     @encoded_title = URI.encode @title
     @title_ids = [] of String
@@ -30,7 +43,7 @@ class Title
       next if fn.starts_with? "."
       path = File.join dir, fn
       if File.directory? path
-        title = Title.new path, @id
+        title = Title.new path, @id, cache
         next if title.entries.size == 0 && title.titles.size == 0
         Library.default.title_hash[title.id] = title
         @title_ids << title.id
@@ -57,55 +70,167 @@ class Title
     end
   end
 
-  def to_slim_json : String
-    JSON.build do |json|
-      json.object do
-        {% for str in ["dir", "title", "id"] %}
-          json.field {{str}}, @{{str.id}}
-        {% end %}
-        json.field "signature" { json.number @signature }
-        json.field "titles" do
-          json.array do
-            self.titles.each do |title|
-              json.raw title.to_slim_json
-            end
-          end
-        end
-        json.field "entries" do
-          json.array do
-            @entries.each do |entry|
-              json.raw entry.to_slim_json
-            end
-          end
-        end
-        json.field "parents" do
-          json.array do
-            self.parents.each do |title|
-              json.object do
-                json.field "title", title.title
-                json.field "id", title.id
-              end
-            end
-          end
-        end
-      end
-    end
-  end
-
-  def to_json(json : JSON::Builder)
-    json.object do
-      {% for str in ["dir", "title", "id"] %}
-        json.field {{str}}, @{{str.id}}
-      {% end %}
-      json.field "signature" { json.number @signature }
-      json.field "display_name", display_name
-      json.field "cover_url", cover_url
-      json.field "mtime" { json.number @mtime.to_unix }
-      json.field "titles" do
-        json.raw self.titles.to_json
-      end
-      json.field "entries" do
-        json.raw @entries.to_json
-      end
+  # Utility method used in library rescanning.
+  # - When the title does not exist on the file system anymore, return false
+  #   and let it be deleted from the library instance
+  # - When the title exists, but its contents signature is now different from
+  #   the cache, it means some of its content (nested titles or entries)
+  #   has been added, deleted, or renamed. In this case we update its
+  #   contents signature and instance variables
+  # - When the title exists and its contents signature is still the same, we
+  #   return true so it can be reused without rescanning
+  def examine(context : ExamineContext) : Bool
+    return false unless Dir.exists? @dir
+    contents_signature = Dir.contents_signature @dir,
+      context["cached_contents_signature"]
+    return true if @contents_signature == contents_signature
+
+    @contents_signature = contents_signature
+    @signature = Dir.signature @dir
+    storage = Storage.default
+    id = storage.get_title_id dir, signature
+    if id.nil?
+      id = random_str
+      storage.insert_title_id({
+        path:      dir,
+        id:        id,
+        signature: signature.to_s,
+      })
+    end
+    @id = id
+    @mtime = File.info(@dir).modification_time
+
+    previous_titles_size = @title_ids.size
+    @title_ids.select! do |title_id|
+      title = Library.default.get_title title_id
+      unless title # in case data consistency is broken
+        context["deleted_title_ids"].concat [title_id]
+        next false
+      end
+      existence = title.examine context
+      unless existence
+        context["deleted_title_ids"].concat [title_id] +
+          title.deep_titles.map &.id
+        context["deleted_entry_ids"].concat title.deep_entries.map &.id
+      end
+      existence
+    end
+    remained_title_dirs = @title_ids.map do |title_id|
+      title = Library.default.get_title! title_id
+      title.dir
+    end
+
+    previous_entries_size = @entries.size
+    @entries.select! do |entry|
+      existence = File.exists? entry.zip_path
+      Fiber.yield
+      context["deleted_entry_ids"] << entry.id unless existence
+      existence
+    end
+    remained_entry_zip_paths = @entries.map &.zip_path
+
+    is_titles_added = false
+    is_entries_added = false
+    Dir.entries(dir).each do |fn|
+      next if fn.starts_with? "."
+      path = File.join dir, fn
+      if File.directory? path
+        next if remained_title_dirs.includes? path
+        title = Title.new path, @id, context["cached_contents_signature"]
+        next if title.entries.size == 0 && title.titles.size == 0
+        Library.default.title_hash[title.id] = title
+        @title_ids << title.id
+        is_titles_added = true
+
+        # We think they are removed, but they are here!
+        # Cancel reserved jobs
+        revival_title_ids = [title.id] + title.deep_titles.map &.id
+        context["deleted_title_ids"].select! do |deleted_title_id|
+          !(revival_title_ids.includes? deleted_title_id)
+        end
+        revival_entry_ids = title.deep_entries.map &.id
+        context["deleted_entry_ids"].select! do |deleted_entry_id|
+          !(revival_entry_ids.includes? deleted_entry_id)
+        end
+
+        next
+      end
+      if is_supported_file path
+        next if remained_entry_zip_paths.includes? path
+        entry = Entry.new path, self
+        if entry.pages > 0 || entry.err_msg
+          @entries << entry
+          is_entries_added = true
+          context["deleted_entry_ids"].select! do |deleted_entry_id|
+            entry.id != deleted_entry_id
+          end
+        end
+      end
+    end
+
+    mtimes = [@mtime]
+    mtimes += @title_ids.map { |e| Library.default.title_hash[e].mtime }
+    mtimes += @entries.map &.mtime
+    @mtime = mtimes.max
+
+    if is_titles_added || previous_titles_size != @title_ids.size
+      @title_ids.sort! do |a, b|
+        compare_numerically Library.default.title_hash[a].title,
+          Library.default.title_hash[b].title
+      end
+    end
+    if is_entries_added || previous_entries_size != @entries.size
+      sorter = ChapterSorter.new @entries.map &.title
+      @entries.sort! do |a, b|
+        sorter.compare a.title, b.title
+      end
+    end
+
+    if @title_ids.size > 0 || @entries.size > 0
+      true
+    else
+      context["deleted_title_ids"].concat [@id]
+      false
+    end
+  end
+
+  alias SortContext = NamedTuple(username: String, opt: SortOptions)
+
+  def build_json(*, slim = false, depth = -1,
+                 sort_context : SortContext? = nil)
+    JSON.build do |json|
+      json.object do
+        {% for str in ["dir", "title", "id"] %}
+          json.field {{str}}, @{{str.id}}
+        {% end %}
+        json.field "signature" { json.number @signature }
+        json.field "display_name", display_name
+        json.field "cover_url", cover_url
+        json.field "mtime" { json.number @mtime.to_unix }
+        unless depth == 0
+          json.field "titles" do
+            json.array do
+              self.titles.each do |title|
+                json.raw title.build_json(slim: slim,
+                  depth: depth > 0 ? depth - 1 : depth)
+              end
+            end
+          end
+          json.field "entries" do
+            json.array do
+              _entries = if sort_context
+                           sorted_entries sort_context[:username],
+                             sort_context[:opt]
+                         else
+                           @entries
+                         end
+              _entries.each do |entry|
+                json.raw entry.build_json(slim: slim)
+              end
+            end
+          end
+        end
         json.field "parents" do
           json.array do
@@ -119,6 +244,7 @@ class Title
           end
         end
       end
+      end
 
   def titles
     @title_ids.map { |tid| Library.default.get_title! tid }
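A usage sketch for the new `build_json` (hypothetical calls, not part of the diff): `slim` strips `display_name`, `cover_url`, and `mtime`; `depth` bounds recursion into nested titles; `sort_context` sorts the entries for a specific user:

```
title.build_json                  # full JSON, unlimited depth
title.build_json(slim: true)      # skip display_name/cover_url/mtime
title.build_json(depth: 0)        # no nested titles or entries
title.build_json(depth: 1, sort_context: {
  username: "alice",              # hypothetical user
  opt:      SortOptions.new(SortMethod::Progress, true),
})
```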
@@ -177,11 +303,15 @@ class Title
   end
 
   def display_name
+    cached_display_name = @cached_display_name
+    return cached_display_name unless cached_display_name.nil?
+
     dn = @title
     TitleInfo.new @dir do |info|
       info_dn = info.display_name
       dn = info_dn unless info_dn.empty?
     end
+    @cached_display_name = dn
     dn
   end
 
@@ -205,6 +335,7 @@ class Title
   end
 
   def set_display_name(dn)
+    @cached_display_name = dn
     TitleInfo.new @dir do |info|
       info.display_name = dn
       info.save
@@ -214,11 +345,15 @@ class Title
   def set_display_name(entry_name : String, dn)
     TitleInfo.new @dir do |info|
       info.entry_display_name[entry_name] = dn
+      @entry_display_name_cache = info.entry_display_name
       info.save
     end
   end
 
   def cover_url
+    cached_cover_url = @cached_cover_url
+    return cached_cover_url unless cached_cover_url.nil?
+
     url = "#{Config.current.base_url}img/icon.png"
     readable_entries = @entries.select &.err_msg.nil?
     if readable_entries.size > 0
@@ -230,10 +365,12 @@ class Title
         url = File.join Config.current.base_url, info_url
       end
     end
+    @cached_cover_url = url
     url
   end
 
   def set_cover_url(url : String)
+    @cached_cover_url = url
     TitleInfo.new @dir do |info|
       info.cover_url = url
       info.save
@@ -243,6 +380,7 @@ class Title
   def set_cover_url(entry_name : String, url : String)
     TitleInfo.new @dir do |info|
       info.entry_cover_url[entry_name] = url
+      @entry_cover_url_cache = info.entry_cover_url
       info.save
     end
   end
@@ -262,8 +400,15 @@ class Title
   end
 
   def deep_read_page_count(username) : Int32
-    load_progress_for_all_entries(username).sum +
+    key = "#{@id}:#{username}:progress_sum"
+    sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
+    cached_sum = LRUCache.get key
+    return cached_sum[1] if cached_sum.is_a? Tuple(String, Int32) &&
+                            cached_sum[0] == sig
+    sum = load_progress_for_all_entries(username, nil, true).sum +
       titles.flat_map(&.deep_read_page_count username).sum
+    LRUCache.set generate_cache_entry key, {sig, sum}
+    sum
   end
 
   def deep_total_page_count : Int32
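The cached value here is a `{signature, sum}` tuple rather than a bare integer: the SHA-1 over the current entry IDs is rechecked on every read, so adding or removing an entry silently invalidates the cached sum without an explicit `invalidate` call. A condensed sketch of the guard (not part of the diff):

```
cached = LRUCache.get "#{@id}:#{username}:progress_sum"
fresh_sig = Digest::SHA1.hexdigest (entries.map &.id).to_s
# only trust the cached sum when the entry list is unchanged
usable = cached.is_a?(Tuple(String, Int32)) && cached[0] == fresh_sig
```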
@@ -317,13 +462,12 @@ class Title
   # use the default (auto, ascending)
   # When `opt` is not nil, it saves the options to info.json
   def sorted_entries(username, opt : SortOptions? = nil)
+    cache_key = SortedEntriesCacheEntry.gen_key @id, username, @entries, opt
+    cached_entries = LRUCache.get cache_key
+    return cached_entries if cached_entries.is_a? Array(Entry)
+
     if opt.nil?
       opt = SortOptions.from_info_json @dir, username
-    else
-      TitleInfo.new @dir do |info|
-        info.sort_by[username] = opt.to_tuple
-        info.save
-      end
     end
 
     case opt.not_nil!.method
@@ -355,6 +499,7 @@ class Title
 
     ary.reverse! unless opt.not_nil!.ascend
 
+    LRUCache.set generate_cache_entry cache_key, ary
     ary
   end
 
@@ -416,6 +561,17 @@ class Title
   end
 
   def bulk_progress(action, ids : Array(String), username)
+    LRUCache.invalidate "#{@id}:#{username}:progress_sum"
+    parents.each do |parent|
+      LRUCache.invalidate "#{parent.id}:#{username}:progress_sum"
+    end
+    [false, true].each do |ascend|
+      sorted_entries_cache_key =
+        SortedEntriesCacheEntry.gen_key @id, username, @entries,
+          SortOptions.new(SortMethod::Progress, ascend)
+      LRUCache.invalidate sorted_entries_cache_key
+    end
+
     selected_entries = ids
       .map { |id|
         @entries.find &.id.==(id)
@@ -1,4 +1,12 @@
-SUPPORTED_IMG_TYPES = ["image/jpeg", "image/png", "image/webp"]
+SUPPORTED_IMG_TYPES = %w(
+  image/jpeg
+  image/png
+  image/webp
+  image/apng
+  image/avif
+  image/gif
+  image/svg+xml
+)
 
 enum SortMethod
   Auto
@@ -88,6 +96,18 @@ class TitleInfo
   @@mutex_hash = {} of String => Mutex
 
   def self.new(dir, &)
+    key = "#{dir}:info.json"
+    info = LRUCache.get key
+    if info.is_a? String
+      begin
+        instance = TitleInfo.from_json info
+        instance.dir = dir
+        yield instance
+        return
+      rescue
+      end
+    end
+
     if @@mutex_hash[dir]?
       mutex = @@mutex_hash[dir]
     else
|
|||||||
instance = TitleInfo.from_json File.read json_path
|
instance = TitleInfo.from_json File.read json_path
|
||||||
end
|
end
|
||||||
instance.dir = dir
|
instance.dir = dir
|
||||||
|
LRUCache.set generate_cache_entry key, instance.to_json
|
||||||
yield instance
|
yield instance
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
@@ -108,5 +129,12 @@ class TitleInfo
   def save
     json_path = File.join @dir, "info.json"
     File.write json_path, self.to_pretty_json
+    key = "#{@dir}:info.json"
+    LRUCache.set generate_cache_entry key, self.to_json
   end
 end
+
+alias ExamineContext = NamedTuple(
+  cached_contents_signature: Hash(String, String),
+  deleted_title_ids: Array(String),
+  deleted_entry_ids: Array(String))
@@ -7,7 +7,7 @@ require "option_parser"
 require "clim"
 require "tallboy"
 
-MANGO_VERSION = "0.23.0"
+MANGO_VERSION = "0.24.0"
 
 # From http://www.network-science.de/ascii/
 BANNER = %{
@@ -55,8 +55,10 @@ class CLI < Clim
       Config.load(opts.config).set_current
 
       # Initialize main components
+      LRUCache.init
       Storage.default
       Queue.default
+      Library.load_instance
       Library.default
       Plugin::Downloader.default
 
@@ -23,11 +23,6 @@ class Plugin
       job
     end
 
-    private def process_filename(str)
-      return "_" if str == ".."
-      str.gsub "/", "_"
-    end
-
     private def download(job : Queue::Job)
       @downloading = true
       @queue.set_status Queue::JobStatus::Downloading, job
@@ -42,8 +37,8 @@
 
       pages = info["pages"].as_i
 
-      manga_title = process_filename job.manga_title
-      chapter_title = process_filename info["title"].as_s
+      manga_title = sanitize_filename job.manga_title
+      chapter_title = sanitize_filename info["title"].as_s
 
       @queue.set_pages pages, job
       lib_dir = @library_path
@@ -68,7 +63,7 @@
       while page = plugin.next_page
         break unless @queue.exists? job
 
-        fn = process_filename page["filename"].as_s
+        fn = sanitize_filename page["filename"].as_s
         url = page["url"].as_s
         headers = HTTP::Headers.new
 
|
@ -23,7 +23,7 @@ struct APIRouter
|
|||||||
|
|
||||||
# Authentication
|
# Authentication
|
||||||
|
|
||||||
All endpoints require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
|
All endpoints except `/api/login` require authentication. After logging in, your session ID would be stored as a cookie named `mango-sessid-#{Config.current.port}`, which can be used to authenticate the API access. Note that all admin API endpoints (`/api/admin/...`) require the logged-in user to have admin access.
|
||||||
|
|
||||||
# Terminologies
|
# Terminologies
|
||||||
|
|
||||||
@@ -56,6 +56,29 @@ struct APIRouter
     "error" => String?,
   }
 
+  Koa.describe "Authenticates a user", <<-MD
+    After successful login, the cookie `mango-sessid-#{Config.current.port}` will contain a valid session ID that can be used for subsequent requests
+  MD
+  Koa.body schema: {
+    "username" => String,
+    "password" => String,
+  }
+  Koa.tag "users"
+  post "/api/login" do |env|
+    begin
+      username = env.params.json["username"].as String
+      password = env.params.json["password"].as String
+      token = Storage.default.verify_user(username, password).not_nil!
+
+      env.session.string "token", token
+      "Authenticated"
+    rescue e
+      Logger.error e
+      env.response.status_code = 403
+      e.message
+    end
+  end
+
   Koa.describe "Returns a page in a manga entry"
   Koa.path "tid", desc: "Title ID"
   Koa.path "eid", desc: "Entry ID"
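A client-side sketch of the new login endpoint (standard-library calls only; host and credentials are placeholders, assuming the default port 9000). A successful login sets the `mango-sessid-9000` session cookie, which authenticates subsequent API requests:

```
require "http/client"
require "json"

body = {username: "admin", password: "secret"}.to_json # placeholder credentials
headers = HTTP::Headers{"Content-Type" => "application/json"}
res = HTTP::Client.post "http://localhost:9000/api/login", headers, body

puts res.status_code            # 200 on success, 403 on bad credentials
puts res.headers["Set-Cookie"]? # carries the mango-sessid-9000 session cookie
```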
@@ -133,24 +156,38 @@ struct APIRouter
   end
 
   Koa.describe "Returns the book with title `tid`", <<-MD
-    Supply the `tid` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
+    - Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
+    - Supply the `depth` query parameter to control the depth of nested titles to return.
+      - When `depth` is 1, returns the top-level titles and sub-titles/entries one level in them
+      - When `depth` is 0, returns the top-level titles without their sub-titles/entries
+      - When `depth` is N, returns the top-level titles and sub-titles/entries N levels in them
+      - When `depth` is negative, returns the entire library
   MD
   Koa.path "tid", desc: "Title ID"
   Koa.query "slim"
+  Koa.query "depth"
+  Koa.query "sort", desc: "Sorting option for entries. Can be one of 'auto', 'title', 'progress', 'time_added' and 'time_modified'"
+  Koa.query "ascend", desc: "Sorting direction for entries. Set to 0 for the descending order. Doesn't work without specifying 'sort'"
   Koa.response 200, schema: "title"
   Koa.response 404, "Title not found"
   Koa.tag "library"
   get "/api/book/:tid" do |env|
     begin
+      username = get_username env
+
+      sort_opt = SortOptions.new
+      get_sort_opt
+
       tid = env.params.url["tid"]
       title = Library.default.get_title tid
       raise "Title ID `#{tid}` not found" if title.nil?
 
-      if env.params.query["slim"]?
-        send_json env, title.to_slim_json
-      else
-        send_json env, title.to_json
-      end
+      slim = !env.params.query["slim"]?.nil?
+      depth = env.params.query["depth"]?.try(&.to_i?) || -1
+
+      send_json env, title.build_json(slim: slim, depth: depth,
+        sort_context: {username: username,
+                       opt: sort_opt})
     rescue e
       Logger.error e
       env.response.status_code = 404
@@ -159,20 +196,25 @@ struct APIRouter
     end
   end
 
   Koa.describe "Returns the entire library with all titles and entries", <<-MD
-    Supply the `tid` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
+    - Supply the `slim` query parameter to strip away "display_name", "cover_url", and "mtime" from the returned object to speed up the loading time
+    - Supply the `depth` query parameter to control the depth of nested titles to return.
+      - When `depth` is 1, returns the requested title and sub-titles/entries one level in it
+      - When `depth` is 0, returns the requested title without its sub-titles/entries
+      - When `depth` is N, returns the requested title and sub-titles/entries N levels in it
+      - When `depth` is negative, returns the requested title and all sub-titles/entries in it
   MD
   Koa.query "slim"
+  Koa.query "depth"
   Koa.response 200, schema: {
     "dir" => String,
     "titles" => ["title"],
   }
   Koa.tag "library"
   get "/api/library" do |env|
-    if env.params.query["slim"]?
-      send_json env, Library.default.to_slim_json
-    else
-      send_json env, Library.default.to_json
-    end
+    slim = !env.params.query["slim"]?.nil?
+    depth = env.params.query["depth"]?.try(&.to_i?) || -1
+
+    send_json env, Library.default.build_json(slim: slim, depth: depth)
   end
 
   Koa.describe "Triggers a library scan"
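A request-side sketch of the new query parameters (placeholder host and port; the session cookie is omitted for brevity). `slim=1` drops the expensive fields and `depth=0` stops recursion at the top level:

```
require "http/client"

res = HTTP::Client.get "http://localhost:9000/api/library?slim=1&depth=0"
puts res.status_code # 200; the body is the {"dir": ..., "titles": [...]} JSON
```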
@@ -41,7 +41,7 @@ struct MainRouter
       username = get_username env
 
       sort_opt = SortOptions.from_info_json Library.default.dir, username
-      get_sort_opt
+      get_and_save_sort_opt Library.default.dir
 
       titles = Library.default.sorted_titles username, sort_opt
       percentage = titles.map &.load_percentage username
@@ -59,12 +59,12 @@ struct MainRouter
       username = get_username env
 
       sort_opt = SortOptions.from_info_json title.dir, username
-      get_sort_opt
+      get_and_save_sort_opt title.dir
 
       entries = title.sorted_entries username, sort_opt
 
       percentage = title.load_percentage_for_all_entries username, sort_opt
       title_percentage = title.titles.map &.load_percentage username
 
       layout "title"
     rescue e
       Logger.error e
@@ -428,12 +428,21 @@ class Storage
     end
   end
 
-  def mark_unavailable
+  # Mark titles and entries that no longer exist on the file system as
+  # unavailable. By supplying `ids_candidates` and `titles_candidates`, it
+  # only checks the existence of the candidate titles/entries to speed up
+  # the process.
+  def mark_unavailable(ids_candidates : Array(String)?,
+                       titles_candidates : Array(String)?)
     MainFiber.run do
       get_db do |db|
         # Detect dangling entry IDs
         trash_ids = [] of String
-        db.query "select path, id from ids where unavailable = 0" do |rs|
+        query = "select path, id from ids where unavailable = 0"
+        unless ids_candidates.nil?
+          query += " and id in (#{ids_candidates.join "," { |i| "'#{i}'" }})"
+        end
+        db.query query do |rs|
           rs.each do
             path = rs.read String
             fullpath = Path.new(path).expand(Config.current.library_path).to_s
@@ -449,7 +458,11 @@ class Storage
 
         # Detect dangling title IDs
         trash_titles = [] of String
-        db.query "select path, id from titles where unavailable = 0" do |rs|
+        query = "select path, id from titles where unavailable = 0"
+        unless titles_candidates.nil?
+          query += " and id in (#{titles_candidates.join "," { |i| "'#{i}'" }})"
+        end
+        db.query query do |rs|
           rs.each do
             path = rs.read String
             fullpath = Path.new(path).expand(Config.current.library_path).to_s
@@ -48,4 +48,32 @@ class Dir
     end
     Digest::CRC32.checksum(signatures.sort.join).to_u64
   end
+
+  # Returns the contents signature of the directory at dirname, used to
+  # decide whether a rescan is needed.
+  # Rescan conditions:
+  # - a file was added, moved, removed, or renamed (including in nested
+  #   directories)
+  def self.contents_signature(dirname, cache = {} of String => String) : String
+    return cache[dirname] if cache[dirname]?
+    Fiber.yield
+    signatures = [] of String
+    self.open dirname do |dir|
+      dir.entries.sort.each do |fn|
+        next if fn.starts_with? "."
+        path = File.join dirname, fn
+        if File.directory? path
+          signatures << Dir.contents_signature path, cache
+        else
+          # Only add its signature value to `signatures` when it is a
+          # supported file
+          signatures << fn if is_supported_file fn
+        end
+        Fiber.yield
+      end
+    end
+    hash = Digest::SHA1.hexdigest(signatures.join)
+    cache[dirname] = hash
+    hash
+  end
 end
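A usage sketch for `Dir.contents_signature` (hypothetical path). Sharing one cache hash across calls is what makes repeated signature checks cheap within a single scan, which is exactly how `Title#examine` uses it:

```
cache = {} of String => String
sig1 = Dir.contents_signature "/manga/library", cache # hypothetical path
# ... a file is added, removed, or renamed under the directory ...
sig2 = Dir.contents_signature "/manga/library" # fresh, uncached hash
needs_rescan = sig1 != sig2
```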
@@ -35,6 +35,11 @@ def register_mime_types
     # FontAwesome fonts
     ".woff"  => "font/woff",
     ".woff2" => "font/woff2",
+
+    # Supported image formats. JPG, PNG, GIF, WebP, and SVG are already
+    # defined by Crystal in `MIME.DEFAULT_TYPES`
+    ".apng" => "image/apng",
+    ".avif" => "image/avif",
   }.each do |k, v|
     MIME.register k, v
   end
@@ -120,3 +125,22 @@ class String
     match / s.size
   end
 end
+
+# Does the following:
+# - turns space-like characters into normal whitespaces ( )
+# - strips and collapses spaces
+# - removes ASCII control characters
+# - replaces slashes (/) with underscores (_)
+# - removes leading dots (.)
+# - removes the following special characters: \:*?"<>|
+#
+# If the sanitized string is empty, returns a random string instead.
+def sanitize_filename(str : String) : String
+  sanitized = str
+    .gsub(/\s+/, " ")
+    .strip
+    .gsub(/\//, "_")
+    .gsub(/^[\.\s]+/, "")
+    .gsub(/[\177\000-\031\\:\*\?\"<>\|]/, "")
+  sanitized.size > 0 ? sanitized : random_str
+end
@@ -107,6 +107,26 @@ macro get_sort_opt
   end
 end
 
+macro get_and_save_sort_opt(dir)
+  sort_method = env.params.query["sort"]?
+
+  if sort_method
+    is_ascending = true
+
+    ascend = env.params.query["ascend"]?
+    if ascend && ascend.to_i? == 0
+      is_ascending = false
+    end
+
+    sort_opt = SortOptions.new sort_method, is_ascending
+
+    TitleInfo.new {{dir}} do |info|
+      info.sort_by[username] = sort_opt.to_tuple
+      info.save
+    end
+  end
+end
+
 module HTTP
   class Client
     private def self.exec(uri : URI, tls : TLSContext = nil)
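Usage examples for the `sanitize_filename` helper added above, matching the behavior pinned down by the new specs earlier in this diff:

```
sanitize_filename(".. \n\v.\rマンゴー/|*()<[1/2] 3.14 hello world ")
# => "マンゴー_()[1_2] 3.14 hello world"

sanitize_filename("..") # => a random string; "" would be an unusable filename
```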
@@ -32,10 +32,10 @@
 		</div>
 		<div class="uk-position-top">
 			<div class="uk-navbar-container uk-navbar-transparent" uk-navbar="uk-navbar">
-				<div class="uk-navbar-left uk-hidden@s">
+				<div class="uk-navbar-left uk-hidden@m">
 					<div class="uk-navbar-toggle" uk-navbar-toggle-icon="uk-navbar-toggle-icon" uk-toggle="target: #mobile-nav"></div>
 				</div>
-				<div class="uk-navbar-left uk-visible@s">
+				<div class="uk-navbar-left uk-visible@m">
 					<a class="uk-navbar-item uk-logo" href="<%= base_url %>"><img src="<%= base_url %>img/icon.png" style="width:90px;height:90px;"></a>
 					<ul class="uk-navbar-nav">
 						<li><a href="<%= base_url %>">Home</a></li>
@@ -57,7 +57,7 @@
 					<% end %>
 				</ul>
 			</div>
-			<div class="uk-navbar-right uk-visible@s">
+			<div class="uk-navbar-right uk-visible@m">
 				<ul class="uk-navbar-nav">
 					<li><a onclick="toggleTheme()"><i class="fas fa-adjust"></i></a></li>
 					<li><a href="<%= base_url %>logout">Logout</a></li>