forked from 20xd6/simple_blog_cms
Update publish_blog.py
This commit is contained in:
parent 9cdb918098
commit de5477b5ec
publish_blog.py (163 additions, 0 deletions)
@@ -0,0 +1,163 @@
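"""Publish a Markdown article to the blog.

Lets the user pick a title (from the filename or the first <h1>),
creates a dated year/month directory under the configured web root,
copies the article and its local images there with a publication
date stamp, symlinks the shared index.php files, and hands the new
article to the local find_ini and tagging helper modules.
"""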
import datetime
import html
import os
import shutil
import sys
from pathlib import Path

import markdown
from bs4 import BeautifulSoup

import find_ini
import tagging


def publish_path(article_path):
    # Publish the Markdown file at article_path, if it exists.
    if os.path.exists(article_path):
        parse_file(article_path)
    else:
        print("The given path {} was not found.".format(os.path.abspath(article_path)))


def publish_parse():
    # Let the user pick one of the Markdown files in the current directory.
    md_files = list(Path('.').glob('*.md'))
    md_article = choose_option(md_files, "file to publish")
    parse_file(md_article)


def parse_file(article_path):
    # Read the blog configuration and work out where articles live.
    blog_config = find_ini.return_options()
    web_root = blog_config['general']['web_root']
    sub_folder = blog_config['general']['sub_folder']

    print("Parsing {}".format(os.path.abspath(article_path)))

    # Offer the filename and the first <h1> as candidate titles.
    title_from_header = find_header(os.path.abspath(article_path))
    title_from_path = find_filename(os.path.abspath(article_path))
    final_title = choose_option([title_from_path, title_from_header], "title")
    print("The selected title is: {0}".format(final_title))

    # Articles are filed under <web_root>/<sub_folder>/by_year/<year>/<month>/.
    date_now = datetime.datetime.now()
    year = date_now.strftime("%Y")
    month = date_now.strftime("%m")
    index_check_path = os.path.join(web_root, sub_folder, 'by_year')
    path_to_publish = os.path.join(index_check_path, year, month)
    article_number = next_article_num(path_to_publish)
    path_to_publish = os.path.join(path_to_publish, article_number + final_title.replace(' ', '_'))
    print("Publishing to: {0}".format(path_to_publish))
    os.makedirs(path_to_publish)

    # Symlink the shared index.php into each directory level that lacks one.
    index_file = os.path.join(web_root, 'common', 'index.php')
    place_indexes(index_check_path, path_to_publish, index_file)

    # Copy the article (with a publication date stamp) and its local images.
    add_datestamp(article_path, os.path.join(path_to_publish, "article.md"))
    article_imgs = find_local_links(article_path)
    copy_all_imgs(article_imgs, path_to_publish)

    # Update the site menu and apply tags to the new article.
    menu_path = os.path.join(web_root, "common", "menu.php")
    update_menu(menu_path, year, month)
    tagging.tag_article(path_to_publish)


def place_indexes(root_of_index, path_to_index, index_file):
    # Walk upwards from path_to_index to root_of_index, symlinking index_file
    # into any directory that does not yet have an index.php.
    while path_to_index != root_of_index:
        print(path_to_index)
        if not os.path.exists(os.path.join(path_to_index, "index.php")):
            os.symlink(index_file, os.path.join(path_to_index, "index.php"))
        path_to_index = os.path.dirname(path_to_index)


def find_header(article_path):
    # Render the Markdown and return the text of the first <h1>, if any.
    with open(article_path, "r") as article_source:
        article_text = article_source.read()
    article_html = markdown.markdown(article_text)
    soup = BeautifulSoup(article_html, 'html.parser')
    return soup.h1.string if soup.h1 else None


def find_local_links(article_path):
    # Collect the local files (linked documents and images) that the article
    # references, so they can be copied alongside it.
    article_imgs = []
    with open(article_path, "r") as article_source:
        article_text = article_source.read()
    article_html = markdown.markdown(article_text)
    soup = BeautifulSoup(article_html, 'html.parser')

    for local_link in soup.find_all('a'):
        href = local_link.get('href')
        if href is None:
            continue
        href = html.unescape(href)
        if is_local(href):
            print(href)
            article_imgs.append(href)

    for img in soup.find_all('img'):
        src = img.get('src')
        print(src)
        if src and is_local(src):
            article_imgs.append(src)

    return article_imgs


def is_local(file_href):
    # Anything that does not start with a remote URL scheme is treated as a
    # local file reference.
    remote_prefixes = ("http://", "https://", "ftp://", "tel:", "mailto:")
    return not file_href.startswith(remote_prefixes)


def add_datestamp(article_path, output_path):
    # Copy the article to output_path, inserting a "Published: YYYY/MM/DD"
    # paragraph as the third line of the output.
    publish_time = "Published: " + datetime.datetime.now().strftime("%Y/%m/%d")
    with open(article_path, "r") as article_source:
        file_contents = article_source.readlines()
    file_contents.insert(2, "<p id=\"date\">" + publish_time + "</p>\n")
    with open(output_path, "w") as article_dest:
        article_dest.write("".join(file_contents))


def copy_all_imgs(article_imgs, path_to_publish):
    # Copy each referenced local file into the published article directory,
    # recreating any relative sub-directories it sits in.
    for img in article_imgs:
        img_src = os.path.join('.', img)
        img_dest = os.path.join(path_to_publish, img)
        if not os.path.exists(os.path.dirname(img_dest)):
            os.makedirs(os.path.dirname(img_dest))
        shutil.copyfile(img_src, img_dest)


def find_filename(article_path):
    # Use the file name (without its extension) as a candidate title.
    base_name = os.path.basename(article_path)
    return os.path.splitext(base_name)[0]


def next_article_num(source_dir):
    # Number articles within a month by counting the existing article
    # directories; returns a zero-padded prefix such as "03_".
    source_path = Path(source_dir)
    number_of_dirs = 0
    for entry in source_path.glob('*'):
        if entry.is_dir():
            number_of_dirs += 1
    next_num = str(number_of_dirs + 1)
    return next_num.zfill(2) + "_"


def update_menu(menu_path, year, month):
    # Work in progress: this currently only inspects the existing menu
    # entries and prints debug information; it does not yet rewrite the
    # menu for the given year and month.
    print(menu_path)
    with open(menu_path, "r") as menu_source:
        menu_text = menu_source.read()
    soup = BeautifulSoup(menu_text, 'html.parser')

    list_items = soup.find_all(id="blog")
    last_post = soup.find_all(id="current_year")
    # current_date = soup.find_all(attrs={"id": "date"})

    item_count = 0
    for items in last_post:
        item_count += 1
        print(item_count)
        print(type(items))
        print(items.attrs)
        if "date=" in items:
            print(items)

    # print(list_items)
    # print(last_post)
    # print(current_date)


def choose_option(options, selecting):
    # Show a numbered menu and return the chosen entry, or exit on "q".
    for count, option in enumerate(options, start=1):
        print("{0}. {1}".format(count, option))
    print("q. exit")

    option_selection = input("Select a " + selecting + ": ")
    if option_selection.lower() == "q":
        sys.exit()
    return options[int(option_selection) - 1]