Mirror of https://github.com/TangentFoxy/.lua-files.git (synced 2024-11-20 21:34:23 +00:00)
fix #31
This commit is contained in:
parent 063437b596
commit 217c39f53f
README.md (24 changed lines)
@@ -18,6 +18,30 @@ Personally convenient Lua scripts to add to my path.
 ### make-epub.lua
 This script is only intended for personal use. Do not use it to infringe on copyright.
 
+```
+Usage:
+
+make-epub.lua <config (JSON file)> [action]
+
+If "." is used instead of a JSON file, every JSON file in the current directory
+will be used to make multiple ebooks back-to-back.
+
+[action]: If not specified, all steps will be taken in order (except cleanall).
+download: All pages will be downloaded to their own HTML files.
+convert: Each page is converted to Markdown.
+concat: A file is created for each section out of its pages.
+markdown: Metadata frontmatter and Markdown section files will be
+concatenated into a single Markdown file.
+epub: Markdown file will be converted to an ePub using pandoc.
+cleanpage: All page files will be deleted, along with their extra
+directories.
+cleanall: Deletes everything except the config file and ePub.
+
+Requirements:
+- Lua libraries: htmlparser, dkjson (or compatible)
+- Binaries: pandoc, curl
+```
+
 The JSON config spec has two major variations ("Book" and "Anthology").
 
 The following is shared:
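The config file named on the command line is plain JSON read with dkjson (listed in the Requirements above). As a quick illustration only, here is a minimal, self-contained sketch of loading such a config; the file name "my-book.json" and the "title" field are purely illustrative, the real fields come from the Book/Anthology spec in the README.

```
-- Minimal sketch: loading a JSON config with dkjson.
-- "my-book.json" and the "title" field are illustrative only; the real
-- fields come from the Book/Anthology spec described in the README.
local dkjson = require("dkjson")

local function load_config(path)
  local file = assert(io.open(path, "r"))
  local text = file:read("*a")
  file:close()
  local config, _, err = dkjson.decode(text)
  assert(config, err)
  return config
end

local config = load_config("my-book.json")
print(config.title)
```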
@@ -4,16 +4,19 @@ local help = [[Usage:
 
 make-epub.lua <config (JSON file)> [action]
 
-[action]: If not specified, all steps will be taken in order (except clean*).
+If "." is used instead of a JSON file, every JSON file in the current directory
+will be used to make multiple ebooks back-to-back.
+
+[action]: If not specified, all steps will be taken in order (except cleanall).
 download: All pages will be downloaded to their own HTML files.
 convert: Each page is converted to Markdown.
 concat: A file is created for each section out of its pages.
 markdown: Metadata frontmatter and Markdown section files will be
 concatenated into a single Markdown file.
 epub: Markdown file will be converted to an ePub using pandoc.
-cleanhtml: All HTML files will be deleted, along with their extra
+cleanpage: All page files will be deleted, along with their extra
 directories.
-cleanall: Deletes everything except the ePub.
+cleanall: Deletes everything except the config file and ePub.
 
 Requirements:
 - Lua libraries: htmlparser, dkjson (or compatible)
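The "." behaviour described in the help text above boils down to enumerating the *.json files in the current directory and running the pipeline once per file. A minimal sketch of that idea, assuming a POSIX shell is available for the listing; the helper name json_configs_in_cwd is hypothetical, not the script's actual implementation.

```
-- Sketch only: enumerate every .json file in the current directory so each
-- one can drive its own ebook build. Assumes a POSIX shell; the helper name
-- json_configs_in_cwd is hypothetical.
local function json_configs_in_cwd()
  local configs = {}
  local handle = io.popen("ls -1 *.json 2>/dev/null")
  if handle then
    for name in handle:lines() do
      configs[#configs + 1] = name
    end
    handle:close()
  end
  return configs
end

for _, name in ipairs(json_configs_in_cwd()) do
  print("would build an ebook from " .. name)
end
```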
@@ -269,33 +272,20 @@ local function make_epub(config)
   os.execute("pandoc --from markdown --to epub " .. (base_file_name .. ".md"):enquote() .. " -o " .. (output_dir .. path_separator .. base_file_name .. ".epub"):enquote() .. " --toc=true")
 end
 
-local function rm_html_files(config)
+local function rm_page_files(config)
   local working_dir = get_base_file_name(config)
   os.execute("sleep 1") -- attempt to fix #14
 
   for section = config.sections.start, config.sections.finish do
     local section_dir = working_dir .. path_separator .. tostring(section)
-    os.execute("rm " .. (section_dir .. ".html"):enquote())
-
-    for page = 1, config.page_counts[section - (config.sections.start - 1)] do
-      os.execute("rm " .. (section_dir .. path_separator .. page .. ".html"):enquote())
-    end
-
-    os.execute("rmdir " .. section_dir:enquote()) -- NOTE this is no longer possible due to Markdown versions of each page existing
+    os.execute(utility.recursive_remove_command .. section_dir:enquote())
   end
 end
 
 local function rm_all(config)
-  -- TODO use structure of rm_html_files because there's a Markdown file for every HTML file now..
   local working_dir = get_base_file_name(config)
-  rm_html_files(config)
 
-  for section = config.sections.start, config.sections.finish do
-    local section_file_name = working_dir .. path_separator .. tostring(section) .. ".md"
-    os.execute("rm " .. section_file_name:enquote())
-  end
-
-  os.execute("rmdir " .. working_dir:enquote())
+  os.execute(utility.recursive_remove_command .. working_dir:enquote())
   os.execute("rm " .. (get_base_file_name(config) .. ".md"):enquote())
 end
 
@@ -327,7 +317,7 @@ local function main(arguments)
     concat = concatenate_pages,
     markdown = write_markdown_file,
     epub = make_epub,
-    cleanhtml = rm_html_files,
+    cleanpage = rm_page_files,
     cleanall = rm_all,
   }
 
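The table above is what the help text's [action] argument indexes into: each key names one step, and omitting the action runs the steps in order. A self-contained sketch of that dispatch pattern follows; the placeholder bodies, the abridged step list, and the run helper are illustrative, not the script's actual code.

```
-- Sketch of the [action] dispatch pattern used above. The bodies and the
-- "run" helper are placeholders; the real functions live in make-epub.lua.
local actions = {
  download = function(config) print("download pages for " .. config.name) end,
  convert = function(config) print("convert pages to Markdown") end,
  cleanpage = function(config) print("remove per-page files") end,
  cleanall = function(config) print("remove everything but the config and ePub") end,
  -- concat, markdown, and epub follow the same pattern
}

local default_order = { "download", "convert", "cleanpage" } -- abridged to the steps defined above

local function run(config, action)
  if action then
    local step = assert(actions[action], "unrecognized action: " .. tostring(action))
    step(config)
  else
    for _, name in ipairs(default_order) do -- cleanall is never implied
      actions[name](config)
    end
  end
end

run({ name = "example" }, "download")
```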
@@ -345,12 +335,12 @@ local function main(arguments)
     convert_pages(config)
     print("\nConcatenating pages...\n")
     concatenate_pages(config)
+    print("\nRemoving page files...\n")
+    rm_page_files(config)
     print("\nWriting Markdown file...\n")
     write_markdown_file(config)
     print("\nMaking ePub...\n")
     make_epub(config)
-    -- print("\nRemoving HTML files...\n")
-    -- rm_html_files(config)
     print("\nDone!\n")
   end
 end
@@ -17,9 +17,11 @@ local utility = {}
 if package.config:sub(1, 1) == "\\" then
   utility.OS = "Windows"
   utility.path_separator = "\\"
+  utility.recursive_remove_command = "rmdir /s "
 else
   utility.OS = "UNIX-like"
   utility.path_separator = "/"
+  utility.recursive_remove_command = "rm -r "
 end
 
 utility.path = arg[0]:match("@?(.*/)") or arg[0]:match("@?(.*\\)") -- inspired by discussion in https://stackoverflow.com/q/6380820
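The two added lines above are what let rm_page_files and rm_all shell out with a single recursive delete on either platform. A self-contained sketch of the pattern; the enquote function here is a stand-in for the utility module's quoting helper, and the directory name is only an example.

```
-- Sketch of the cross-platform recursive remove introduced above.
-- enquote is a stand-in for the utility module's quoting helper.
local function enquote(s) return '"' .. s .. '"' end

local utility = {}
if package.config:sub(1, 1) == "\\" then -- first char of package.config is the path separator
  utility.path_separator = "\\"
  utility.recursive_remove_command = "rmdir /s "
else
  utility.path_separator = "/"
  utility.recursive_remove_command = "rm -r "
end

-- delete a section's working directory (HTML and Markdown pages) in one shell call
local function remove_tree(dir)
  os.execute(utility.recursive_remove_command .. enquote(dir))
end

remove_tree("example-book" .. utility.path_separator .. "1") -- example path only
```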