Use new Archiver module where applicable (#13782)
- ArchiveViewer: instead of using unzip, and to support more formats (only CBR & RAR for now)
- NewsDownloader & Wikipedia plugins: instead of using `ZipWriter`
- device: re-implement `Device:unpackArchive` and get rid of device-specific `untar` implementations
- readerui: instead of using unzip for document provider detection
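For orientation, the reading side of the new module is used the same way throughout the hunks below; here is a minimal sketch using only the calls that appear in the diff (Archiver.Reader:new(), open(), iterate(), entry.path/mode/size, extractToMemory(), extractToPath(), close(), and the arc.err field). The module itself ships with koreader-base as `ffi/archiver`; the archive path and member names in the sketch are hypothetical:

local Archiver = require("ffi/archiver")

local arc = Archiver.Reader:new()
if arc:open("/sdcard/books/comic.cbz") then -- hypothetical path
    for entry in arc:iterate() do
        -- entries expose at least: path, mode ("file"/"directory") and size
        if entry.mode == "file" then
            print(entry.path, entry.size)
        end
    end
    -- read a single member into a string, or extract it to disk
    local data = arc:extractToMemory("page001.png")        -- hypothetical member name
    local ok = arc:extractToPath("page001.png", "/tmp/page001.png")
    if not ok then
        print("extraction failed:", arc.err)
    end
    arc:close()
end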
Submodule base updated: 3b5dcdce5d...60145efe0d
@@ -4,6 +4,7 @@ ReaderUI is an abstraction for a reader interface.
 It works using data gathered from a document interface.
 ]]--

+local Archiver = require("ffi/archiver")
 local BD = require("ui/bidi")
 local BookList = require("ui/widget/booklist")
 local Device = require("device")
@@ -622,18 +623,16 @@ function ReaderUI:extendProvider(file, provider, is_provider_forced)
     -- or on the original file double extension ("fb2.zip" etc).
     local _, file_type = filemanagerutil.splitFileNameType(file) -- supports double-extension
     if file_type == "zip" then
         -- read the content of zip-file and get extension of the 1st file
-        local std_out = io.popen("unzip -qql \"" .. file .. "\"")
-        if std_out then
-            local size, ext
-            for line in std_out:lines() do
-                size, ext = string.match(line, "%s+(%d+)%s+.+%.([^.]+)")
-                if size and ext then break end
-            end
-            std_out:close()
-            if ext ~= nil then
-                file_type = ext:lower()
+        local arc = Archiver.Reader:new()
+        if arc:open(file) then
+            for entry in arc:iterate() do
+                local ext = util.getFileNameSuffix(entry.path)
+                if ext and entry.mode == "file" and entry.size > 0 then
+                    file_type = ext:lower()
+                    break
+                end
+            end
+            arc:close()
         end
     end
     if not is_provider_forced then
         local providers = DocumentRegistry:getProviders("dummy." .. file_type)
@@ -544,10 +544,6 @@ function Device:_showLightDialog()
     end
 end

-function Device:untar(archive, extract_to)
-    return android.untar(archive, extract_to)
-end
-
 function Device:download(link, name, ok_text)
     local ConfirmBox = require("ui/widget/confirmbox")
     local InfoMessage = require("ui/widget/infomessage")
@@ -4,6 +4,7 @@ Generic device abstraction.
 This module defines stubs for common methods.
 --]]

+local Archiver = require("ffi/archiver")
 local DataStorage = require("datastorage")
 local Event = require("ui/event")
 local Geom = require("ui/geometry")
@@ -1024,26 +1025,36 @@ end
 function Device:unpackArchive(archive, extract_to, with_stripped_root)
     require("dbg").dassert(type(archive) == "string")
     local BD = require("ui/bidi")
-    local ok
-    if archive:match("%.tar%.bz2$") or archive:match("%.tar%.gz$") or archive:match("%.tar%.lz$") or archive:match("%.tgz$") then
-        ok = self:untar(archive, extract_to, with_stripped_root)
-    else
-        return false, T(_("Couldn't extract archive:\n\n%1\n\nUnrecognized filename extension."), BD.filepath(archive))
+    local arc = Archiver.Reader:new()
+    local ok = arc:open(archive)
+    if ok then
+        for entry in arc:iterate() do
+            local dest_path = entry.path
+            if with_stripped_root then
+                local __, tail = dest_path:match("([^/]*)/*(.*)")
+                if tail then
+                    -- Non-root: strip one level.
+                    dest_path = tail
+                elseif entry.mode == 'directory' then
+                    -- Root directory: ignore.
+                    goto continue
+                else -- luacheck: ignore 542
+                    -- Root non-directory: don't strip.
+                end
+            end
+            if not arc:extractToPath(entry.path, extract_to.."/"..dest_path) then
+                break
+            end
+            ::continue::
+        end
+        ok = not arc.err
     end
     if not ok then
-        return false, T(_("Extracting archive failed:\n\n%1"), BD.filepath(archive))
+        return false, T(_("Extracting archive failed:\n\n%1"), BD.filepath(archive))..string.format("\n\n(%s)", arc.err)
     end
     return true
 end

-function Device:untar(archive, extract_to, with_stripped_root)
-    local cmd = "./tar xf %q -C %q"
-    if with_stripped_root then
-        cmd = cmd .. " --strip-components=1"
-    end
-    return os.execute(cmd:format(archive, extract_to))
-end
-
 -- Update our UIManager reference once it's ready
 function Device:_UIManagerReady(uimgr)
     -- Our own ref
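Callers of `Device:unpackArchive` keep the same contract as before: `true` on success, otherwise `false` plus a translated error message. A hedged usage sketch (paths are hypothetical; `logger` is KOReader's standard logging module):

local Device = require("device")
local logger = require("logger")

-- with_stripped_root = true drops the archive's single top-level directory, as implemented above
local ok, err = Device:unpackArchive("/tmp/some-archive.zip", "/tmp/extracted", true) -- hypothetical paths
if not ok then
    logger.warn("unpackArchive failed:", err)
end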
@@ -776,11 +776,6 @@ function Kindle:readyToSuspend(delay)
     self.suspend_time = time.boottime_or_realtime_coarse()
 end

--- We add --no-same-permissions --no-same-owner to make the userstore fuse proxy happy...
-function Kindle:untar(archive, extract_to)
-    return os.execute(("./tar --no-same-permissions --no-same-owner -xf %q -C %q"):format(archive, extract_to))
-end
-
 function Kindle:UIManagerReady(uimgr)
     UIManager = uimgr
 end
@@ -836,28 +836,31 @@ function Wikipedia:createEpub(epub_path, page, lang, with_images)
     -- Open the zip file (with .tmp for now, as crengine may still
     -- have a handle to the final epub_path, and we don't want to
     -- delete a good one if we fail/cancel later)
+    local Archiver = require("ffi/archiver")
+    local epub = Archiver.Writer:new{}
     local epub_path_tmp = epub_path .. ".tmp"
-    local ZipWriter = require("ffi/zipwriter")
-    local epub = ZipWriter:new{}
-    if not epub:open(epub_path_tmp) then
+    if not epub:open(epub_path_tmp, "epub") then
         return false
     end

     -- We now create and add all the required epub files
+    local mtime = os.time()

     -- ----------------------------------------------------------------
     -- /mimetype : always "application/epub+zip"
-    epub:add("mimetype", "application/epub+zip", true)
+    epub:setZipCompression("store")
+    epub:addFileFromMemory("mimetype", "application/epub+zip", mtime)
+    epub:setZipCompression("deflate")

     -- ----------------------------------------------------------------
     -- /META-INF/container.xml : always the same content
-    epub:add("META-INF/container.xml", [[
+    epub:addFileFromMemory("META-INF/container.xml", [[
 <?xml version="1.0"?>
 <container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
   <rootfiles>
     <rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
   </rootfiles>
-</container>]])
+</container>]], mtime)

     -- ----------------------------------------------------------------
     -- OEBPS/content.opf : metadata + list of other files (paths relative to OEBPS/ directory)
@@ -916,14 +919,14 @@ function Wikipedia:createEpub(epub_path, page, lang, with_images)
 </spine>
 </package>
 ]])
-    epub:add("OEBPS/content.opf", table.concat(content_opf_parts))
+    epub:addFileFromMemory("OEBPS/content.opf", table.concat(content_opf_parts), mtime)

     -- ----------------------------------------------------------------
     -- OEBPS/stylesheet.css
     -- crengine will use its own data/epub.css, we just add/fix a few styles
     -- to look more alike wikipedia web pages (that the user can ignore
     -- with "Embedded Style" off)
-    epub:add("OEBPS/stylesheet.css", [[
+    epub:addFileFromMemory("OEBPS/stylesheet.css", [[
 /* Generic styling picked from our epub.css (see it for comments),
    to give this epub a book look even if used with html5.css */
 body {
@@ -1270,7 +1273,7 @@ abbr.abbr {
     display: none;
 }
 /* hiding .noprint may discard some interesting links */
-]])
+]], mtime)

     -- ----------------------------------------------------------------
     -- OEBPS/toc.ncx : table of content
@@ -1351,7 +1354,7 @@ abbr.abbr {
 </navMap>
 </ncx>
 ]])
-    epub:add("OEBPS/toc.ncx", table.concat(toc_ncx_parts))
+    epub:addFileFromMemory("OEBPS/toc.ncx", table.concat(toc_ncx_parts), mtime)

     -- ----------------------------------------------------------------
     -- HTML table of content
@@ -1479,7 +1482,7 @@ abbr.abbr {
     if self:isWikipediaLanguageRTL(lang) then
         html_dir = ' dir="rtl"'
     end
-    epub:add("OEBPS/content.html", string.format([[
+    epub:addFileFromMemory("OEBPS/content.html", string.format([[
 <html xmlns="http://www.w3.org/1999/xhtml"%s>
 <head>
 <title>%s</title>
@@ -1493,7 +1496,7 @@ abbr.abbr {
 %s
 </body>
 </html>
-]], html_dir, page_cleaned, page_htmltitle, lang:upper(), saved_on, see_online_version, html))
+]], html_dir, page_cleaned, page_htmltitle, lang:upper(), saved_on, see_online_version, html), mtime)

     -- Force a GC to free the memory we used till now (the second call may
     -- help reclaim more memory).
@@ -1535,11 +1538,11 @@ abbr.abbr {
         end
         if success then
             -- Images do not need to be compressed, so spare some cpu cycles
-            local no_compression = true
-            if img.mimetype == "image/svg+xml" then -- except for SVG images (which are XML text)
-                no_compression = false
+            if img.mimetype ~= "image/svg+xml" then -- except for SVG images (which are XML text)
+                epub:setZipCompression("store")
             end
-            epub:add("OEBPS/"..img.imgpath, content, no_compression)
+            epub:addFileFromMemory("OEBPS/"..img.imgpath, content, mtime)
+            epub:setZipCompression("deflate")
         else
             go_on = UI:confirm(T(_("Downloading image %1 failed. Continue anyway?"), inum), _("Stop"), _("Continue"))
             if not go_on then
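The writer side replaces `ZipWriter` with the same pattern in the Wikipedia hunks above and in the NewsDownloader hunks further down. A condensed, hedged sketch using only the calls visible in the diff — `open(path, "epub")`, `setZipCompression("store"/"deflate")`, `addFileFromMemory(name, content, mtime)`; the final `close()` is an assumption, since the hunks don't show how the archive is finalized:

local Archiver = require("ffi/archiver")

local epub = Archiver.Writer:new{}
if epub:open("/tmp/out.epub.tmp", "epub") then -- hypothetical path; "epub" presumably selects an epub-flavored zip container
    local mtime = os.time()
    -- the EPUB mimetype member must be stored uncompressed; everything else is deflated
    epub:setZipCompression("store")
    epub:addFileFromMemory("mimetype", "application/epub+zip", mtime)
    epub:setZipCompression("deflate")
    epub:addFileFromMemory("OEBPS/content.html", "<html><body>Hello</body></html>", mtime)
    epub:close() -- assumed; not shown in the hunks
end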
@@ -1,3 +1,4 @@
+local Archiver = require("ffi/archiver")
 local BD = require("ui/bidi")
 local ButtonDialog = require("ui/widget/buttondialog")
 local DocumentRegistry = require("document/documentregistry")
@@ -15,28 +16,22 @@ local T = ffiUtil.template
 local ArchiveViewer = WidgetContainer:extend{
     name = "archiveviewer",
     fullname = _("Archive viewer"),
-    arc_file = nil, -- archive
+    arc = nil, -- archive
     -- list_table is a flat table containing archive files and folders
     -- key - a full path of the folder ("/" for root), for all folders and subfolders of any level
     -- value - a subtable of subfolders and files in the folder
     -- subtable key - a name of a subfolder ending with /, or a name of a file (without path)
     -- subtable value - false for subfolders, or file size (string)
     list_table = nil,
-    arc_type = nil,
-    arc_ext = {
-        cbz = true,
-        epub = true,
-        zip = true,
-    },
 }

-local ZIP_LIST = "unzip -qql \"%1\""
-local ZIP_EXTRACT_CONTENT = "unzip -qqp \"%1\" \"%2\""
-local ZIP_EXTRACT_FILE = "unzip -qqo \"%1\" \"%2\" -d \"%3\"" -- overwrite
-
-local function getSuffix(file)
-    return util.getFileNameSuffix(file):lower()
-end
+local SUPPORTED_EXTENSIONS = {
+    cbr = true,
+    cbz = true,
+    epub = true,
+    rar = true,
+    zip = true,
+}

 function ArchiveViewer:init()
     self:registerDocumentRegistryAuxProvider()
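To make the `list_table` comments above concrete: a hypothetical archive containing `cover.jpg` and `imgs/page1.png` would be flattened into something like the table below (illustrative only; the exact folder-key format, and whether sizes remain strings or become numbers after the switch to `entry.size`, are assumptions):

-- hypothetical contents of self.list_table after getListTable()
local list_table = {
    ["/"] = {
        ["cover.jpg"] = 12345, -- file: value is its size
        ["imgs/"] = false,     -- subfolder: value is false
    },
    ["imgs/"] = {
        ["page1.png"] = 67890,
    },
}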
@@ -53,23 +48,18 @@ function ArchiveViewer:registerDocumentRegistryAuxProvider()
 end

 function ArchiveViewer:isFileTypeSupported(file)
-    return self.arc_ext[getSuffix(file)] and true or false
+    return SUPPORTED_EXTENSIONS[util.getFileNameSuffix(file):lower()] ~= nil
 end

 function ArchiveViewer:openFile(file)
-    local _, filename = util.splitFilePathName(file)
-    local fileext = getSuffix(filename)
-    if fileext == "cbz" or fileext == "epub" or fileext == "zip" then
-        self.arc_type = "zip"
-    end
-    self.arc_file = file
+    self.arc = Archiver.Reader:new()
     self.fm_updated = nil
     self.list_table = {}
-    if self.arc_type == "zip" then
-        self:getZipListTable()
-    else -- add other archivers here
-        return
+    if self.arc:open(file) then
+        self:getListTable()
     end

     self.menu = Menu:new{
@@ -97,7 +87,7 @@ function ArchiveViewer:openFile(file)
     UIManager:show(self.menu)
 end

-function ArchiveViewer:getZipListTable()
+function ArchiveViewer:getListTable()
     local function parse_path(filepath, filesize)
         if not filepath then return end
         local path, name = util.splitFilePathName(filepath)
@@ -119,14 +109,10 @@ function ArchiveViewer:getZipListTable()
         end
     end

-    local std_out = io.popen(T(ZIP_LIST, self.arc_file))
-    if std_out then
-        for line in std_out:lines() do
-            -- entry datetime not used so far
-            local fsize, fname = string.match(line, "%s+(%d+)%s+[-0-9]+%s+[0-9:]+%s+(.+)")
-            parse_path(fname, fsize or 0)
+    for entry in self.arc:iterate() do
+        if entry.mode == "file" then
+            parse_path(entry.path, entry.size)
         end
-        std_out:close()
     end
 end
@@ -266,29 +252,12 @@ function ArchiveViewer:viewFile(filepath)
 end

 function ArchiveViewer:extractFile(filepath)
-    if self.arc_type == "zip" then
-        local std_out = io.popen(T(ZIP_EXTRACT_FILE, self.arc_file, filepath, util.splitFilePathName(self.arc_file)))
-        if std_out then
-            std_out:close()
-        end
-    else
-        return
-    end
-    self.fm_updated = true
+    local directory = util.splitFilePathName(self.arc.filepath)
+    self.fm_updated = self.arc:extractToPath(filepath, directory .. filepath)
 end

 function ArchiveViewer:extractContent(filepath)
-    local content
-    if self.arc_type == "zip" then
-        local std_out = io.popen(T(ZIP_EXTRACT_CONTENT, self.arc_file, filepath))
-        if std_out then
-            content = std_out:read("*all")
-            std_out:close()
-            return content
-        end
-    else
-        return
-    end
+    return self.arc:extractToMemory(filepath)
 end

 return ArchiveViewer
@@ -442,29 +442,32 @@ function EpubDownloadBackend:createEpub(epub_path, html, url, include_images, me
     -- Open the zip file (with .tmp for now, as crengine may still
     -- have a handle to the final epub_path, and we don't want to
     -- delete a good one if we fail/cancel later)
+    local Archiver = require("ffi/archiver")
+    local epub = Archiver.Writer:new{}
     local epub_path_tmp = epub_path .. ".tmp"
-    local ZipWriter = require("ffi/zipwriter")
-    local epub = ZipWriter:new{}
-    if not epub:open(epub_path_tmp) then
+    if not epub:open(epub_path_tmp, "epub") then
         logger.dbg("Failed to open epub_path_tmp")
         return false
     end

     -- We now create and add all the required epub files
+    local mtime = os.time()

     -- ----------------------------------------------------------------
     -- /mimetype : always "application/epub+zip"
-    epub:add("mimetype", "application/epub+zip", true)
+    epub:setZipCompression("store")
+    epub:addFileFromMemory("mimetype", "application/epub+zip", mtime)
+    epub:setZipCompression("deflate")

     -- ----------------------------------------------------------------
     -- /META-INF/container.xml : always the same content
-    epub:add("META-INF/container.xml", [[
+    epub:addFileFromMemory("META-INF/container.xml", [[
 <?xml version="1.0"?>
 <container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
   <rootfiles>
     <rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
   </rootfiles>
-</container>]])
+</container>]], mtime)
     logger.dbg("Added META-INF/container.xml")

     -- ----------------------------------------------------------------
@@ -517,7 +520,7 @@ function EpubDownloadBackend:createEpub(epub_path, html, url, include_images, me
 </spine>
 </package>
 ]])
-    epub:add("OEBPS/content.opf", table.concat(content_opf_parts))
+    epub:addFileFromMemory("OEBPS/content.opf", table.concat(content_opf_parts), mtime)
     logger.dbg("Added OEBPS/content.opf")

     -- ----------------------------------------------------------------
@@ -525,9 +528,9 @@ function EpubDownloadBackend:createEpub(epub_path, html, url, include_images, me
     --- @todo We told it we'd include a stylesheet.css, so it's probably best
     -- that we do. In theory, we could try to fetch any *.css files linked in
     -- the main html.
-    epub:add("OEBPS/stylesheet.css", [[
+    epub:addFileFromMemory("OEBPS/stylesheet.css", [[
 /* Empty */
-]])
+]], mtime)
     logger.dbg("Added OEBPS/stylesheet.css")

     -- ----------------------------------------------------------------
@@ -567,12 +570,12 @@ function EpubDownloadBackend:createEpub(epub_path, html, url, include_images, me
 </navMap>
 </ncx>
 ]])
-    epub:add("OEBPS/toc.ncx", table.concat(toc_ncx_parts))
+    epub:addFileFromMemory("OEBPS/toc.ncx", table.concat(toc_ncx_parts), mtime)
     logger.dbg("Added OEBPS/toc.ncx")

     -- ----------------------------------------------------------------
     -- OEBPS/content.html
-    epub:add("OEBPS/content.html", html)
+    epub:addFileFromMemory("OEBPS/content.html", html, mtime)
     logger.dbg("Added OEBPS/content.html")

     -- Force a GC to free the memory we used till now (the second call may
@@ -619,7 +622,7 @@ function EpubDownloadBackend:createEpub(epub_path, html, url, include_images, me
             if img.mimetype == "image/svg+xml" then -- except for SVG images (which are XML text)
                 no_compression = false
             end
-            epub:add("OEBPS/"..img.imgpath, content, no_compression)
+            epub:addFileFromMemory("OEBPS/"..img.imgpath, content, no_compression, mtime)
         else
             go_on = UI:confirm(T(_("Downloading image %1 failed. Continue anyway?"), inum), _("Stop"), _("Continue"))
             if not go_on then