Commit 5b9b4d2f authored by Karel Koci

transaction: instead of data use files on fs

This replaces the problematic passing of package data through Lua with
files saved on disk. The original problem is that Lua in some cases,
such as closures, duplicates the environment, and that means all the
data as well. If this happens a few times (measurements suggest two or
three times in the code path we hit here), memory requirements grow
several times over.

This also temporarily disables the test test-sys-pkgupdate-plan-unapproved,
as it does not expect the additional files. This is going to be solved in a
following commit.
parent d859ed55
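The core of the change can be seen in transaction.queue_install: only a file path travels through the queue (and through any closures that copy the environment), while the package bytes stay on disk until the backend unpacks them. A simplified before/after sketch of that function (the _old/_new names are only for illustration):

-- Before: the whole .ipk was read into a Lua string and queued, so every
-- duplicated environment could end up holding another copy of the data.
local function queue_install_old(queue, filename)
    local f = assert(io.open(filename, "rb"))
    local data = f:read("*a")
    f:close()
    table.insert(queue, {op = "install", data = data})
end

-- After: only the file name is queued; the package is read back from disk
-- (now by tar in pkg_unpack) only when it is actually unpacked.
local function queue_install_new(queue, filename)
    table.insert(queue, {op = "install", file = filename})
end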
......@@ -25,17 +25,20 @@ local type = type
local setmetatable = setmetatable
local getmetatable = getmetatable
local assert = assert
local pcall = pcall
local table = table
local string = string
local math = math
local io = io
local unpack = unpack
local mkdir = mkdir
local stat = stat
local events_wait = events_wait
local run_util = run_util
module "utils"
-- luacheck: globals lines2set map set2arr arr2set cleanup_dirs read_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists
-- luacheck: globals lines2set map set2arr arr2set cleanup_dirs dir_ensure mkdirp read_file write_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists
--[[
Convert provided text into set of lines. Doesn't care about the order.
......@@ -127,6 +130,56 @@ function read_file(filename)
return content
end
--[[
Write data to the given file. Missing parent directories are created.
]]
function write_file(filename, data)
mkdirp(filename:gsub('/[^/]*$', '/')) -- note: the trailing file name is stripped, leaving the parent directory
local f, err = io.open(filename, "w")
if not f then
return nil, err
end
f:write(data)
f:close()
end
--[[
Create directory on given path.
This function does not fail if the directory is already there.
It returns true if the directory is there (it was created or already existed). On the other hand,
it returns false if the path exists but is not a directory.
]]
function dir_ensure(dir)
-- Try creating it.
local ok, err = pcall(function () mkdir(dir) end)
if not ok then
-- It may have failed because it already exists, check it
local tp = stat(dir)
if not tp then
-- It does not exist, so the creation failed for some other reason
error(err)
elseif tp ~= "d" then
-- It failed because there is some file
return false
end
-- else ‒ there's the given directory, so it failed because it pre-existed. That's OK.
end
return true
end
--[[
Create a directory on the given path, creating all parent directories as well.
This does not fail if the directory already exists.
]]
function mkdirp(dir)
if stat(dir) == "d" then return end -- quick exit
local created = "/"
for segment in dir:gmatch("([^/]+)") do
created = created .. segment .. "/"
dir_ensure(created)
end
end
--[[
Make a deep copy of passed data. This does not work on userdata, on functions
(which might have some local upvalues) and metatables (it doesn't fail, it just
......
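For orientation, a minimal usage sketch of the new utils helpers follows; the path is made up, and it assumes the updater's Lua environment, where mkdir and stat are provided by the C core:

local utils = require "utils"

-- write_file creates any missing parent directories itself (it calls mkdirp)
utils.write_file("/tmp/updater-example/download/pkg-1.0.ipk", "<package contents>")

-- dir_ensure returns false when the path exists but is not a directory
if not utils.dir_ensure("/tmp/updater-example/download") then
    error("path exists but is not a directory")
end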
......@@ -25,13 +25,14 @@ local DIE = DIE
module "syscnf"
-- Variables accessed from outside of this module
-- luacheck: globals root_dir status_file info_dir pkg_temp_dir dir_opkg_collided target_model target_board
-- luacheck: globals root_dir status_file info_dir pkg_download_dir pkg_unpacked_dir dir_opkg_collided target_model target_board
-- Functions that we want to access from outside of this module
-- luacheck: globals set_root_dir set_target
local status_file_suffix = "usr/lib/opkg/status"
local info_dir_suffix = "usr/lib/opkg/info/"
local pkg_temp_dir_suffix = "usr/share/updater/unpacked/"
local pkg_unpacked_dir_suffix = "usr/share/updater/unpacked/"
local pkg_download_dir_suffix = "usr/share/updater/download/"
local dir_opkg_collided_suffix = "usr/share/updater/collided/"
--[[
......@@ -68,8 +69,10 @@ function set_root_dir(dir)
status_file = dir .. status_file_suffix
-- The directory where unpacked control files of the packages live
info_dir = dir .. info_dir_suffix
-- A directory to which we download packages
pkg_download_dir = dir .. pkg_download_dir_suffix
-- A directory where unpacked packages live
pkg_temp_dir = dir .. pkg_temp_dir_suffix
pkg_unpacked_dir = dir .. pkg_unpacked_dir_suffix
-- Directory where we move files and directories that weren't part of any package.
dir_opkg_collided = dir .. dir_opkg_collided_suffix
end
......
......@@ -60,7 +60,7 @@ module "backend"
-- luacheck: globals cmd_timeout cmd_kill_timeout
-- Functions that we want to access from outside (ex. for testing purposes)
-- luacheck: globals block_parse block_split block_dump_ordered pkg_status_dump package_postprocess status_parse get_parent config_modified
-- luacheck: globals repo_parse status_dump pkg_unpack pkg_examine collision_check installed_confs steal_configs dir_ensure pkg_merge_files pkg_merge_control pkg_config_info pkg_cleanup_files script_run control_cleanup version_cmp version_match run_state user_path_move get_nonconf_files get_changed_files
-- luacheck: globals repo_parse status_dump pkg_unpack pkg_examine collision_check installed_confs steal_configs pkg_merge_files pkg_merge_control pkg_config_info pkg_cleanup_files script_run control_cleanup version_cmp version_match run_state user_path_move get_nonconf_files get_changed_files
--[[
Configuration of the module. It is supported (yet unlikely to be needed) to modify
......@@ -417,9 +417,18 @@ function status_dump(status)
end
end
local function rmrf(dir)
-- TODO: Would it be better to remove from within our code, without calling rm?
events_wait(run_util(function (ecode, _, _, stderr)
if ecode ~= 0 then
WARN("Failed to clean up work directory ", dir, ": ", stderr)
end
end, nil, nil, cmd_timeout, cmd_kill_timeout, "rm", "-rf", dir))
end
--[[
Take the .ipk package (passed as the data, not as a path to a file) and unpack it
into a temporary location somewhere under tmp_dir. If you omit tmp_dir, /tmp is used.
Take the .ipk package and unpack it into a temporary location somewhere under
pkg_unpacked_dir.
It returns a path to a subdirectory of pkg_unpacked_dir, where the package is unpacked.
There are two further subdirectories, control and data. Data are the files to be merged
......@@ -429,13 +438,13 @@ TODO:
• Sanity checking of the package.
• Less calling of external commands.
]]
function pkg_unpack(package, tmp_dir)
function pkg_unpack(package_path)
-- The first unpack goes into /tmp
-- We assume s1dir gets a sane name ‒ no spaces or strange chars in it
local s1dir = mkdtemp()
-- The results go into the provided dir, or to /tmp if none was provided
-- FIXME: Sanity-check provided tmp_dir ‒ it must not contain strange chars
local s2dir = mkdtemp(tmp_dir)
utils.mkdirp(syscnf.pkg_unpacked_dir)
local s2dir = mkdtemp(syscnf.pkg_unpacked_dir)
-- If anything goes wrong, this is where we find the error message
local err
-- Unpack the ipk into s1dir, getting control.tar.gz and data.tar.gz
......@@ -444,7 +453,7 @@ function pkg_unpack(package, tmp_dir)
if ecode ~= 0 then
err = "Stage 1 unpack failed: " .. stderr
end
end, function () chdir(s1dir) end, package, cmd_timeout, cmd_kill_timeout, "sh", "-c", "gzip -dc | tar x"))
end, nil, nil, cmd_timeout, cmd_kill_timeout, "tar", "-xzf", package_path, "-C", s1dir))
-- TODO: Sanity check debian-binary
return err == nil
end
......@@ -452,11 +461,12 @@ function pkg_unpack(package, tmp_dir)
local function unpack_archive(what)
local archive = s1dir .. "/" .. what .. ".tar.gz"
local dir = s2dir .. "/" .. what
mkdir(dir)
return run_util(function (ecode, _, _, stderr)
if ecode ~= 0 then
err = "Stage 2 unpack of " .. what .. " failed: " .. stderr
end
end, nil, package, cmd_timeout, cmd_kill_timeout, "sh", "-c", "mkdir -p '" .. dir .. "' && cd '" .. dir .. "' && gzip -dc <'" .. archive .. "' | tar xp")
end, nil, nil, cmd_timeout, cmd_kill_timeout, "tar", "-xzf", archive, '-C', dir)
end
local function stage2()
events_wait(unpack_archive("control"), unpack_archive("data"))
......@@ -464,24 +474,12 @@ function pkg_unpack(package, tmp_dir)
end
-- Try-finally like construct, make sure cleanup is called no matter what
local success, ok = pcall(function () return stage1() and stage2() end)
-- Do the cleanups
local events = {}
local function remove(dir)
-- TODO: Would it be better to remove from within our code, without calling rm?
table.insert(events, run_util(function (ecode, _, _, stderr)
if ecode ~= 0 then
WARN("Failed to clean up work directory ", dir, ": ", stderr)
end
end, nil, nil, cmd_timeout, cmd_kill_timeout, "rm", "-rf", dir))
end
-- Intermediate work space, not needed by the caller
remove(s1dir)
rmrf(s1dir)
if err then
-- Clean up the resulting directory in case of errors
remove(s2dir)
rmrf(s2dir)
end
-- Run all the cleanup removes in parallel
events_wait(unpack(events))
-- Cleanup done, call error() if anything failed
if not success then error(ok) end
if not ok then error(err) end
......@@ -764,25 +762,6 @@ function steal_configs(current_status, installed_confs, configs)
return steal
end
-- Ensure the given directory exists
function dir_ensure(dir)
-- Try creating it.
local ok, err = pcall(function () mkdir(dir) end)
if not ok then
-- It may have failed because it already exists, check it
local tp = stat(dir)
if not tp then
-- It does not create, so creation failed for some reason
error(err)
elseif tp ~= "d" then
-- It failed because there is some file
return false
end
-- else ‒ there's the given directory, so it failed because it pre-existed. That's OK.
end
return true
end
--[[
Move anything on the given path to dir_opkg_collided. This backs up and removes the original files.
When keep is set to true, the file is copied instead of moved.
......@@ -792,7 +771,7 @@ function user_path_move(path, keep)
local fpath = ""
for dir in (syscnf.dir_opkg_collided .. path):gsub("[^/]*/?$", ""):gmatch("[^/]+") do
local randex = ""
while not dir_ensure(fpath .. "/" .. dir .. randex) do
while not utils.dir_ensure(fpath .. "/" .. dir .. randex) do
-- If there is file with same name, then append some random extension
randex = "." .. utils.randstr(6)
end
......@@ -842,10 +821,10 @@ function pkg_merge_files(dir, dirs, files, configs)
for _, new_dir in ipairs(dirs_sorted) do
DBG("Creating dir " .. new_dir)
local dir = syscnf.root_dir .. new_dir
if not dir_ensure(dir) then
if not utils.dir_ensure(dir) then
-- There is some file that user created. Move it away
user_path_move(dir)
dir_ensure(dir)
utils.dir_ensure(dir)
end
end
--[[
......
......@@ -31,6 +31,7 @@ local error = error
local pcall = pcall
local assert = assert
local pairs = pairs
local collectgarbage = collectgarbage
local unpack = unpack
local table = table
local backend = require "backend"
......@@ -91,7 +92,7 @@ local function pkg_unpack(operations, status)
WARN("Package " .. op.name .. " is not installed. Can't remove")
end
elseif op.op == "install" then
local pkg_dir = backend.pkg_unpack(op.data, syscnf.pkg_temp_dir)
local pkg_dir = backend.pkg_unpack(op.file)
table.insert(dir_cleanups, pkg_dir)
local files, dirs, configs, control = backend.pkg_examine(pkg_dir)
to_remove[control.Package] = true
......@@ -292,14 +293,11 @@ local function perform_internal(operations, journal_status, run_state)
-- Emulate try-finally
local ok, err = pcall(function ()
-- Make sure the temporary directory for unpacked packages exists
local created = ""
for segment in syscnf.pkg_temp_dir:gmatch("([^/]*)/") do
created = created .. segment .. "/"
backend.dir_ensure(created)
end
utils.mkdirp(syscnf.pkg_unpacked_dir)
-- Look at what the current status looks like.
local to_remove, to_install, plan
to_remove, to_install, plan, dir_cleanups, cleanup_actions = step(journal.UNPACKED, pkg_unpack, true, operations, status)
utils.cleanup_dirs({syscnf.pkg_download_dir})
cleanup_actions = cleanup_actions or {} -- just to handle if journal contains no cleanup actions (journal from previous version)
-- Drop the operations. This way, if we are tail-called, then the package buffers may be garbage-collected
operations = nil
......@@ -426,6 +424,7 @@ function perform_queue()
-- Ensure we reset the queue by running it. And also that we allow the garbage collector to collect the data in there.
local queue_cp = queue
queue = {}
collectgarbage() -- explicitly try to collect queue
return errors_format(perform(queue_cp))
end
end
......@@ -442,18 +441,13 @@ end
-- Queue a request to install a package from the given file name.
function queue_install(filename)
local content, err = utils.read_file(filename)
if content then
table.insert(queue, {op = "install", data = content})
else
error(err)
end
table.insert(queue, {op = "install", file = filename})
end
function queue_install_downloaded(data, name, version, modifier)
function queue_install_downloaded(file, name, version, modifier)
table.insert(queue, {
op = "install",
data = data,
file = file,
name = name,
version = version,
reboot = modifier.reboot,
......
......@@ -114,7 +114,9 @@ function prepare(entrypoint)
error(utils.exception("corruption", "The sha256 sum of " .. task.name .. " does not match"))
end
end
transaction.queue_install_downloaded(data, task.name, task.package.Version, task.modifier)
local fpath = syscnf.pkg_download_dir .. task.name .. '-' .. task.package.Version .. '.ipk'
utils.write_file(fpath, data)
transaction.queue_install_downloaded(fpath, task.name, task.package.Version, task.modifier)
elseif task.action == "remove" then
INFO("Queue removal of " .. task.name)
transaction.queue_remove(task.name)
......
......@@ -646,6 +646,7 @@ struct {
{"cp", "/bin/cp"},
{"rm", "/bin/rm"},
{"gzip", "/bin/gzip"},
{"tar", "/bin/tar"},
{"find", "/usr/bin/find"},
{"sh", "/bin/sh"},
{"mktemp", "/bin/mktemp"},
......
......@@ -28,6 +28,7 @@ local lines2set = utils.lines2set
module("backend-tests", package.seeall, lunit.testcase)
local datadir = (os.getenv("S") or ".") .. "/tests/data/"
local tmpdir = os.getenv("TMPDIR") or "/tmp"
-- Tests for the block_parse function
function test_block_parse()
......@@ -257,7 +258,8 @@ local tmp_dirs = {}
Test the chain of functions ‒ unpack, examine
]]
function test_pkg_unpack()
local path = B.pkg_unpack(utils.read_file(datadir .. "updater.ipk"))
syscnf.set_root_dir(tmpdir)
local path = B.pkg_unpack(datadir .. "updater.ipk")
-- Make sure it is deleted on teardown
table.insert(tmp_dirs, path)
-- Check list of extracted files
......
......@@ -26,7 +26,8 @@ function test_set_root_dir()
SC.set_root_dir("/dir/")
assert_equal("/dir/usr/lib/opkg/status", SC.status_file)
assert_equal("/dir/usr/lib/opkg/info/", SC.info_dir)
assert_equal("/dir/usr/share/updater/unpacked/", SC.pkg_temp_dir)
assert_equal("/dir/usr/share/updater/unpacked/", SC.pkg_unpacked_dir)
assert_equal("/dir/usr/share/updater/download/", SC.pkg_download_dir)
assert_equal("/dir/usr/share/updater/collided/", SC.dir_opkg_collided)
end
......
......@@ -9,7 +9,6 @@ TRANS_SYS_TESTS := \
UPD_SYS_TESTS := \
help \
plan \
plan-unapproved \
steal-confs \
simple-update \
multiple-repos \
......
......@@ -22,7 +22,7 @@ local B = require 'backend'
local T = require 'transaction'
local utils = require 'utils'
local journal = require 'journal'
require "syscnf"
local syscnf = require "syscnf"
syscnf.set_root_dir()
......@@ -57,25 +57,9 @@ local intro = {
p = {}
},
{
f = "backend.dir_ensure",
p = {"/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/share/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/share/updater/"}
},
{
f = "backend.dir_ensure",
f = "utils.mkdirp",
p = {"/usr/share/updater/unpacked/"}
}
},
}
local function outro(cleanup_dirs, status)
......@@ -120,7 +104,9 @@ local function tables_join(...)
end
local function mocks_install()
mock_gen("backend.dir_ensure")
mock_gen("os.remove")
mock_gen("utils.dir_ensure")
mock_gen("utils.mkdirp")
mock_gen("backend.status_parse", function () fail("Forbidden function backend.status_parse") end)
mock_gen("backend.run_state", function()
return {
......@@ -168,6 +154,10 @@ function test_perform_empty()
f = "journal.write",
p = {journal.UNPACKED, {}, {}, {}, {}, {}}
},
{
f="utils.cleanup_dirs",
p={ { "/usr/share/updater/download/" } }
},
{
f = "backend.collision_check",
p = {test_status, {}, {}}
......@@ -199,7 +189,7 @@ function test_perform_ok()
local result = T.perform({
{
op = "install",
data = "<package data>"
file = "<package>"
}, {
op = "remove",
name = "pkg-rem"
......@@ -223,7 +213,7 @@ function test_perform_ok()
local expected = tables_join(intro, {
{
f = "backend.pkg_unpack",
p = {"<package data>", syscnf.pkg_temp_dir}
p = {"<package>"}
},
{
f = "backend.pkg_examine",
......@@ -258,6 +248,10 @@ function test_perform_ok()
{}
}
},
{
f = "utils.cleanup_dirs",
p = {{syscnf.pkg_download_dir}}
},
{
f = "backend.collision_check",
p = {
......@@ -351,16 +345,16 @@ end
function test_perform_collision()
mocks_install()
mock_gen("backend.collision_check", function () return {f = {["<pkg1name>"] = "new", ["<pkg2name>"] = "new", ["other"] = "existing"}}, {} end)
mock_gen("backend.pkg_unpack", function (data) return data:gsub("data", "dir") end)
mock_gen("backend.pkg_unpack", function (data) return data:gsub("file", "dir") end)
mock_gen("backend.pkg_examine", function (dir) return {f = true}, {d = true}, {c = "1234567890123456"}, {Package = dir:gsub("dir", "name")} end)
local ok, err = pcall(T.perform, {
{
op = "install",
data = "<pkg1data>"
file = "<pkg1file>"
},
{
op = "install",
data = "<pkg2data>"
file = "<pkg2file>"
}
})
assert_false(ok)
......@@ -370,7 +364,7 @@ function test_perform_collision()
local expected = tables_join(intro, {
{
f = "backend.pkg_unpack",
p = {"<pkg1data>", syscnf.pkg_temp_dir}
p = {"<pkg1file>"}
},
{
f = "backend.pkg_examine",
......@@ -378,7 +372,7 @@ function test_perform_collision()
},
{
f = "backend.pkg_unpack",
p = {"<pkg2data>", syscnf.pkg_temp_dir}
p = {"<pkg2file>"}
},
{
f = "backend.pkg_examine",
......@@ -416,6 +410,10 @@ function test_perform_collision()
{}
}
},
{
f = "utils.cleanup_dirs",
p = {{syscnf.pkg_download_dir}}
},
{
f = "backend.collision_check",
p = {
......@@ -552,6 +550,7 @@ function test_recover_late()
status_mod["pkg-rem"] = nil
local intro_mod = utils.clone(intro)
intro_mod[2].f = "journal.recover"
intro_mod[4] = {f = "utils.cleanup_dirs", p = {{syscnf.pkg_download_dir}}}
local expected = tables_join(intro_mod, outro({"pkg_dir"}, status_mod))
assert_table_equal(expected, mocks_called)
end
......