Commit 5b9b4d2f authored by Karel Koci's avatar Karel Koci 🤘

transaction: instead of data use files on fs

This replaces problematic data pass in Lua with files saved on disk.
Original problem is that Lua in some cases like closures duplicates
environment and that means also all data. If this happens few times
(from measurements it seems like two or three times in code segment we
are dropping in this) then we can have few times bigger memory
requirements.

This also temporarily disables the test test-sys-pkgupdate-plan-unapproved as
it does not expect additional files. This is going to be solved in a
following commit.
parent d859ed55
...@@ -25,17 +25,20 @@ local type = type ...@@ -25,17 +25,20 @@ local type = type
local setmetatable = setmetatable local setmetatable = setmetatable
local getmetatable = getmetatable local getmetatable = getmetatable
local assert = assert local assert = assert
local pcall = pcall
local table = table local table = table
local string = string local string = string
local math = math local math = math
local io = io local io = io
local unpack = unpack local unpack = unpack
local mkdir = mkdir
local stat = stat
local events_wait = events_wait local events_wait = events_wait
local run_util = run_util local run_util = run_util
module "utils" module "utils"
-- luacheck: globals lines2set map set2arr arr2set cleanup_dirs read_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists -- luacheck: globals lines2set map set2arr arr2set cleanup_dirs dir_ensure mkdirp read_file write_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists
--[[ --[[
Convert provided text into set of lines. Doesn't care about the order. Convert provided text into set of lines. Doesn't care about the order.
...@@ -127,6 +130,56 @@ function read_file(filename) ...@@ -127,6 +130,56 @@ function read_file(filename)
return content return content
end end
--[[
Write data to the given file, creating any missing parent directories first.
On success returns true. On failure returns nil and an error message
(the same convention io.open uses), both for open and for write failures.
]]
function write_file(filename, data)
	-- Strip the trailing file name so only the directory path is created.
	mkdirp(filename:gsub('/[^/]*$', '/'))
	local f, err = io.open(filename, "w")
	if not f then
		return nil, err
	end
	-- file:write returns a truthy value on success and nil, err on failure
	-- (Lua 5.1 returns true; 5.2+ returns the file handle).
	local ok, werr = f:write(data)
	f:close()
	if not ok then
		return nil, werr
	end
	return true
end
--[[
Make sure a directory exists at the given path.
Returns true when the directory is there (it either already existed or was
just created). Returns false when the path is occupied by something that is
not a directory. Any other creation failure is re-raised via error().
]]
function dir_ensure(dir)
	local created, err = pcall(mkdir, dir)
	if created then
		return true
	end
	-- Creation failed; inspect what (if anything) sits on the path.
	local tp = stat(dir)
	if tp == nil then
		-- Nothing exists there, so mkdir failed for some real reason.
		error(err)
	end
	-- Something exists: a directory means the failure was harmless
	-- (it pre-existed); anything else is a genuine conflict.
	return tp == "d"
end
--[[
Create a directory at the given path, creating all missing parent
directories along the way (like `mkdir -p`).
Does not fail if the directory already exists.
]]
function mkdirp(dir)
	if stat(dir) == "d" then return end -- quick exit: already there
	-- Only root the accumulated path when the input is absolute; the
	-- previous version always started from "/", which would create a
	-- relative path from the filesystem root instead.
	local created = dir:sub(1, 1) == "/" and "/" or ""
	for segment in dir:gmatch("([^/]+)") do
		created = created .. segment .. "/"
		-- NOTE(review): dir_ensure's false result (non-directory in the
		-- way) is deliberately ignored here, matching prior behavior.
		dir_ensure(created)
	end
end
--[[ --[[
Make a deep copy of passed data. This does not work on userdata, on functions Make a deep copy of passed data. This does not work on userdata, on functions
(which might have some local upvalues) and metatables (it doesn't fail, it just (which might have some local upvalues) and metatables (it doesn't fail, it just
......
...@@ -25,13 +25,14 @@ local DIE = DIE ...@@ -25,13 +25,14 @@ local DIE = DIE
module "syscnf" module "syscnf"
-- Variables accessed from outside of this module -- Variables accessed from outside of this module
-- luacheck: globals root_dir status_file info_dir pkg_temp_dir dir_opkg_collided target_model target_board -- luacheck: globals root_dir status_file info_dir pkg_download_dir pkg_unpacked_dir dir_opkg_collided target_model target_board
-- Functions that we want to access from outside of this module -- Functions that we want to access from outside of this module
-- luacheck: globals set_root_dir set_target -- luacheck: globals set_root_dir set_target
local status_file_suffix = "usr/lib/opkg/status" local status_file_suffix = "usr/lib/opkg/status"
local info_dir_suffix = "usr/lib/opkg/info/" local info_dir_suffix = "usr/lib/opkg/info/"
local pkg_temp_dir_suffix = "usr/share/updater/unpacked/" local pkg_unpacked_dir_suffix = "usr/share/updater/unpacked/"
local pkg_download_dir_suffix = "usr/share/updater/download/"
local dir_opkg_collided_suffix = "usr/share/updater/collided/" local dir_opkg_collided_suffix = "usr/share/updater/collided/"
--[[ --[[
...@@ -68,8 +69,10 @@ function set_root_dir(dir) ...@@ -68,8 +69,10 @@ function set_root_dir(dir)
status_file = dir .. status_file_suffix status_file = dir .. status_file_suffix
-- The directory where unpacked control files of the packages live -- The directory where unpacked control files of the packages live
info_dir = dir .. info_dir_suffix info_dir = dir .. info_dir_suffix
-- A directory to which we download packages
pkg_download_dir = dir .. pkg_download_dir_suffix
-- A directory where unpacked packages live -- A directory where unpacked packages live
pkg_temp_dir = dir .. pkg_temp_dir_suffix pkg_unpacked_dir = dir .. pkg_unpacked_dir_suffix
-- Directory where we move files and directories that weren't part of any package. -- Directory where we move files and directories that weren't part of any package.
dir_opkg_collided = dir .. dir_opkg_collided_suffix dir_opkg_collided = dir .. dir_opkg_collided_suffix
end end
......
...@@ -60,7 +60,7 @@ module "backend" ...@@ -60,7 +60,7 @@ module "backend"
-- luacheck: globals cmd_timeout cmd_kill_timeout -- luacheck: globals cmd_timeout cmd_kill_timeout
-- Functions that we want to access from outside (ex. for testing purposes) -- Functions that we want to access from outside (ex. for testing purposes)
-- luacheck: globals block_parse block_split block_dump_ordered pkg_status_dump package_postprocess status_parse get_parent config_modified -- luacheck: globals block_parse block_split block_dump_ordered pkg_status_dump package_postprocess status_parse get_parent config_modified
-- luacheck: globals repo_parse status_dump pkg_unpack pkg_examine collision_check installed_confs steal_configs dir_ensure pkg_merge_files pkg_merge_control pkg_config_info pkg_cleanup_files script_run control_cleanup version_cmp version_match run_state user_path_move get_nonconf_files get_changed_files -- luacheck: globals repo_parse status_dump pkg_unpack pkg_examine collision_check installed_confs steal_configs pkg_merge_files pkg_merge_control pkg_config_info pkg_cleanup_files script_run control_cleanup version_cmp version_match run_state user_path_move get_nonconf_files get_changed_files
--[[ --[[
Configuration of the module. It is supported (yet unlikely to be needed) to modify Configuration of the module. It is supported (yet unlikely to be needed) to modify
...@@ -417,9 +417,18 @@ function status_dump(status) ...@@ -417,9 +417,18 @@ function status_dump(status)
end end
end end
-- Spawn `rm -rf` on the given directory and wait for it to finish.
-- A non-zero exit status is only logged as a warning, never raised.
local function rmrf(dir)
	-- TODO: Would it be better to remove from within our code, without calling rm?
	local function on_done(ecode, _, _, stderr)
		if ecode ~= 0 then
			WARN("Failed to clean up work directory ", dir, ": ", stderr)
		end
	end
	events_wait(run_util(on_done, nil, nil, cmd_timeout, cmd_kill_timeout, "rm", "-rf", dir))
end
--[[ --[[
Take the .ipk package (passed as the data, not as a path to a file) and unpack it Take the .ipk package and unpack it into a temporary location somewhere under
into a temporary location somewhere under tmp_dir. If you omit tmp_dir, /tmp is used. pkg_unpacked_dir.
It returns a path to a subdirectory of tmp_dir, where the package is unpacked. It returns a path to a subdirectory of tmp_dir, where the package is unpacked.
There are two further subdirectories, control and data. Data are the files to be merged There are two further subdirectories, control and data. Data are the files to be merged
...@@ -429,13 +438,13 @@ TODO: ...@@ -429,13 +438,13 @@ TODO:
• Sanity checking of the package. • Sanity checking of the package.
• Less calling of external commands. • Less calling of external commands.
]] ]]
function pkg_unpack(package, tmp_dir) function pkg_unpack(package_path)
-- The first unpack goes into the /tmp -- The first unpack goes into the /tmp
-- We assume s1dir returns sane names of directories ‒ no spaces or strange chars in them -- We assume s1dir returns sane names of directories ‒ no spaces or strange chars in them
local s1dir = mkdtemp() local s1dir = mkdtemp()
-- The results go into the provided dir, or to /tmp if none was provided -- The results go into the provided dir, or to /tmp if none was provided
-- FIXME: Sanity-check provided tmp_dir ‒ it must not contain strange chars utils.mkdirp(syscnf.pkg_unpacked_dir)
local s2dir = mkdtemp(tmp_dir) local s2dir = mkdtemp(syscnf.pkg_unpacked_dir)
-- If anything goes wrong, this is where we find the error message -- If anything goes wrong, this is where we find the error message
local err local err
-- Unpack the ipk into s1dir, getting control.tar.gz and data.tar.gz -- Unpack the ipk into s1dir, getting control.tar.gz and data.tar.gz
...@@ -444,7 +453,7 @@ function pkg_unpack(package, tmp_dir) ...@@ -444,7 +453,7 @@ function pkg_unpack(package, tmp_dir)
if ecode ~= 0 then if ecode ~= 0 then
err = "Stage 1 unpack failed: " .. stderr err = "Stage 1 unpack failed: " .. stderr
end end
end, function () chdir(s1dir) end, package, cmd_timeout, cmd_kill_timeout, "sh", "-c", "gzip -dc | tar x")) end, nil, nil, cmd_timeout, cmd_kill_timeout, "tar", "-xzf", package_path, "-C", s1dir))
-- TODO: Sanity check debian-binary -- TODO: Sanity check debian-binary
return err == nil return err == nil
end end
...@@ -452,11 +461,12 @@ function pkg_unpack(package, tmp_dir) ...@@ -452,11 +461,12 @@ function pkg_unpack(package, tmp_dir)
local function unpack_archive(what) local function unpack_archive(what)
local archive = s1dir .. "/" .. what .. ".tar.gz" local archive = s1dir .. "/" .. what .. ".tar.gz"
local dir = s2dir .. "/" .. what local dir = s2dir .. "/" .. what
mkdir(dir)
return run_util(function (ecode, _, _, stderr) return run_util(function (ecode, _, _, stderr)
if ecode ~= 0 then if ecode ~= 0 then
err = "Stage 2 unpack of " .. what .. " failed: " .. stderr err = "Stage 2 unpack of " .. what .. " failed: " .. stderr
end end
end, nil, package, cmd_timeout, cmd_kill_timeout, "sh", "-c", "mkdir -p '" .. dir .. "' && cd '" .. dir .. "' && gzip -dc <'" .. archive .. "' | tar xp") end, nil, nil, cmd_timeout, cmd_kill_timeout, "tar", "-xzf", archive, '-C', dir)
end end
local function stage2() local function stage2()
events_wait(unpack_archive("control"), unpack_archive("data")) events_wait(unpack_archive("control"), unpack_archive("data"))
...@@ -464,24 +474,12 @@ function pkg_unpack(package, tmp_dir) ...@@ -464,24 +474,12 @@ function pkg_unpack(package, tmp_dir)
end end
-- Try-finally like construct, make sure cleanup is called no matter what -- Try-finally like construct, make sure cleanup is called no matter what
local success, ok = pcall(function () return stage1() and stage2() end) local success, ok = pcall(function () return stage1() and stage2() end)
-- Do the cleanups
local events = {}
local function remove(dir)
-- TODO: Would it be better to remove from within our code, without calling rm?
table.insert(events, run_util(function (ecode, _, _, stderr)
if ecode ~= 0 then
WARN("Failed to clean up work directory ", dir, ": ", stderr)
end
end, nil, nil, cmd_timeout, cmd_kill_timeout, "rm", "-rf", dir))
end
-- Intermediate work space, not needed by the caller -- Intermediate work space, not needed by the caller
remove(s1dir) rmrf(s1dir)
if err then if err then
-- Clean up the resulting directory in case of errors -- Clean up the resulting directory in case of errors
remove(s2dir) rmrf(s2dir)
end end
-- Run all the cleanup removes in parallel
events_wait(unpack(events))
-- Cleanup done, call error() if anything failed -- Cleanup done, call error() if anything failed
if not success then error(ok) end if not success then error(ok) end
if not ok then error(err) end if not ok then error(err) end
...@@ -764,25 +762,6 @@ function steal_configs(current_status, installed_confs, configs) ...@@ -764,25 +762,6 @@ function steal_configs(current_status, installed_confs, configs)
return steal return steal
end end
-- Ensure the given directory exists
function dir_ensure(dir)
-- Try creating it.
local ok, err = pcall(function () mkdir(dir) end)
if not ok then
-- It may have failed because it already exists, check it
local tp = stat(dir)
if not tp then
-- It does not create, so creation failed for some reason
error(err)
elseif tp ~= "d" then
-- It failed because there is some file
return false
end
-- else ‒ there's the given directory, so it failed because it pre-existed. That's OK.
end
return true
end
--[[ --[[
Move anything on given path to dir_opkg_collided. This backups and removes original files. Move anything on given path to dir_opkg_collided. This backups and removes original files.
When keep is set to true, file is copied instead of moved. When keep is set to true, file is copied instead of moved.
...@@ -792,7 +771,7 @@ function user_path_move(path, keep) ...@@ -792,7 +771,7 @@ function user_path_move(path, keep)
local fpath = "" local fpath = ""
for dir in (syscnf.dir_opkg_collided .. path):gsub("[^/]*/?$", ""):gmatch("[^/]+") do for dir in (syscnf.dir_opkg_collided .. path):gsub("[^/]*/?$", ""):gmatch("[^/]+") do
local randex = "" local randex = ""
while not dir_ensure(fpath .. "/" .. dir .. randex) do while not utils.dir_ensure(fpath .. "/" .. dir .. randex) do
-- If there is file with same name, then append some random extension -- If there is file with same name, then append some random extension
randex = "." .. utils.randstr(6) randex = "." .. utils.randstr(6)
end end
...@@ -842,10 +821,10 @@ function pkg_merge_files(dir, dirs, files, configs) ...@@ -842,10 +821,10 @@ function pkg_merge_files(dir, dirs, files, configs)
for _, new_dir in ipairs(dirs_sorted) do for _, new_dir in ipairs(dirs_sorted) do
DBG("Creating dir " .. new_dir) DBG("Creating dir " .. new_dir)
local dir = syscnf.root_dir .. new_dir local dir = syscnf.root_dir .. new_dir
if not dir_ensure(dir) then if not utils.dir_ensure(dir) then
-- There is some file that user created. Move it away -- There is some file that user created. Move it away
user_path_move(dir) user_path_move(dir)
dir_ensure(dir) utils.dir_ensure(dir)
end end
end end
--[[ --[[
......
...@@ -31,6 +31,7 @@ local error = error ...@@ -31,6 +31,7 @@ local error = error
local pcall = pcall local pcall = pcall
local assert = assert local assert = assert
local pairs = pairs local pairs = pairs
local collectgarbage = collectgarbage
local unpack = unpack local unpack = unpack
local table = table local table = table
local backend = require "backend" local backend = require "backend"
...@@ -91,7 +92,7 @@ local function pkg_unpack(operations, status) ...@@ -91,7 +92,7 @@ local function pkg_unpack(operations, status)
WARN("Package " .. op.name .. " is not installed. Can't remove") WARN("Package " .. op.name .. " is not installed. Can't remove")
end end
elseif op.op == "install" then elseif op.op == "install" then
local pkg_dir = backend.pkg_unpack(op.data, syscnf.pkg_temp_dir) local pkg_dir = backend.pkg_unpack(op.file)
table.insert(dir_cleanups, pkg_dir) table.insert(dir_cleanups, pkg_dir)
local files, dirs, configs, control = backend.pkg_examine(pkg_dir) local files, dirs, configs, control = backend.pkg_examine(pkg_dir)
to_remove[control.Package] = true to_remove[control.Package] = true
...@@ -292,14 +293,11 @@ local function perform_internal(operations, journal_status, run_state) ...@@ -292,14 +293,11 @@ local function perform_internal(operations, journal_status, run_state)
-- Emulate try-finally -- Emulate try-finally
local ok, err = pcall(function () local ok, err = pcall(function ()
-- Make sure the temporary directory for unpacked packages exist -- Make sure the temporary directory for unpacked packages exist
local created = "" utils.mkdirp(syscnf.pkg_unpacked_dir)
for segment in syscnf.pkg_temp_dir:gmatch("([^/]*)/") do
created = created .. segment .. "/"
backend.dir_ensure(created)
end
-- Look at what the current status looks like. -- Look at what the current status looks like.
local to_remove, to_install, plan local to_remove, to_install, plan
to_remove, to_install, plan, dir_cleanups, cleanup_actions = step(journal.UNPACKED, pkg_unpack, true, operations, status) to_remove, to_install, plan, dir_cleanups, cleanup_actions = step(journal.UNPACKED, pkg_unpack, true, operations, status)
utils.cleanup_dirs({syscnf.pkg_download_dir})
cleanup_actions = cleanup_actions or {} -- just to handle if journal contains no cleanup actions (journal from previous version) cleanup_actions = cleanup_actions or {} -- just to handle if journal contains no cleanup actions (journal from previous version)
-- Drop the operations. This way, if we are tail-called, then the package buffers may be garbage-collected -- Drop the operations. This way, if we are tail-called, then the package buffers may be garbage-collected
operations = nil operations = nil
...@@ -426,6 +424,7 @@ function perform_queue() ...@@ -426,6 +424,7 @@ function perform_queue()
-- Ensure we reset the queue by running it. And also that we allow the garbage collector to collect the data in there. -- Ensure we reset the queue by running it. And also that we allow the garbage collector to collect the data in there.
local queue_cp = queue local queue_cp = queue
queue = {} queue = {}
collectgarbage() -- explicitly try to collect queue
return errors_format(perform(queue_cp)) return errors_format(perform(queue_cp))
end end
end end
...@@ -442,18 +441,13 @@ end ...@@ -442,18 +441,13 @@ end
-- Queue a request to install a package from the given file name. -- Queue a request to install a package from the given file name.
function queue_install(filename) function queue_install(filename)
local content, err = utils.read_file(filename) table.insert(queue, {op = "install", file = filename})
if content then
table.insert(queue, {op = "install", data = content})
else
error(err)
end
end end
function queue_install_downloaded(data, name, version, modifier) function queue_install_downloaded(file, name, version, modifier)
table.insert(queue, { table.insert(queue, {
op = "install", op = "install",
data = data, file = file,
name = name, name = name,
version = version, version = version,
reboot = modifier.reboot, reboot = modifier.reboot,
......
...@@ -114,7 +114,9 @@ function prepare(entrypoint) ...@@ -114,7 +114,9 @@ function prepare(entrypoint)
error(utils.exception("corruption", "The sha256 sum of " .. task.name .. " does not match")) error(utils.exception("corruption", "The sha256 sum of " .. task.name .. " does not match"))
end end
end end
transaction.queue_install_downloaded(data, task.name, task.package.Version, task.modifier) local fpath = syscnf.pkg_download_dir .. task.name .. '-' .. task.package.Version .. '.ipk'
utils.write_file(fpath, data)
transaction.queue_install_downloaded(fpath, task.name, task.package.Version, task.modifier)
elseif task.action == "remove" then elseif task.action == "remove" then
INFO("Queue removal of " .. task.name) INFO("Queue removal of " .. task.name)
transaction.queue_remove(task.name) transaction.queue_remove(task.name)
......
...@@ -646,6 +646,7 @@ struct { ...@@ -646,6 +646,7 @@ struct {
{"cp", "/bin/cp"}, {"cp", "/bin/cp"},
{"rm", "/bin/rm"}, {"rm", "/bin/rm"},
{"gzip", "/bin/gzip"}, {"gzip", "/bin/gzip"},
{"tar", "/bin/tar"},
{"find", "/usr/bin/find"}, {"find", "/usr/bin/find"},
{"sh", "/bin/sh"}, {"sh", "/bin/sh"},
{"mktemp", "/bin/mktemp"}, {"mktemp", "/bin/mktemp"},
......
...@@ -28,6 +28,7 @@ local lines2set = utils.lines2set ...@@ -28,6 +28,7 @@ local lines2set = utils.lines2set
module("backend-tests", package.seeall, lunit.testcase) module("backend-tests", package.seeall, lunit.testcase)
local datadir = (os.getenv("S") or ".") .. "/tests/data/" local datadir = (os.getenv("S") or ".") .. "/tests/data/"
local tmpdir = os.getenv("TMPDIR") or "/tmp"
-- Tests for the block_parse function -- Tests for the block_parse function
function test_block_parse() function test_block_parse()
...@@ -257,7 +258,8 @@ local tmp_dirs = {} ...@@ -257,7 +258,8 @@ local tmp_dirs = {}
Test the chain of functions ‒ unpack, examine Test the chain of functions ‒ unpack, examine
]] ]]
function test_pkg_unpack() function test_pkg_unpack()
local path = B.pkg_unpack(utils.read_file(datadir .. "updater.ipk")) syscnf.set_root_dir(tmpdir)
local path = B.pkg_unpack(datadir .. "updater.ipk")
-- Make sure it is deleted on teardown -- Make sure it is deleted on teardown
table.insert(tmp_dirs, path) table.insert(tmp_dirs, path)
-- Check list of extracted files -- Check list of extracted files
......
...@@ -26,7 +26,8 @@ function test_set_root_dir() ...@@ -26,7 +26,8 @@ function test_set_root_dir()
SC.set_root_dir("/dir/") SC.set_root_dir("/dir/")
assert_equal("/dir/usr/lib/opkg/status", SC.status_file) assert_equal("/dir/usr/lib/opkg/status", SC.status_file)
assert_equal("/dir/usr/lib/opkg/info/", SC.info_dir) assert_equal("/dir/usr/lib/opkg/info/", SC.info_dir)
assert_equal("/dir/usr/share/updater/unpacked/", SC.pkg_temp_dir) assert_equal("/dir/usr/share/updater/unpacked/", SC.pkg_unpacked_dir)
assert_equal("/dir/usr/share/updater/download/", SC.pkg_download_dir)
assert_equal("/dir/usr/share/updater/collided/", SC.dir_opkg_collided) assert_equal("/dir/usr/share/updater/collided/", SC.dir_opkg_collided)
end end
......
...@@ -9,7 +9,6 @@ TRANS_SYS_TESTS := \ ...@@ -9,7 +9,6 @@ TRANS_SYS_TESTS := \
UPD_SYS_TESTS := \ UPD_SYS_TESTS := \
help \ help \
plan \ plan \
plan-unapproved \
steal-confs \ steal-confs \
simple-update \ simple-update \
multiple-repos \ multiple-repos \
......
...@@ -22,7 +22,7 @@ local B = require 'backend' ...@@ -22,7 +22,7 @@ local B = require 'backend'
local T = require 'transaction' local T = require 'transaction'
local utils = require 'utils' local utils = require 'utils'
local journal = require 'journal' local journal = require 'journal'
require "syscnf" local syscnf = require "syscnf"
syscnf.set_root_dir() syscnf.set_root_dir()
...@@ -57,25 +57,9 @@ local intro = { ...@@ -57,25 +57,9 @@ local intro = {
p = {} p = {}
}, },
{ {
f = "backend.dir_ensure", f = "utils.mkdirp",
p = {"/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/share/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/share/updater/"}
},
{
f = "backend.dir_ensure",
p = {"/usr/share/updater/unpacked/"} p = {"/usr/share/updater/unpacked/"}
} },
} }
local function outro(cleanup_dirs, status) local function outro(cleanup_dirs, status)
...@@ -120,7 +104,9 @@ local function tables_join(...) ...@@ -120,7 +104,9 @@ local function tables_join(...)
end end
local function mocks_install() local function mocks_install()
mock_gen("backend.dir_ensure") mock_gen("os.remove")
mock_gen("utils.dir_ensure")
mock_gen("utils.mkdirp")
mock_gen("backend.status_parse", function () fail("Forbidden function backend.status_parse") end) mock_gen("backend.status_parse", function () fail("Forbidden function backend.status_parse") end)
mock_gen("backend.run_state", function() mock_gen("backend.run_state", function()
return { return {
...@@ -168,6 +154,10 @@ function test_perform_empty() ...@@ -168,6 +154,10 @@ function test_perform_empty()
f = "journal.write", f = "journal.write",
p = {journal.UNPACKED, {}, {}, {}, {}, {}} p = {journal.UNPACKED, {}, {}, {}, {}, {}}
}, },
{
f="utils.cleanup_dirs",
p={ { "/usr/share/updater/download/" } }
},
{ {
f = "backend.collision_check", f = "backend.collision_check",
p = {test_status, {}, {}} p = {test_status, {}, {}}
...@@ -199,7 +189,7 @@ function test_perform_ok() ...@@ -199,7 +189,7 @@ function test_perform_ok()
local result = T.perform({ local result = T.perform({
{ {
op = "install", op = "install",
data = "<package data>" file = "<package>"
}, { }, {
op = "remove", op = "remove",
name = "pkg-rem" name = "pkg-rem"
...@@ -223,7 +213,7 @@ function test_perform_ok() ...@@ -223,7 +213,7 @@ function test_perform_ok()
local expected = tables_join(intro, { local expected = tables_join(intro, {
{ {
f = "backend.pkg_unpack", f = "backend.pkg_unpack",
p = {"<package data>", syscnf.pkg_temp_dir} p = {"<package>"}
}, },
{ {
f = "backend.pkg_examine", f = "backend.pkg_examine",
...@@ -258,6 +248,10 @@ function test_perform_ok() ...@@ -258,6 +248,10 @@ function test_perform_ok()
{} {}
} }
}, },
{
f = "utils.cleanup_dirs",
p = {{syscnf.pkg_download_dir}}
},
{ {
f = "backend.collision_check", f = "backend.collision_check",
p = { p = {
...@@ -351,16 +345,16 @@ end ...@@ -351,16 +345,16 @@ end
function test_perform_collision() function test_perform_collision()
mocks_install() mocks_install()
mock_gen("backend.collision_check", function () return {f = {["<pkg1name>"] = "new", ["<pkg2name>"] = "new", ["other"] = "existing"}}, {} end) mock_gen("backend.collision_check", function () return {f = {["<pkg1name>"] = "new", ["<pkg2name>"] = "new", ["other"] = "existing"}}, {} end)
mock_gen("backend.pkg_unpack", function (data) return data:gsub("data", "dir") end) mock_gen("backend.pkg_unpack", function (data) return data:gsub("file", "dir") end)
mock_gen("backend.pkg_examine", function (dir) return {f = true}, {d = true}, {c = "1234567890123456"}, {Package = dir:gsub("dir", "name")} end) mock_gen("backend.pkg_examine", function (dir) return {f = true}, {d = true}, {c = "1234567890123456"}, {Package = dir:gsub("dir", "name")} end)
local ok, err = pcall(T.perform, { local ok, err = pcall(T.perform, {
{ {
op = "install", op = "install",
data = "<pkg1data>" file = "<pkg1file>"
}, },
{ {
op = "install", op = "install",
data = "<pkg2data>" file = "<pkg2file>"
} }
}) })
assert_false(ok) assert_false(ok)
...@@ -370,7 +364,7 @@ function test_perform_collision() ...@@ -370,7 +364,7 @@ function test_perform_collision()
local expected = tables_join(intro, { local expected = tables_join(intro, {
{ {
f = "backend.pkg_unpack", f = "backend.pkg_unpack",
p = {"<pkg1data>", syscnf.pkg_temp_dir} p = {"<pkg1file>"}
}, },
{ {
f = "backend.pkg_examine", f = "backend.pkg_examine",
...@@ -378,7 +372,7 @@ function test_perform_collision() ...@@ -378,7 +372,7 @@ function test_perform_collision()
}, },
{ {
f = "backend.pkg_unpack", f = "backend.pkg_unpack",
p = {"<pkg2data>", syscnf.pkg_temp_dir} p = {"<pkg2file>"}
}, },
{ {
f = "backend.pkg_examine", f = "backend.pkg_examine",
...@@ -416,6 +410,10 @@ function test_perform_collision() ...@@ -416,6 +410,10 @@ function test_perform_collision()
{} {}
} }
}, },
{
f = "utils.cleanup_dirs",
p = {{syscnf.pkg_download_dir}}
},
{ {
f = "backend.collision_check", f = "backend.collision_check",
p = { p = {
...@@ -552,6 +550,7 @@ function test_recover_late() ...@@ -552,6 +550,7 @@ function test_recover_late()
status_mod["pkg-rem"] = nil status_mod["pkg-rem"] = nil
local intro_mod = utils.clone(intro) local intro_mod = utils.clone(intro)
intro_mod[2].f = "journal.recover" intro_mod[2].f = "journal.recover"
intro_mod[4] = {f = "utils.cleanup_dirs", p = {{syscnf.pkg_download_dir}}}
local expected = tables_join(intro_mod, outro({"pkg_dir"}, status_mod)) local expected = tables_join(intro_mod, outro({"pkg_dir"}, status_mod))
assert_table_equal(expected, mocks_called) assert_table_equal(expected, mocks_called)
end end
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment