Verified Commit 6db555f6 authored by Karel Koci's avatar Karel Koci 🤘

Integrate new URI implementation

This commit changes a lot of functionality including user visible ones
such as configuration language.

The most significant change from the user's point of view is that
configuration-language functions no longer return handlers for the
configuration they created. This is to simplify the implementation. The
correct approach is to use the package or repository name instead of a
handler. Handler usage was less powerful and, because of sandboxing, also
unusable across multiple scripts.

There are additional changes in the form of obsoleted extra options for
configuration commands. The repository extra option subdirs is obsoleted,
and related functionality now works differently, among other changes.
Please review the changes in the language documentation file.
parent 96f9a511
This diff is collapsed.
......@@ -35,10 +35,11 @@ local mkdir = mkdir
local stat = stat
local events_wait = events_wait
local run_util = run_util
local uri = require "uri"
module "utils"
-- luacheck: globals lines2set map set2arr arr2set cleanup_dirs dir_ensure mkdirp read_file write_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists
-- luacheck: globals lines2set map set2arr arr2set cleanup_dirs dir_ensure mkdirp read_file write_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists uri_syste_cas uri_no_crl uri_config uri_content
--[[
Convert provided text into set of lines. Doesn't care about the order.
......@@ -370,4 +371,62 @@ function file_exists(name)
end
end
--[[
This function applies given table of configuration to given uri object.
This is here because we need bridge between old approach of using lua tables and
approach of inherited settings in uri object.
For full support of all fields see language documentation, section Verification.
Any field that is not set in table is ignored (configuration is not changed).
]]
--[[
Apply a table of configuration options to the given uri object.

This bridges the old approach of passing lua tables of options to the new
approach of inherited settings carried by uri objects. For the full list of
supported fields see the language documentation, section Verification.

Any field that is nil in the table is ignored (the uri object keeps its
current/inherited configuration for that setting).
]]
function uri_config(uriobj, config)
	-- Normalize a configured value to an array of values:
	-- a table is used as-is, a boolean yields no values (only the reset
	-- call below happens), and anything else is a single value.
	local function to_list(value)
		if type(value) == "table" then
			return value
		end
		if type(value) == "boolean" then
			return {}
		end
		return {value}
	end
	local ca = config.ca
	if ca ~= nil then
		uriobj:set_ssl_verify(ca)
		uriobj:add_ca(nil) -- nil call resets previously added CAs (matches uri API usage)
		for _, value in pairs(to_list(ca)) do
			uriobj:add_ca(value)
		end
	end
	local crl = config.crl
	if crl ~= nil then
		uriobj:add_crl(nil) -- reset previously added CRLs
		for _, value in pairs(to_list(crl)) do
			uriobj:add_crl(value)
		end
	end
	if config.ocsp ~= nil then
		uriobj:set_ocsp(config.ocsp)
	end
	local pubkey = config.pubkey
	if pubkey ~= nil then
		uriobj:add_pubkey(nil) -- reset previously added public keys
		for _, value in pairs(to_list(pubkey)) do
			uriobj:add_pubkey(value)
		end
	end
	if config.sig ~= nil then
		uriobj:set_sig(config.sig)
	end
end
-- Get content of given URI
-- It returns downloaded content as first argument and uri object as second (which
-- can be used as a parent to other uris)
-- Get content of given URI.
-- struri: string URI to fetch; parent: optional uri object whose settings
-- are inherited; config: table of verification options (see uri_config).
-- It returns downloaded content as first argument and the uri object as
-- second (which can be used as a parent to other uris).
-- Raises an error (plain string) when the download fails.
function uri_content(struri, parent, config)
	local master = uri.new()
	local u = master:to_buffer(struri, parent)
	uri_config(u, config)
	-- download() returns the uri object that failed (nil on success).
	-- Bug fix: ask THAT object for its error; the original always queried
	-- u, which reports the wrong error if the master ever tracks more
	-- than this single uri.
	local failed = master:download()
	if failed then
		error("URI download failed: " .. failed:download_error())
	end
	return u:finish(), u
end
return _M
This diff is collapsed.
......@@ -38,7 +38,6 @@ local utils = require "utils"
local backend = require "backend"
local requests = require "requests"
local syscnf = require "syscnf"
local uri = require "uri"
local uci_ok, uci = pcall(require, "uci")
module "sandbox"
......@@ -52,7 +51,9 @@ local updater_features = utils.arr2set({
'conflicts',
'abi_change',
'abi_change_deep',
'replan_string'
'replan_string',
'relative_uri',
'no_returns'
})
-- Available functions and "constants" from global environment
......@@ -91,8 +92,8 @@ local local_available_funcs = {
local rest_additional_funcs = {
{"version_match", backend.version_match},
{"version_cmp", backend.version_cmp},
{"system_cas", uri.system_cas},
{"no_crl", uri.no_crl}
{"system_cas", true},
{"no_crl", false}
}
state_vars = nil
......@@ -251,7 +252,7 @@ List the variable names here. This way we ensure they are actually set in case
they are nil. This helps in testing and also ensures some other global variable
isn't mistaken for the actual value that isn't available.
]]
for _, name in pairs({'root_dir', 'model', 'board_name', 'turris_version', 'serial', 'architectures', 'installed', 'self_version', 'language_version', 'features'}) do
for _, name in pairs({'root_dir', 'os_release', 'host_os_release', 'architectures', 'installed', 'self_version', 'language_version', 'features'}) do
funcs.Restricted[name] = {
mode = "state",
value = name
......@@ -396,8 +397,7 @@ function run_sandboxed(chunk, name, sec_level, parent, context_merge, context_mo
end
local context = new(sec_level, parent)
utils.table_merge(context, context_merge or {})
context_mod = context_mod or function () end
context_mod(context)
if context_mod then context_mod(context) end
local func = setfenv(chunk, context.env)
local ok, err = pcall(func)
if ok then
......
......@@ -25,7 +25,6 @@ local pcall = pcall
local next = next
local type = type
local assert = assert
local unpack = unpack
local table = table
local string = string
local events_wait = events_wait
......@@ -36,112 +35,84 @@ local ERROR = ERROR
local utils = require "utils"
local backend = require "backend"
local requests = require "requests"
local uri = require "uri"
module "postprocess"
-- luacheck: globals get_repos deps_canon conflicts_canon available_packages pkg_aggregate run sort_candidates
function get_repos()
DBG("Getting repos")
--[[
The repository index downloads are already in progress since
the repository objects have been created. We now register
callback for the arrival of data. This might happen right
away or later on. Anyway, after we wait, all the indices
have been downloaded.
When we get each index, we detect if the data is gzipped
or not. If it is not, the repository is parsed right away.
If it is, extraction is run in the background and parsing
is scheduled for once it finishes. Eventually, we wait for
all the extractions to finish, and at that point everything
is parsed.
]]
local uris = {} -- The uris we wait for to be downloaded
local extract_events = {} -- The extractions we wait for
local errors = {} -- Collect errors as we go
local fatal = false -- Are any of them a reason to abort?
--[[
We don't care about the order in which we register the callbacks
(which may be different from the order in which they are called
anyway).
]]
for _, repo in pairs(requests.known_repositories_all) do
repo.tp = 'parsed-repository'
repo.content = {}
for subrepo, index_uri in pairs(utils.private(repo).index_uri) do
local name = repo.name .. "/" .. index_uri.uri
table.insert(uris, index_uri)
local function broken(why, extra)
ERROR("Index " .. name .. " is broken (" .. why .. "): " .. tostring(extra))
extra.why = why
extra.repo = name
repo.content[subrepo] = extra
table.insert(errors, extra)
fatal = fatal or not utils.arr2set(repo.ignore or {})[why]
end
local function parse(content)
DBG("Parsing index " .. name)
local ok, list = pcall(backend.repo_parse, content)
if ok then
for _, pkg in pairs(list) do
-- Compute the URI of each package (but don't download it yet, so don't create the uri object)
pkg.uri_raw = repo.repo_uri .. subrepo .. '/' .. pkg.Filename
pkg.repo = repo
end
repo.content[subrepo] = {
tp = "pkg-list",
list = list
}
else
broken('syntax', utils.exception('repo broken', "Couldn't parse the index of " .. name .. ": " .. tostring(list)))
end
end
local function decompressed(ecode, _, stdout, stderr)
DBG("Decompression of " .. name .. " done")
if ecode == 0 then
parse(stdout)
else
broken('syntax', utils.exception('repo broken', "Couldn't decompress " .. name .. ": " .. stderr))
local function repo_parse(repo)
repo.tp = 'parsed-repository'
repo.content = {}
local name = repo.name .. "/" .. repo.index_uri:uri()
-- Get index
local index = repo.index_uri:finish() -- TODO error?
if index:sub(1, 2) == string.char(0x1F, 0x8B) then -- compressed index
DBG("Decompressing index " .. name)
local extr = run_util(function (ecode, _, stdout, stderr)
if ecode ~= 0 then
error(utils.exception('repo broken', "Couldn't decompress " .. name .. ": " .. stderr))
end
index = stdout
end
local function downloaded(ok, answer)
DBG("Received repository index " .. name)
if not ok then
-- Couldn't download
-- TODO: Once we have validation, this could also mean the integrity is broken, not download
broken('missing', answer)
elseif answer:sub(1, 2) == string.char(0x1F, 0x8B) then
-- It starts with gzip magic - we want to decompress it
DBG("Index " .. name .. " is compressed, decompressing")
table.insert(extract_events, run_util(decompressed, nil, answer, -1, -1, 'gzip', '-dc'))
else
parse(answer)
end
, nil, index, -1, -1, 'gzip', '-dc')
events_wait(extr)
end
-- Parse index
DBG("Parsing index " .. name)
local ok, list = pcall(backend.repo_parse, index)
if not ok then
local msg = "Couldn't parse the index of " .. name .. ": " .. tostring(list)
if not repo.optional then
error(utils.exception('syntax', msg))
end
WARN(msg)
-- TODO we might want to ignore this repository in its fulles instead of this
end
for _, pkg in pairs(list) do
-- Compute the URI of each package (but don't download it yet, so don't create the uri object)
pkg.uri_raw = repo.repo_uri .. '/' .. pkg.Filename
pkg.repo = repo
end
repo.content = list
end
local function repos_failed_download(uri_fail)
-- Locate failed repository and check if we can continue
for _, repo in pairs(requests.known_repositories) do
if uri_fail == repo.index_uri then
local message = "Download failed for repository index " ..
repo.name .. " (" .. repo.index_uri:uri() .. "): " ..
tostring(repo.index_uri:download_error())
if not repo.optional then
error(utils.exception('repo missing', message))
end
index_uri:cback(downloaded)
WARN(message)
repo.tp = 'failed-repository'
break
end
--[[
We no longer need to keep the uris in there, we
wait for them here and after all is done, we want
the contents to be garbage collected.
]]
utils.private(repo).index_uri = nil
end
-- Make sure everything is downloaded
uri.wait(unpack(uris))
-- And extracted
events_wait(unpack(extract_events))
-- Process any errors
local multi = utils.exception('multiple', "Multiple exceptions (" .. #errors .. ")")
multi.errors = errors
if fatal then
error(multi)
elseif next(errors) then
return multi
else
return nil
end
--[[
Download all repository indexes and parse them.

Downloads are retried in a loop: every time the master download reports a
failed uri, repos_failed_download decides whether that failure is fatal
(raising an error) or merely marks the repository as failed, and the
download of the remaining uris is restarted. Once everything downloaded,
each still-valid repository (tp == 'repository') is parsed via repo_parse.
]]
function get_repos()
	DBG("Downloading repositories indexes")
	-- Keep downloading until no uri reports failure
	repeat
		local failed = requests.repositories_uri_master:download()
		if failed then
			repos_failed_download(failed)
		end
	until not failed
	-- Collect the downloaded indexes and parse them
	for _, repo in pairs(requests.known_repositories) do
		if repo.tp == 'repository' then -- ignore failed repositories
			local ok, err = pcall(repo_parse, repo)
			if not ok then
				-- TODO is this fatal?
				error(err)
			end
		end
	end
end
......@@ -332,21 +303,20 @@ to form single package object.
]]
function pkg_aggregate()
DBG("Aggregating packages together")
for _, repo in pairs(requests.known_repositories_all) do
for _, cont in pairs(repo.content) do
if type(cont) == 'table' and cont.tp == 'pkg-list' then
for name, candidate in pairs(cont.list) do
if not available_packages[name] then
available_packages[name] = {candidates = {}, modifiers = {}}
end
table.insert(available_packages[name].candidates, candidate)
if candidate.Provides then -- Add this candidate to package it provides
for p in candidate.Provides:gmatch("[^, ]+") do
if not available_packages[p] then
available_packages[p] = {candidates = {}, modifiers = {}}
end
table.insert(available_packages[p].candidates, candidate)
for _, repo in pairs(requests.known_repositories) do
if repo.tp == "parsed-repository" then
-- TODO this content design is invalid as there can be multiple packages of same name in same repository with different versions
for name, candidate in pairs(repo.content) do
if not available_packages[name] then
available_packages[name] = {candidates = {}, modifiers = {}}
end
table.insert(available_packages[name].candidates, candidate)
if candidate.Provides then -- Add this candidate to package it provides
for p in candidate.Provides:gmatch("[^, ]+") do
if not available_packages[p] then
available_packages[p] = {candidates = {}, modifiers = {}}
end
table.insert(available_packages[p].candidates, candidate)
end
end
end
......@@ -467,10 +437,7 @@ function pkg_aggregate()
end
function run()
local repo_errors = get_repos()
if repo_errors then
WARN("Not all repositories are available")
end
get_repos()
pkg_aggregate()
end
......
......@@ -266,7 +266,7 @@ local function sat_build(sat, pkgs, requests)
}
-- Go trough requests and add them to SAT
for _, req in ipairs(requests) do
if not pkgs[req.package.name] and not utils.arr2set(req.ignore or {})["missing"] then
if not pkgs[req.package.name] and not req.optional then
error(utils.exception('inconsistent', "Requested package " .. req.package.name .. " doesn't exists."))
end
local req_var = sat:var()
......@@ -393,7 +393,7 @@ local function build_plan(pkgs, requests, sat, satmap)
inwstack[name] = #wstack + 1 -- Signal that we are working on this package group.
table.insert(wstack, name)
for _, p in pkg_dep_iterate(utils.multi_index(pkg, 'modifier', 'deps') or {}) do -- plan package group dependencies
pkg_plan(p, ignore_missing or utils.arr2set(utils.multi_index(pkg, 'modifier', 'ignore') or {})["deps"], false, "Package " .. name .. " requires package")
pkg_plan(p, ignore_missing or utils.multi_index(pkg, 'modifier', 'optional'), false, "Package " .. name .. " requires package")
end
if not next(candidates) then return end -- We have no candidate, but we passed previous check because it's virtual
local r = {}
......@@ -406,7 +406,7 @@ local function build_plan(pkgs, requests, sat, satmap)
else
no_pkg_candidate = false
for _, p in pkg_dep_iterate(utils.multi_index(candidate, 'deps') or {}) do
pkg_plan(p, ignore_missing or utils.arr2set(utils.multi_index(pkg, 'modifier', 'ignore') or {})["deps"], false, "Package " .. name .. " requires package")
pkg_plan(p, ignore_missing or utils.multi_index(pkg, 'modifier', 'optional'), false, "Package " .. name .. " requires package")
end
end
end
......@@ -439,7 +439,7 @@ local function build_plan(pkgs, requests, sat, satmap)
for _, req in pairs(requests) do
if sat[satmap.req2sat[req]] then -- Plan only if we can satisfy given request
if req.tp == "install" then -- And if it is install request, uninstall requests are resolved by not being planned.
local pln = pkg_plan(req.package, false, utils.arr2set(req.ignore or {})["missing"], 'Requested package')
local pln = pkg_plan(req.package, false, req.optional, 'Requested package')
-- Note that if pln is nil than we ignored missing package. We have to compute with that here
if pln then
if req.reinstall then
......
......@@ -20,18 +20,19 @@ along with Updater. If not, see <http://www.gnu.org/licenses/>.
local next = next
local error = error
local ipairs = ipairs
local pcall = pcall
local table = table
local WARN = WARN
local INFO = INFO
local DIE = DIE
local md5 = md5
local md5_file = md5_file
local sha256_file = sha256_file
local sha256 = sha256
local reexec = reexec
local LS_CONF = LS_CONF
local LS_PLAN = LS_PLAN
local LS_DOWN = LS_DOWN
local update_state = update_state
local log_event = log_event
local utils = require "utils"
local syscnf = require "syscnf"
local sandbox = require "sandbox"
......@@ -56,16 +57,13 @@ end
local function required_pkgs(entrypoint)
-- Get the top-level script
local tlc = sandbox.new('Full')
local ep_uri = uri(tlc, entrypoint)
local ok, tls = ep_uri:get()
if not ok then error(tls) end
local entry_chunk, entry_uri = utils.uri_content(entrypoint, nil, {})
local merge = {
-- Note: See requests.script for usage of this value
["parent_script_uri"] = entry_uri
}
update_state(LS_CONF)
--[[
Run the top level script with full privileges.
The script shall be part of updater anyway.
]]
local err = sandbox.run_sandboxed(tls, "", 'Full')
local err = sandbox.run_sandboxed(entry_chunk, entrypoint, 'Full', nil, merge)
if err and err.tp == 'error' then error(err) end
update_state(LS_PLAN)
-- Go through all the requirements and decide what we need
......@@ -102,44 +100,41 @@ end
function tasks_to_transaction()
INFO("Downloading packages")
update_state(LS_DOWN)
utils.mkdirp(syscnf.pkg_download_dir)
-- Start packages download
local uri_master = uri:new()
for _, task in ipairs(tasks) do
if task.action == "require" then
-- Strip sig verification off, packages from repos don't have their own .sig
-- files, but they are checked by hashes in the (already checked) index.
local veriopts = utils.shallow_copy(task.package.repo)
local veri = veriopts.verification or utils.private(task.package.repo).context.verification or 'both'
if veri == 'both' then
veriopts.verification = 'cert'
elseif veri == 'sig' then
veriopts.verification = 'none'
end
task.real_uri = uri(utils.private(task.package.repo).context, task.package.uri_raw, veriopts)
task.real_uri:cback(function()
log_event('D', task.name .. " " .. task.package.Version)
end)
task.file = syscnf.pkg_download_dir .. task.name .. '-' .. task.package.Version .. '.ipk'
task.real_uri = uri_master:to_file(task.package.Filename, task.file, task.package.repo.index_uri)
task.real_uri:add_pubkey() -- do not verify signatures (there are none)
-- TODO on failure: log_event('D', task.name .. " " .. task.package.Version)
end
end
local failed_uri = uri_master:download()
if failed_uri then
error(utils.exception("download",
"Download of " .. failed_uri:uri() .. " failed: " .. failed_uri:download_error()))
end
-- Now push all data into the transaction
utils.mkdirp(syscnf.pkg_download_dir)
for _, task in ipairs(tasks) do
if task.action == "require" then
local ok, data = task.real_uri:get()
if not ok then error(data) end
local ok, err = pcall(function() task.real_uri:finish() end)
if not ok then error(err) end
if task.package.MD5Sum then
local sum = md5(data)
local sum = md5_file(task.file)
if sum ~= task.package.MD5Sum then
error(utils.exception("corruption", "The md5 sum of " .. task.name .. " does not match"))
end
end
if task.package.SHA256Sum then
local sum = sha256(data)
local sum = sha256_file(task.file)
if sum ~= task.package.SHA256Sum then
error(utils.exception("corruption", "The sha256 sum of " .. task.name .. " does not match"))
end
end
local fpath = syscnf.pkg_download_dir .. task.name .. '-' .. task.package.Version .. '.ipk'
utils.write_file(fpath, data)
transaction.queue_install_downloaded(fpath, task.name, task.package.Version, task.modifier)
transaction.queue_install_downloaded(task.file, task.name, task.package.Version, task.modifier)
elseif task.action == "remove" then
transaction.queue_remove(task.name)
else
......
......@@ -259,7 +259,7 @@ Test the chain of functions ‒ unpack, examine
]]
function test_pkg_unpack()
syscnf.set_root_dir(tmpdir)
local path = B.pkg_unpack(datadir .. "updater.ipk")
local path = B.pkg_unpack(datadir .. "repo/updater.ipk")
-- Make sure it is deleted on teardown
table.insert(tmp_dirs, path)
-- Check list of extracted files
......@@ -911,7 +911,7 @@ function test_config_modified()
-- If a file doesn't exist, it returns nil
assert_nil(B.config_modified("/file/does/not/exist", "12345678901234567890123456789012"))
-- We test on a non-config file, but it the same.
local file = (os.getenv("S") or ".") .. "/tests/data/updater.ipk"
local file = (os.getenv("S") or ".") .. "/tests/data/repo/updater.ipk"
assert_false(B.config_modified(file, "182171ccacfc32a9f684479509ac471a"))
assert(B.config_modified(file, "282171ccacfc32a9f684479509ac471b"))
assert_false(B.config_modified(file, "4f54362b30f53ae6862b11ff34d22a8d4510ed2b3e757b1f285dbd1033666e55"))
......
Package: 6in4
Version: 21-2
Depends: libc, kmod-sit
Source: package/network/ipv6/6in4
License: GPL-2.0
Section: net
Maintainer: Jo-Philipp Wich <xm@subsignal.org>
Architecture: all
Installed-Size: 1558
Filename: 6in4_21-2_all.ipk
Size: 2534
MD5Sum: a2a58a05c002cf7b45fbe364794d96a5
SHA256sum: 06c3e5630a54a6c2d95ff13945b76e4122ac1a9e533fe4665c501ae26d55933d
Description: Provides support for 6in4 tunnels in /etc/config/network.
Refer to http://wiki.openwrt.org/doc/uci/network for
configuration details.
Package: 6rd
Version: 9-2
Depends: libc, kmod-sit
Source: package/network/ipv6/6rd
License: GPL-2.0
Section: net
Maintainer: Steven Barth <cyrus@openwrt.org>
Architecture: all
Installed-Size: 3432
Filename: 6rd_9-2_all.ipk
Size: 4416
MD5Sum: 2b46cba96c887754f879676be77615e5
SHA256sum: e1081e495d0055f962a0ea4710239447eabf596f7acb06ccf0bd6f06b125fda8
Description: Provides support for 6rd tunnels in /etc/config/network.
Refer to http://wiki.openwrt.org/doc/uci/network for
configuration details.
......@@ -469,7 +469,7 @@ function test_missing_ignore_deps()
tp = 'package',
name = 'pkg',
},
ignore = {'missing'},
optional = true,
priority = 50,
}
}
......@@ -1834,7 +1834,7 @@ function test_request_no_candidate_ignore()
tp = 'package',
name = 'pkg',
},
ignore = {'missing'},
optional = true,
priority = 50,
}
}
......@@ -1863,7 +1863,7 @@ function test_missing_install()
tp = 'package',
name = 'pkg2'
},
ignore = {'missing'},
optional = true,
priority = 50,
}
}
......@@ -1885,7 +1885,7 @@ function test_missing_dep_ignore()
pkg1 = {
candidates = {{Package = 'pkg1', deps = 'pkg2', repo = def_repo}},
modifier = {
ignore = {"deps"}
optional = true
},
name = "pkg1"
}
......@@ -1906,7 +1906,7 @@ function test_missing_dep_ignore()
action = "require",
package = {Package = 'pkg1', deps = 'pkg2', repo = def_repo},
modifier = {
ignore = {"deps"}
optional = true
},
critical = false,
name = "pkg1"
......
This diff is collapsed.
......@@ -29,86 +29,74 @@ module("requests-tests", package.seeall, lunit.testcase)
local tmp_dirs = {}
local sandbox_fun_i = 0
--[[
Evaluate a single Lua expression inside a sandbox and return its value.

func_code is the source of an expression; it is wrapped into a chunk that
assigns it to the global "result". The chunk is run via
sandbox.run_sandboxed at the given security level (default "Restricted");
the chunk name is made unique with the sandbox_fun_i counter. The
context-modifier callback captures the sandbox environment so the computed
value can be read back after execution. Fails the test unless the sandbox
reports a "context" result.
]]
local function run_sandbox_fun(func_code, level)
-- Wrap the expression so its value lands in the sandbox environment
local chunk = "result = " .. func_code
local env
local result = sandbox.run_sandboxed(chunk, "Function chunk" .. tostring(sandbox_fun_i), level or "Restricted", nil, nil, function (context)
env = context.env
end)
-- Unique chunk names across multiple invocations
sandbox_fun_i = sandbox_fun_i + 1
assert_equal("context", result.tp, result.msg)
return env.result
end
function test_package()
local p1 = run_sandbox_fun("Package('pkg_name')")
assert_table_equal({
tp = "package",
name = "pkg_name"
}, p1)
local p2 = run_sandbox_fun("Package('pkg_name', {replan = true, reboot = 'delayed', priority = 42})")
local result = sandbox.run_sandboxed([[
Package('pkg_name')
Package('pkg_name', {replan = true, reboot = 'delayed', priority = 42})
]], "test_package_chunk", "Restricted")
assert_equal("context", result.tp, result.msg)
assert_table_equal({
tp = "package",
name = "pkg_name",
replan = true,
reboot = "delayed",
priority = 42
}, p2)
assert_table_equal({p1, p2}, requests.known_packages)
{
tp = "package",
name = "pkg_name"
},
{
tp = "package",
name = "pkg_name",
replan = true,
reboot = "delayed",
priority = 42
}
}, requests.known_packages)
end
function test_repository()
requests.repo_serial = 1
local r1 = run_sandbox_fun("Repository('test-repo', 'http://example.org/repo')")
assert_table_equal({
tp = "repository",
name = "test-repo",
repo_uri = "http://example.org/repo",
priority = 50,
serial = 1
}, r1)
utils.private(r1).context = nil
assert_table_equal({
index_uri = {[""] = {u = "http://example.org/repo/Packages.gz"}}
}, utils.private(r1))
local r2 = run_sandbox_fun("Repository('test-repo-2', 'http://example.org/repo-2', {subdirs = {'a', 'b'}, priority = 60})")
assert_table_equal({
tp = "repository",
name = "test-repo-2",
repo_uri = "http://example.org/repo-2",
subdirs = {'a', 'b'},
priority = 60,
serial = 2
}, r2)
utils.private(r2).context = nil
assert_table_equal({
index_uri = {["/a"] = {u = "http://example.org/repo-2/a/Packages.gz"}, ["/b"] = {u = "http://example.org/repo-2/b/Packages.gz"}}
}, utils.private(r2))
local r3 = run_sandbox_fun("Repository('test-repo-other', 'http://example.org/repo-other', {index = 'https://example.org/repo-other/Packages.gz'})")
assert_table_equal({
tp = "repository",
name = "test-repo-other",
repo_uri = "http://example.org/repo-other",
index = "https://example.org/repo-other/Packages.gz",
priority = 50,
serial = 3
}, r3)
utils.private(r3).context = nil
assert_table_equal({