Verified Commit 9926663d authored by Karel Koci's avatar Karel Koci 🤘

Integrate new URI implementation

This commit changes a lot of functionality, including user-visible behavior
such as the configuration language.

The most significant change, from the user's point of view, is that in the
configuration language functions no longer return handlers for the
configuration they created. This is to simplify the implementation. The
correct approach is to use the package or repository name instead of a
handler. Handler usage was less powerful and, because of sandboxing, also
unusable across multiple scripts.

There are additional changes in the form of obsoleted extra options for
configuration commands. The repository's extra option subdirs is obsoleted;
it now works differently and does much more. Please review the changes in
the language documentation file.
parent a718f158
This diff is collapsed.
...@@ -35,10 +35,11 @@ local mkdir = mkdir ...@@ -35,10 +35,11 @@ local mkdir = mkdir
local stat = stat local stat = stat
local events_wait = events_wait local events_wait = events_wait
local run_util = run_util local run_util = run_util
local uri = require "uri"
module "utils" module "utils"
-- luacheck: globals lines2set map set2arr arr2set cleanup_dirs dir_ensure mkdirp read_file write_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay randstr arr_prune arr_inv file_exists -- luacheck: globals lines2set map set2arr arr2set cleanup_dirs dir_ensure mkdirp read_file write_file clone shallow_copy table_merge arr_append exception multi_index private filter_best strip table_overlay table_wrap randstr arr_prune arr_inv file_exists uri_syste_cas uri_no_crl uri_config uri_content
--[[ --[[
Convert provided text into set of lines. Doesn't care about the order. Convert provided text into set of lines. Doesn't care about the order.
...@@ -357,6 +358,18 @@ function table_overlay(table) ...@@ -357,6 +358,18 @@ function table_overlay(table)
}) })
end end
--[[
Always return a table. If the input is not a table, it is wrapped in a new
single-element array. If the input already is a table, it is returned as is
(not copied).
]]
function table_wrap(value)
	-- Parameter renamed from "table": shadowing the standard table library
	-- inside this function is a common Lua pitfall.
	if type(value) == "table" then
		return value
	else
		return {value}
	end
end
--[[ --[[
Check whether file exists Check whether file exists
]] ]]
...@@ -370,4 +383,51 @@ function file_exists(name) ...@@ -370,4 +383,51 @@ function file_exists(name)
end end
end end
--[[
Apply the given table of configuration options to the given uri object.
This is a bridge between the old approach of passing Lua tables around and
the new approach of inherited settings on the uri object.
For full documentation of all supported fields see the language
documentation, section Verification.
Any field that is not set in the table is ignored (the corresponding
configuration is left unchanged).
]]
function uri_config(uriobj, config)
	-- TODO and how about veri?
	if config.ca ~= nil then
		uriobj:set_ssl_verify(config.ca)
		uriobj:add_ca(nil) -- passing nil resets any inherited CAs first
		-- Iterate the VALUES of the wrapped table. The previous form
		-- `for ca in pairs(...)` iterated the keys (1, 2, ...), so the
		-- numeric indices were passed to add_ca instead of the CA entries.
		-- NOTE(review): config.ca may also be a boolean sentinel
		-- (system_cas/no_crl) — confirm add_ca handles that case.
		for _, ca in ipairs(table_wrap(config.ca)) do
			uriobj:add_ca(ca)
		end
	end
	if config.crl ~= nil then
		uriobj:add_crl(nil) -- reset inherited CRLs
		-- Same key-vs-value fix as for config.ca above.
		for _, crl in ipairs(table_wrap(config.crl)) do
			uriobj:add_crl(crl)
		end
	end
	if config.ocsp ~= nil then
		uriobj:set_ocsp(config.ocsp)
	end
	if config.pubkey ~= nil then
		uriobj:add_pubkey(nil) -- reset inherited public keys
		-- Same key-vs-value fix as for config.ca above.
		for _, pubkey in ipairs(table_wrap(config.pubkey)) do
			uriobj:add_pubkey(pubkey)
		end
	end
	if config.sig ~= nil then
		uriobj:set_sig(config.sig)
	end
end
-- Fetch the content of the given URI.
-- Returns the downloaded content as the first value and the uri object as
-- the second one (which can then serve as a parent for further uris).
function uri_content(struri, parent, config)
	local root = uri.new()
	local uriobj = root:to_buffer(struri, parent)
	uri_config(uriobj, config)
	-- TODO finish error and others?
	return uriobj:finish(), uriobj
end
return _M return _M
This diff is collapsed.
...@@ -38,7 +38,6 @@ local utils = require "utils" ...@@ -38,7 +38,6 @@ local utils = require "utils"
local backend = require "backend" local backend = require "backend"
local requests = require "requests" local requests = require "requests"
local syscnf = require "syscnf" local syscnf = require "syscnf"
local uri = require "uri"
local uci_ok, uci = pcall(require, "uci") local uci_ok, uci = pcall(require, "uci")
module "sandbox" module "sandbox"
...@@ -52,7 +51,8 @@ local updater_features = utils.arr2set({ ...@@ -52,7 +51,8 @@ local updater_features = utils.arr2set({
'conflicts', 'conflicts',
'abi_change', 'abi_change',
'abi_change_deep', 'abi_change_deep',
'replan_string' 'replan_string',
'no_returns'
}) })
-- Available functions and "constants" from global environment -- Available functions and "constants" from global environment
...@@ -91,8 +91,8 @@ local local_available_funcs = { ...@@ -91,8 +91,8 @@ local local_available_funcs = {
local rest_additional_funcs = { local rest_additional_funcs = {
{"version_match", backend.version_match}, {"version_match", backend.version_match},
{"version_cmp", backend.version_cmp}, {"version_cmp", backend.version_cmp},
{"system_cas", uri.system_cas}, {"system_cas", true},
{"no_crl", uri.no_crl} {"no_crl", false}
} }
state_vars = nil state_vars = nil
...@@ -396,8 +396,7 @@ function run_sandboxed(chunk, name, sec_level, parent, context_merge, context_mo ...@@ -396,8 +396,7 @@ function run_sandboxed(chunk, name, sec_level, parent, context_merge, context_mo
end end
local context = new(sec_level, parent) local context = new(sec_level, parent)
utils.table_merge(context, context_merge or {}) utils.table_merge(context, context_merge or {})
context_mod = context_mod or function () end if context_mod then context_mod(context) end
context_mod(context)
local func = setfenv(chunk, context.env) local func = setfenv(chunk, context.env)
local ok, err = pcall(func) local ok, err = pcall(func)
if ok then if ok then
......
...@@ -25,7 +25,6 @@ local pcall = pcall ...@@ -25,7 +25,6 @@ local pcall = pcall
local next = next local next = next
local type = type local type = type
local assert = assert local assert = assert
local unpack = unpack
local table = table local table = table
local string = string local string = string
local events_wait = events_wait local events_wait = events_wait
...@@ -36,112 +35,84 @@ local ERROR = ERROR ...@@ -36,112 +35,84 @@ local ERROR = ERROR
local utils = require "utils" local utils = require "utils"
local backend = require "backend" local backend = require "backend"
local requests = require "requests" local requests = require "requests"
local uri = require "uri"
module "postprocess" module "postprocess"
-- luacheck: globals get_repos deps_canon conflicts_canon available_packages pkg_aggregate run sort_candidates -- luacheck: globals get_repos deps_canon conflicts_canon available_packages pkg_aggregate run sort_candidates
function get_repos() local function repo_parse(repo)
DBG("Getting repos") repo.tp = 'parsed-repository'
--[[ repo.content = {}
The repository index downloads are already in progress since local name = repo.name .. "/" .. repo.index_uri:uri()
the repository objects have been created. We now register -- Get index
callback for the arrival of data. This might happen right local index = repo.index_uri:finish() -- TODO error?
away or later on. Anyway, after we wait, all the indices if index:sub(1, 2) == string.char(0x1F, 0x8B) then -- copressed index
have been downloaded. DBG("Decompressing index " .. name)
local extr = run_util(function (ecode, _, stdout, stderr)
When we get each index, we detect if the data is gzipped if ecode ~= 0 then
or not. If it is not, the repository is parsed right away. error(utils.exception('repo broken', "Couldn't decompress " .. name .. ": " .. stderr))
If it is, extraction is run in the background and parsing
is scheduled for once it finishes. Eventually, we wait for
all the extractions to finish, and at that point everything
is parsed.
]]
local uris = {} -- The uris we wait for to be downloaded
local extract_events = {} -- The extractions we wait for
local errors = {} -- Collect errors as we go
local fatal = false -- Are any of them a reason to abort?
--[[
We don't care about the order in which we register the callbacks
(which may be different from the order in which they are called
anyway).
]]
for _, repo in pairs(requests.known_repositories_all) do
repo.tp = 'parsed-repository'
repo.content = {}
for subrepo, index_uri in pairs(utils.private(repo).index_uri) do
local name = repo.name .. "/" .. index_uri.uri
table.insert(uris, index_uri)
local function broken(why, extra)
ERROR("Index " .. name .. " is broken (" .. why .. "): " .. tostring(extra))
extra.why = why
extra.repo = name
repo.content[subrepo] = extra
table.insert(errors, extra)
fatal = fatal or not utils.arr2set(repo.ignore or {})[why]
end
local function parse(content)
DBG("Parsing index " .. name)
local ok, list = pcall(backend.repo_parse, content)
if ok then
for _, pkg in pairs(list) do
-- Compute the URI of each package (but don't download it yet, so don't create the uri object)
pkg.uri_raw = repo.repo_uri .. subrepo .. '/' .. pkg.Filename
pkg.repo = repo
end
repo.content[subrepo] = {
tp = "pkg-list",
list = list
}
else
broken('syntax', utils.exception('repo broken', "Couldn't parse the index of " .. name .. ": " .. tostring(list)))
end
end
local function decompressed(ecode, _, stdout, stderr)
DBG("Decompression of " .. name .. " done")
if ecode == 0 then
parse(stdout)
else
broken('syntax', utils.exception('repo broken', "Couldn't decompress " .. name .. ": " .. stderr))
end end
index = stdout
end end
local function downloaded(ok, answer) , nil, index, -1, -1, 'gzip', '-dc')
DBG("Received repository index " .. name) events_wait(extr)
if not ok then end
-- Couldn't download -- Parse index
-- TODO: Once we have validation, this could also mean the integrity is broken, not download DBG("Parsing index " .. name)
broken('missing', answer) local ok, list = pcall(backend.repo_parse, index)
elseif answer:sub(1, 2) == string.char(0x1F, 0x8B) then if not ok then
-- It starts with gzip magic - we want to decompress it local msg = "Couldn't parse the index of " .. name .. ": " .. tostring(list)
DBG("Index " .. name .. " is compressed, decompressing") if not repo.optional then
table.insert(extract_events, run_util(decompressed, nil, answer, -1, -1, 'gzip', '-dc')) error(utils.exception('syntax', msg))
else end
parse(answer) WARN(msg)
end -- TODO we might want to ignore this repository in its fulles instead of this
end
for _, pkg in pairs(list) do
-- Compute the URI of each package (but don't download it yet, so don't create the uri object)
pkg.uri_raw = repo.repo_uri .. '/' .. pkg.Filename
pkg.repo = repo
end
repo.content = list
end
local function repos_failed_download(uri_fail)
-- Locate failed repository and check if we can continue
for _, repo in pairs(requests.known_repositories) do
if uri_fail == repo.index_uri then
local message = "Download failed for repository index " ..
repo.name .. " (" .. repo.index_uri:uri() .. "): " ..
tostring(repo.index_uri:download_error())
if not repo.optional then
error(utils.exception('repo missing', message))
end end
index_uri:cback(downloaded) WARN(message)
repo.tp = 'failed-repository'
break
end end
--[[
We no longer need to keep the uris in there, we
wait for them here and after all is done, we want
the contents to be garbage collected.
]]
utils.private(repo).index_uri = nil
end end
-- Make sure everything is downloaded end
uri.wait(unpack(uris))
-- And extracted function get_repos()
events_wait(unpack(extract_events)) DBG("Downloading repositories indexes")
-- Process any errors -- Run download
local multi = utils.exception('multiple', "Multiple exceptions (" .. #errors .. ")") while true do
multi.errors = errors local uri_fail = requests.repositories_uri_master:download()
if fatal then if uri_fail then
error(multi) repos_failed_download(uri_fail)
elseif next(errors) then else
return multi break
else end
return nil end
-- Collect indexes and parse them
for _, repo in pairs(requests.known_repositories) do
if repo.tp == 'repository' then -- ignore failed repositories
local ok, err = pcall(repo_parse, repo)
if not ok then
-- TODO is this fatal?
error(err)
end
end
end end
end end
...@@ -332,21 +303,20 @@ to form single package object. ...@@ -332,21 +303,20 @@ to form single package object.
]] ]]
function pkg_aggregate() function pkg_aggregate()
DBG("Aggregating packages together") DBG("Aggregating packages together")
for _, repo in pairs(requests.known_repositories_all) do for _, repo in pairs(requests.known_repositories) do
for _, cont in pairs(repo.content) do if repo.tp == "parsed-repository" then
if type(cont) == 'table' and cont.tp == 'pkg-list' then -- TODO this content design is invalid as there can be multiple packages of same name in same repository with different versions
for name, candidate in pairs(cont.list) do for name, candidate in pairs(repo.content) do
if not available_packages[name] then if not available_packages[name] then
available_packages[name] = {candidates = {}, modifiers = {}} available_packages[name] = {candidates = {}, modifiers = {}}
end end
table.insert(available_packages[name].candidates, candidate) table.insert(available_packages[name].candidates, candidate)
if candidate.Provides then -- Add this candidate to package it provides if candidate.Provides then -- Add this candidate to package it provides
for p in candidate.Provides:gmatch("[^, ]+") do for p in candidate.Provides:gmatch("[^, ]+") do
if not available_packages[p] then if not available_packages[p] then
available_packages[p] = {candidates = {}, modifiers = {}} available_packages[p] = {candidates = {}, modifiers = {}}
end
table.insert(available_packages[p].candidates, candidate)
end end
table.insert(available_packages[p].candidates, candidate)
end end
end end
end end
...@@ -467,10 +437,7 @@ function pkg_aggregate() ...@@ -467,10 +437,7 @@ function pkg_aggregate()
end end
function run() function run()
local repo_errors = get_repos() get_repos()
if repo_errors then
WARN("Not all repositories are available")
end
pkg_aggregate() pkg_aggregate()
end end
......
...@@ -261,7 +261,7 @@ local function sat_build(sat, pkgs, requests) ...@@ -261,7 +261,7 @@ local function sat_build(sat, pkgs, requests)
} }
-- Go trough requests and add them to SAT -- Go trough requests and add them to SAT
for _, req in ipairs(requests) do for _, req in ipairs(requests) do
if not pkgs[req.package.name] and not utils.arr2set(req.ignore or {})["missing"] then if not pkgs[req.package.name] and not req.optional then
error(utils.exception('inconsistent', "Requested package " .. req.package.name .. " doesn't exists.")) error(utils.exception('inconsistent', "Requested package " .. req.package.name .. " doesn't exists."))
end end
local req_var = sat:var() local req_var = sat:var()
...@@ -388,7 +388,7 @@ local function build_plan(pkgs, requests, sat, satmap) ...@@ -388,7 +388,7 @@ local function build_plan(pkgs, requests, sat, satmap)
inwstack[name] = #wstack + 1 -- Signal that we are working on this package group. inwstack[name] = #wstack + 1 -- Signal that we are working on this package group.
table.insert(wstack, name) table.insert(wstack, name)
for _, p in pkg_dep_iterate(utils.multi_index(pkg, 'modifier', 'deps') or {}) do -- plan package group dependencies for _, p in pkg_dep_iterate(utils.multi_index(pkg, 'modifier', 'deps') or {}) do -- plan package group dependencies
pkg_plan(p, ignore_missing or utils.arr2set(utils.multi_index(pkg, 'modifier', 'ignore') or {})["deps"], false, "Package " .. name .. " requires package") pkg_plan(p, ignore_missing or utils.multi_index(pkg, 'modifier', 'optional'), false, "Package " .. name .. " requires package")
end end
if not next(candidates) then return end -- We have no candidate, but we passed previous check because it's virtual if not next(candidates) then return end -- We have no candidate, but we passed previous check because it's virtual
local r = {} local r = {}
...@@ -401,7 +401,7 @@ local function build_plan(pkgs, requests, sat, satmap) ...@@ -401,7 +401,7 @@ local function build_plan(pkgs, requests, sat, satmap)
else else
no_pkg_candidate = false no_pkg_candidate = false
for _, p in pkg_dep_iterate(utils.multi_index(candidate, 'deps') or {}) do for _, p in pkg_dep_iterate(utils.multi_index(candidate, 'deps') or {}) do
pkg_plan(p, ignore_missing or utils.arr2set(utils.multi_index(pkg, 'modifier', 'ignore') or {})["deps"], false, "Package " .. name .. " requires package") pkg_plan(p, ignore_missing or utils.multi_index(pkg, 'modifier', 'optional'), false, "Package " .. name .. " requires package")
end end
end end
end end
...@@ -434,7 +434,7 @@ local function build_plan(pkgs, requests, sat, satmap) ...@@ -434,7 +434,7 @@ local function build_plan(pkgs, requests, sat, satmap)
for _, req in pairs(requests) do for _, req in pairs(requests) do
if sat[satmap.req2sat[req]] then -- Plan only if we can satisfy given request if sat[satmap.req2sat[req]] then -- Plan only if we can satisfy given request
if req.tp == "install" then -- And if it is install request, uninstall requests are resolved by not being planned. if req.tp == "install" then -- And if it is install request, uninstall requests are resolved by not being planned.
local pln = pkg_plan(req.package, false, utils.arr2set(req.ignore or {})["missing"], 'Requested package') local pln = pkg_plan(req.package, false, req.optional, 'Requested package')
-- Note that if pln is nil than we ignored missing package. We have to compute with that here -- Note that if pln is nil than we ignored missing package. We have to compute with that here
if pln then if pln then
if req.reinstall then if req.reinstall then
......
...@@ -20,18 +20,17 @@ along with Updater. If not, see <http://www.gnu.org/licenses/>. ...@@ -20,18 +20,17 @@ along with Updater. If not, see <http://www.gnu.org/licenses/>.
local next = next local next = next
local error = error local error = error
local ipairs = ipairs local ipairs = ipairs
local pcall = pcall
local table = table local table = table
local WARN = WARN local WARN = WARN
local INFO = INFO local INFO = INFO
local DIE = DIE local DIE = DIE
local md5 = md5
local sha256 = sha256 local sha256 = sha256
local reexec = reexec local reexec = reexec
local LS_CONF = LS_CONF local LS_CONF = LS_CONF
local LS_PLAN = LS_PLAN local LS_PLAN = LS_PLAN
local LS_DOWN = LS_DOWN local LS_DOWN = LS_DOWN
local update_state = update_state local update_state = update_state
local log_event = log_event
local utils = require "utils" local utils = require "utils"
local syscnf = require "syscnf" local syscnf = require "syscnf"
local sandbox = require "sandbox" local sandbox = require "sandbox"
...@@ -56,16 +55,13 @@ end ...@@ -56,16 +55,13 @@ end
local function required_pkgs(entrypoint) local function required_pkgs(entrypoint)
-- Get the top-level script -- Get the top-level script
local tlc = sandbox.new('Full') local entry_chunk, entry_uri = utils.uri_content(entrypoint, nil, {})
local ep_uri = uri(tlc, entrypoint) local merge = {
local ok, tls = ep_uri:get() -- Note: See requests.script for usage of this value
if not ok then error(tls) end ["parent_script_uri"] = entry_uri
}
update_state(LS_CONF) update_state(LS_CONF)
--[[ local err = sandbox.run_sandboxed(entry_chunk, entrypoint, 'Full', nil, merge)
Run the top level script with full privileges.
The script shall be part of updater anyway.
]]
local err = sandbox.run_sandboxed(tls, "", 'Full')
if err and err.tp == 'error' then error(err) end if err and err.tp == 'error' then error(err) end
update_state(LS_PLAN) update_state(LS_PLAN)
-- Go through all the requirements and decide what we need -- Go through all the requirements and decide what we need
...@@ -103,28 +99,24 @@ function tasks_to_transaction() ...@@ -103,28 +99,24 @@ function tasks_to_transaction()
INFO("Downloading packages") INFO("Downloading packages")
update_state(LS_DOWN) update_state(LS_DOWN)
-- Start packages download -- Start packages download
local uri_master = uri:new()
for _, task in ipairs(tasks) do for _, task in ipairs(tasks) do
if task.action == "require" then if task.action == "require" then
-- Strip sig verification off, packages from repos don't have their own .sig task.file = syscnf.pkg_download_dir .. task.name .. '-' .. task.package.Version .. '.ipk'
-- files, but they are checked by hashes in the (already checked) index. task.real_uri = uri_master:to_file(task.package.Filename, task.file, task.package.repo.index_uri)
local veriopts = utils.shallow_copy(task.package.repo) task.real_uri:add_pubkey() -- do not verify signatures (there are none)
local veri = veriopts.verification or utils.private(task.package.repo).context.verification or 'both' -- TODO on failure: log_event('D', task.name .. " " .. task.package.Version)
if veri == 'both' then
veriopts.verification = 'cert'
elseif veri == 'sig' then
veriopts.verification = 'none'
end
task.real_uri = uri(utils.private(task.package.repo).context, task.package.uri_raw, veriopts)
task.real_uri:cback(function()
log_event('D', task.name .. " " .. task.package.Version)
end)
end end
end end
uri_master:download() -- TODO what if error?
-- Now push all data into the transaction -- Now push all data into the transaction
utils.mkdirp(syscnf.pkg_download_dir)
for _, task in ipairs(tasks) do for _, task in ipairs(tasks) do
if task.action == "require" then if task.action == "require" then
local ok, data = task.real_uri:get() local ok, err = pcall(function() task.real_uri:finish() end)
if not ok then error(data) end if not ok then error(err) end
-- TODO check hash
--[[
if task.package.MD5Sum then if task.package.MD5Sum then
local sum = md5(data) local sum = md5(data)
if sum ~= task.package.MD5Sum then if sum ~= task.package.MD5Sum then
...@@ -137,9 +129,8 @@ function tasks_to_transaction() ...@@ -137,9 +129,8 @@ function tasks_to_transaction()
error(utils.exception("corruption", "The sha256 sum of " .. task.name .. " does not match")) error(utils.exception("corruption", "The sha256 sum of " .. task.name .. " does not match"))
end end
end end
local fpath = syscnf.pkg_download_dir .. task.name .. '-' .. task.package.Version .. '.ipk' ]]
utils.write_file(fpath, data) transaction.queue_install_downloaded(task.file, task.name, task.package.Version, task.modifier)
transaction.queue_install_downloaded(fpath, task.name, task.package.Version, task.modifier)
elseif task.action == "remove" then elseif task.action == "remove" then
transaction.queue_remove(task.name) transaction.queue_remove(task.name)
else else
......
...@@ -259,7 +259,7 @@ Test the chain of functions ‒ unpack, examine ...@@ -259,7 +259,7 @@ Test the chain of functions ‒ unpack, examine
]] ]]
function test_pkg_unpack() function test_pkg_unpack()
syscnf.set_root_dir(tmpdir) syscnf.set_root_dir(tmpdir)
local path = B.pkg_unpack(datadir .. "updater.ipk") local path = B.pkg_unpack(datadir .. "repo/updater.ipk")
-- Make sure it is deleted on teardown -- Make sure it is deleted on teardown
table.insert(tmp_dirs, path) table.insert(tmp_dirs, path)
-- Check list of extracted files -- Check list of extracted files
...@@ -902,7 +902,7 @@ function test_config_modified() ...@@ -902,7 +902,7 @@ function test_config_modified()
-- If a file doesn't exist, it returns nil -- If a file doesn't exist, it returns nil
assert_nil(B.config_modified("/file/does/not/exist", "12345678901234567890123456789012")) assert_nil(B.config_modified("/file/does/not/exist", "12345678901234567890123456789012"))
-- We test on a non-config file, but it the same. -- We test on a non-config file, but it the same.
local file = (os.getenv("S") or ".") .. "/tests/data/updater.ipk" local file = (os.getenv("S") or ".") .. "/tests/data/repo/updater.ipk"
assert_false(B.config_modified(file, "182171ccacfc32a9f684479509ac471a")) assert_false(B.config_modified(file, "182171ccacfc32a9f684479509ac471a"))
assert(B.config_modified(file, "282171ccacfc32a9f684479509ac471b")) assert(B.config_modified(file, "282171ccacfc32a9f684479509ac471b"))
assert_false(B.config_modified(file, "4f54362b30f53ae6862b11ff34d22a8d4510ed2b3e757b1f285dbd1033666e55")) assert_false(B.config_modified(file, "4f54362b30f53ae6862b11ff34d22a8d4510ed2b3e757b1f285dbd1033666e55"))
......
Package: 6in4
Version: 21-2
Depends: libc, kmod-sit
Source: package/network/ipv6/6in4
License: GPL-2.0
Section: net
Maintainer: Jo-Philipp Wich <xm@subsignal.org>
Architecture: all
Installed-Size: 1558
Filename: 6in4_21-2_all.ipk
Size: 2534
MD5Sum: a2a58a05c002cf7b45fbe364794d96a5
SHA256sum: 06c3e5630a54a6c2d95ff13945b76e4122ac1a9e533fe4665c501ae26d55933d
Description: Provides support for 6in4 tunnels in /etc/config/network.
Refer to http://wiki.openwrt.org/doc/uci/network for
configuration details.
Package: 6rd
Version: 9-2
Depends: libc, kmod-sit
Source: package/network/ipv6/6rd
License: GPL-2.0
Section: net
Maintainer: Steven Barth <cyrus@openwrt.org>
Architecture: all
Installed-Size: 3432
Filename: 6rd_9-2_all.ipk
Size: 4416
MD5Sum: 2b46cba96c887754f879676be77615e5
SHA256sum: e1081e495d0055f962a0ea4710239447eabf596f7acb06ccf0bd6f06b125fda8
Description: Provides support for 6rd tunnels in /etc/config/network.
Refer to http://wiki.openwrt.org/doc/uci/network for
configuration details.
...@@ -469,7 +469,7 @@ function test_missing_ignore_deps() ...@@ -469,7 +469,7 @@ function test_missing_ignore_deps()
tp = 'package', tp = 'package',
name = 'pkg', name = 'pkg',
}, },
ignore = {'missing'}, optional = true,
priority = 50, priority = 50,
} }
} }
...@@ -1834,7 +1834,7 @@ function test_request_no_candidate_ignore() ...@@ -1834,7 +1834,7 @@ function test_request_no_candidate_ignore()
tp = 'package', tp = 'package',
name = 'pkg', name = 'pkg',
}, },
ignore = {'missing'}, optional = true,
priority = 50, priority = 50,
} }
} }
...@@ -1863,7 +1863,7 @@ function test_missing_install() ...@@ -1863,7 +1863,7 @@ function test_missing_install()
tp = 'package', tp = 'package',
name = 'pkg2' name = 'pkg2'
}, },
ignore = {'missing'}, optional = true,
priority = 50, priority = 50,
} }
} }
...@@ -1885,7 +1885,7 @@ function test_missing_dep_ignore() ...@@ -1885,7 +1885,7 @@ function test_missing_dep_ignore()
pkg1 = { pkg1 = {
candidates = {{Package = 'pkg1', deps = 'pkg2', repo = def_repo}}, candidates = {{Package = 'pkg1', deps = 'pkg2', repo = def_repo}},
modifier = { modifier = {
ignore = {"deps"} optional = true
}, },
name = "pkg1" name = "pkg1"
} }
...@@ -1906,7 +1906,7 @@ function test_missing_dep_ignore() ...@@ -1906,7 +1906,7 @@ function test_missing_dep_ignore()
action = "require", action = "require",
package = {Package = 'pkg1', deps = 'pkg2', repo = def_repo}, package = {Package = 'pkg1', deps = 'pkg2', repo = def_repo},
modifier = { modifier = {
ignore = {"deps"} optional = true
}, },
critical = false, critical = false,
name = "pkg1" name = "pkg1"
......