feat: init commit
This commit is contained in:
1
CODEOWNERS
Normal file
1
CODEOWNERS
Normal file
@@ -0,0 +1 @@
|
|||||||
|
* @kubewarden/kubewarden-developers
|
||||||
45
Cargo.toml
Normal file
45
Cargo.toml
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
[package]
|
||||||
|
name = "policy-evaluator"
|
||||||
|
version = "0.6.0"
|
||||||
|
authors = [
|
||||||
|
"Flavio Castelli <fcastelli@suse.com>",
|
||||||
|
"Rafael Fernández López <rfernandezlopez@suse.com>"
|
||||||
|
]
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[workspace]
|
||||||
|
members = [
|
||||||
|
"crates/burrego"
|
||||||
|
]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
anyhow = "1.0"
|
||||||
|
base64 = "0.21"
|
||||||
|
burrego = { path = "crates/burrego" }
|
||||||
|
cached = "0.42"
|
||||||
|
dns-lookup = "1.0"
|
||||||
|
json-patch = "0.3"
|
||||||
|
kube = { version = "0.78.0", default-features = false, features = ["client", "rustls-tls"] }
|
||||||
|
k8s-openapi = { version = "0.17.0", default-features = false }
|
||||||
|
kubewarden-policy-sdk = "0.8.7"
|
||||||
|
itertools = "0.10"
|
||||||
|
lazy_static = "1.4"
|
||||||
|
policy-fetcher = { git = "https://github.com/kubewarden/policy-fetcher", tag = "v0.7.17" }
|
||||||
|
serde_json = "1.0"
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
sha2 = "0.10"
|
||||||
|
tokio = { version = "^1", features = ["rt", "rt-multi-thread"] }
|
||||||
|
tracing = "0.1"
|
||||||
|
tracing-futures = "0.2"
|
||||||
|
url = { version = "2.2", features = ["serde"] }
|
||||||
|
validator = { version = "0.16", features = ["derive"] }
|
||||||
|
wasmparser = "0.96"
|
||||||
|
wapc = "1.0.0"
|
||||||
|
wasmtime-provider = { version = "1.3.3", features = ["cache"] }
|
||||||
|
picky = { version = "7.0.0-rc.3", default-features = false, features = [ "x509", "ec", "chrono_conversion" ] }
|
||||||
|
chrono = "0.4.23"
|
||||||
|
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
assert-json-diff = "2.0"
|
||||||
|
k8s-openapi = { version = "0.17.0", default-features = false, features = ["v1_24"] }
|
||||||
201
LICENSE
Normal file
201
LICENSE
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
25
Makefile
Normal file
25
Makefile
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
KUBE_API_VERSION?=1.24
|
||||||
|
|
||||||
|
.PHONY: build
|
||||||
|
build:
|
||||||
|
K8S_OPENAPI_ENABLED_VERSION=$(KUBE_API_VERSION) cargo build --release
|
||||||
|
|
||||||
|
.PHONY: fmt
|
||||||
|
fmt:
|
||||||
|
K8S_OPENAPI_ENABLED_VERSION=$(KUBE_API_VERSION) cargo fmt --all -- --check
|
||||||
|
|
||||||
|
.PHONY: lint
|
||||||
|
lint:
|
||||||
|
K8S_OPENAPI_ENABLED_VERSION=$(KUBE_API_VERSION) cargo clippy --workspace -- -D warnings
|
||||||
|
|
||||||
|
.PHONY: check
|
||||||
|
check:
|
||||||
|
K8S_OPENAPI_ENABLED_VERSION=$(KUBE_API_VERSION) cargo check
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
|
test: fmt lint
|
||||||
|
cargo test --workspace
|
||||||
|
|
||||||
|
.PHONY: clean
|
||||||
|
clean:
|
||||||
|
cargo clean
|
||||||
4
README.md
Normal file
4
README.md
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# policy-evaluator
|
||||||
|
|
||||||
|
Crate used by Kubewarden that is able to evaluate policies with a
|
||||||
|
given input, request to evaluate and settings.
|
||||||
77
crates/burrego/.github/workflows/tests.yml
vendored
Normal file
77
crates/burrego/.github/workflows/tests.yml
vendored
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
name: Continuous integration
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check:
|
||||||
|
name: Check
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: check
|
||||||
|
|
||||||
|
test:
|
||||||
|
name: Test Suite
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: test
|
||||||
|
args: --workspace
|
||||||
|
|
||||||
|
fmt:
|
||||||
|
name: Rustfmt
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- run: rustup component add rustfmt
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: fmt
|
||||||
|
args: --all -- --check
|
||||||
|
|
||||||
|
clippy:
|
||||||
|
name: Clippy
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- run: rustup component add clippy
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: clippy
|
||||||
|
args: -- -D warnings
|
||||||
|
|
||||||
|
e2e:
|
||||||
|
name: e2e tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Setup BATS
|
||||||
|
uses: mig4/setup-bats@v1
|
||||||
|
with:
|
||||||
|
bats-version: 1.2.1
|
||||||
|
- name: run e2e tests
|
||||||
|
run: make e2e-test
|
||||||
3
crates/burrego/.gitignore
vendored
Normal file
3
crates/burrego/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
target
|
||||||
|
*.wasm
|
||||||
|
*.tar.gz
|
||||||
32
crates/burrego/Cargo.toml
Normal file
32
crates/burrego/Cargo.toml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
[package]
|
||||||
|
name = "burrego"
|
||||||
|
version = "0.3.1"
|
||||||
|
authors = ["Flavio Castelli <fcastelli@suse.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
base64 = "0.21.0"
|
||||||
|
chrono = "0.4.23"
|
||||||
|
chrono-tz = "0.8.1"
|
||||||
|
gtmpl = "0.7.1"
|
||||||
|
gtmpl_value = "0.5.1"
|
||||||
|
itertools = "0.10.5"
|
||||||
|
json-patch = "0.3.0"
|
||||||
|
lazy_static = "1.4.0"
|
||||||
|
regex = "1.5.6"
|
||||||
|
semver = "1.0.16"
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
serde_json = "1.0.91"
|
||||||
|
serde_yaml = "0.9.16"
|
||||||
|
thiserror = "1.0"
|
||||||
|
tracing = "0.1"
|
||||||
|
tracing-subscriber = { version= "0.3", features = ["fmt", "env-filter"] }
|
||||||
|
url = "2.2.2"
|
||||||
|
wasmtime = "4.0"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
anyhow = "1.0"
|
||||||
|
assert-json-diff = "2.0.2"
|
||||||
|
clap = { version = "4.0", features = [ "derive" ] }
|
||||||
24
crates/burrego/Makefile
Normal file
24
crates/burrego/Makefile
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
TESTDIRS := $(wildcard test_data/*)
|
||||||
|
.PHONY: $(TESTDIRS)
|
||||||
|
|
||||||
|
.PHONY: fmt
|
||||||
|
fmt:
|
||||||
|
cargo fmt --all -- --check
|
||||||
|
|
||||||
|
.PHONY: lint
|
||||||
|
lint:
|
||||||
|
cargo clippy -- -D warnings
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
|
test: fmt lint e2e-tests
|
||||||
|
cargo test
|
||||||
|
|
||||||
|
.PHONY: clean
|
||||||
|
clean:
|
||||||
|
cargo clean
|
||||||
|
|
||||||
|
|
||||||
|
.PHONY: e2e-tests
|
||||||
|
e2e-tests: $(TESTDIRS)
|
||||||
|
$(TESTDIRS):
|
||||||
|
$(MAKE) -C $@
|
||||||
2
crates/burrego/examples/.gitignore
vendored
Normal file
2
crates/burrego/examples/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
*.tar.gz
|
||||||
|
*.wasm
|
||||||
139
crates/burrego/examples/cli/main.rs
Normal file
139
crates/burrego/examples/cli/main.rs
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
|
||||||
|
use serde_json::json;
|
||||||
|
use std::{fs::File, io::BufReader, path::PathBuf, process};
|
||||||
|
|
||||||
|
use tracing::debug;
|
||||||
|
use tracing_subscriber::prelude::*;
|
||||||
|
use tracing_subscriber::{fmt, EnvFilter};
|
||||||
|
|
||||||
|
extern crate burrego;
|
||||||
|
|
||||||
|
extern crate clap;
|
||||||
|
use clap::Parser;
|
||||||
|
|
||||||
|
#[derive(clap::Parser, Debug)]
|
||||||
|
#[clap(author, version, about, long_about = None)]
|
||||||
|
pub(crate) struct Cli {
|
||||||
|
/// Enable verbose mode
|
||||||
|
#[clap(short, long, value_parser)]
|
||||||
|
verbose: bool,
|
||||||
|
|
||||||
|
#[clap(subcommand)]
|
||||||
|
command: Commands,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(clap::Subcommand, Debug)]
|
||||||
|
pub(crate) enum Commands {
|
||||||
|
/// Evaluate a Rego policy compiled to WebAssembly
|
||||||
|
Eval {
|
||||||
|
/// JSON string with the input
|
||||||
|
#[clap(short, long, value_name = "JSON", value_parser)]
|
||||||
|
input: Option<String>,
|
||||||
|
|
||||||
|
/// Path to file containing the JSON input
|
||||||
|
#[clap(long, value_name = "JSON_FILE", value_parser)]
|
||||||
|
input_path: Option<String>,
|
||||||
|
|
||||||
|
/// JSON string with the data
|
||||||
|
#[clap(short, long, value_name = "JSON", default_value = "{}", value_parser)]
|
||||||
|
data: String,
|
||||||
|
|
||||||
|
/// OPA entrypoint to evaluate
|
||||||
|
#[clap(
|
||||||
|
short,
|
||||||
|
long,
|
||||||
|
value_name = "ENTRYPOINT_ID",
|
||||||
|
default_value = "0",
|
||||||
|
value_parser
|
||||||
|
)]
|
||||||
|
entrypoint: String,
|
||||||
|
|
||||||
|
/// Path to WebAssembly module to load
|
||||||
|
#[clap(value_parser, value_name = "WASM_FILE", value_parser)]
|
||||||
|
policy: String,
|
||||||
|
},
|
||||||
|
/// List the supported builtins
|
||||||
|
Builtins,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
let cli = Cli::parse();
|
||||||
|
|
||||||
|
// setup logging
|
||||||
|
let level_filter = if cli.verbose { "debug" } else { "info" };
|
||||||
|
let filter_layer = EnvFilter::new(level_filter)
|
||||||
|
.add_directive("wasmtime_cranelift=off".parse().unwrap()) // this crate generates lots of tracing events we don't care about
|
||||||
|
.add_directive("cranelift_codegen=off".parse().unwrap()) // this crate generates lots of tracing events we don't care about
|
||||||
|
.add_directive("cranelift_wasm=off".parse().unwrap()) // this crate generates lots of tracing events we don't care about
|
||||||
|
.add_directive("regalloc=off".parse().unwrap()); // this crate generates lots of tracing events we don't care about
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(filter_layer)
|
||||||
|
.with(fmt::layer().with_writer(std::io::stderr))
|
||||||
|
.init();
|
||||||
|
|
||||||
|
match &cli.command {
|
||||||
|
Commands::Builtins => {
|
||||||
|
println!("These are the OPA builtins currently supported:");
|
||||||
|
for b in burrego::Evaluator::implemented_builtins() {
|
||||||
|
println!(" - {}", b);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Commands::Eval {
|
||||||
|
input,
|
||||||
|
input_path,
|
||||||
|
data,
|
||||||
|
entrypoint,
|
||||||
|
policy,
|
||||||
|
} => {
|
||||||
|
if input.is_some() && input_path.is_some() {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Cannot use 'input' and 'input-path' at the same time"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
let input_value: serde_json::Value = if let Some(input_json) = input {
|
||||||
|
serde_json::from_str(&input_json)
|
||||||
|
.map_err(|e| anyhow!("Cannot parse input: {:?}", e))?
|
||||||
|
} else if let Some(input_filename) = input_path {
|
||||||
|
let file = File::open(input_filename)
|
||||||
|
.map_err(|e| anyhow!("Cannot read input file: {:?}", e))?;
|
||||||
|
let reader = BufReader::new(file);
|
||||||
|
serde_json::from_reader(reader)?
|
||||||
|
} else {
|
||||||
|
json!({})
|
||||||
|
};
|
||||||
|
|
||||||
|
let data_value: serde_json::Value =
|
||||||
|
serde_json::from_str(&data).map_err(|e| anyhow!("Cannot parse data: {:?}", e))?;
|
||||||
|
let mut evaluator = burrego::EvaluatorBuilder::default()
|
||||||
|
.policy_path(&PathBuf::from(policy))
|
||||||
|
.host_callbacks(burrego::HostCallbacks::default())
|
||||||
|
.build()?;
|
||||||
|
|
||||||
|
let (major, minor) = evaluator.opa_abi_version()?;
|
||||||
|
debug!(major, minor, "OPA Wasm ABI");
|
||||||
|
|
||||||
|
let entrypoints = evaluator.entrypoints()?;
|
||||||
|
debug!(?entrypoints, "OPA entrypoints");
|
||||||
|
|
||||||
|
let not_implemented_builtins = evaluator.not_implemented_builtins()?;
|
||||||
|
if !not_implemented_builtins.is_empty() {
|
||||||
|
eprintln!("Cannot evaluate policy, these builtins are not yet implemented:");
|
||||||
|
for b in not_implemented_builtins {
|
||||||
|
eprintln!(" - {}", b);
|
||||||
|
}
|
||||||
|
process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
let entrypoint_id = match entrypoint.parse() {
|
||||||
|
Ok(id) => id,
|
||||||
|
_ => evaluator.entrypoint_id(&String::from(entrypoint))?,
|
||||||
|
};
|
||||||
|
|
||||||
|
let evaluation_res = evaluator.evaluate(entrypoint_id, &input_value, &data_value)?;
|
||||||
|
println!("{}", serde_json::to_string_pretty(&evaluation_res)?);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
12
crates/burrego/examples/gatekeeper/Makefile
Normal file
12
crates/burrego/examples/gatekeeper/Makefile
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
SOURCES=$(shell find . -name "*.rego")
|
||||||
|
OBJECTS=$(SOURCES:%.rego=%.wasm)
|
||||||
|
|
||||||
|
all: $(OBJECTS)
|
||||||
|
|
||||||
|
%.wasm: %.rego
|
||||||
|
opa build -t wasm -e policy/violation -o $*.tar.gz $<
|
||||||
|
tar -xf $*.tar.gz --transform "s|policy.wasm|$*.wasm|" /policy.wasm
|
||||||
|
rm $*.tar.gz
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -f *.wasm *.tar.gz
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
package policy
|
||||||
|
|
||||||
|
violation[{"msg": msg}] {
|
||||||
|
object_namespace := input.review.object.metadata.namespace
|
||||||
|
satisfied := [allowed_namespace | namespace = input.parameters.allowed_namespaces[_]; allowed_namespace = object_namespace == namespace]
|
||||||
|
not any(satisfied)
|
||||||
|
msg := sprintf("object created under an invalid namespace %s; allowed namespaces are %v", [object_namespace, input.parameters.allowed_namespaces])
|
||||||
|
}
|
||||||
6
crates/burrego/examples/gatekeeper/always-accept.rego
Normal file
6
crates/burrego/examples/gatekeeper/always-accept.rego
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
package policy
|
||||||
|
|
||||||
|
violation[{"msg": msg}] {
|
||||||
|
false
|
||||||
|
msg := ""
|
||||||
|
}
|
||||||
5
crates/burrego/examples/gatekeeper/always-reject.rego
Normal file
5
crates/burrego/examples/gatekeeper/always-reject.rego
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
package policy
|
||||||
|
|
||||||
|
violation[{"msg": msg}] {
|
||||||
|
msg := "this is not allowed"
|
||||||
|
}
|
||||||
12
crates/burrego/examples/opa/Makefile
Normal file
12
crates/burrego/examples/opa/Makefile
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
SOURCES=$(shell find . -name "*.rego")
|
||||||
|
OBJECTS=$(SOURCES:%.rego=%.wasm)
|
||||||
|
|
||||||
|
all: $(OBJECTS)
|
||||||
|
|
||||||
|
%.wasm: %.rego
|
||||||
|
opa build -t wasm -e policy/main utility/policy.rego -o $*.tar.gz $<
|
||||||
|
tar -xf $*.tar.gz --transform "s|policy.wasm|$*.wasm|" /policy.wasm
|
||||||
|
rm $*.tar.gz
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -f *.wasm *.tar.gz
|
||||||
8
crates/burrego/examples/opa/accept-in-namespaces.rego
Normal file
8
crates/burrego/examples/opa/accept-in-namespaces.rego
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
package kubernetes.admission
|
||||||
|
|
||||||
|
deny[msg] {
|
||||||
|
object_namespace := input.request.object.metadata.namespace
|
||||||
|
satisfied := [allowed_namespace | namespace = data.allowed_namespaces[_]; allowed_namespace = object_namespace == namespace]
|
||||||
|
not any(satisfied)
|
||||||
|
msg := sprintf("object created under an invalid namespace %s; allowed namespaces are %v", [object_namespace, data.allowed_namespaces])
|
||||||
|
}
|
||||||
6
crates/burrego/examples/opa/always-accept.rego
Normal file
6
crates/burrego/examples/opa/always-accept.rego
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
package kubernetes.admission
|
||||||
|
|
||||||
|
deny[msg] {
|
||||||
|
false
|
||||||
|
msg := ""
|
||||||
|
}
|
||||||
5
crates/burrego/examples/opa/always-reject.rego
Normal file
5
crates/burrego/examples/opa/always-reject.rego
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
package kubernetes.admission
|
||||||
|
|
||||||
|
deny[msg] {
|
||||||
|
msg := "this is not allowed"
|
||||||
|
}
|
||||||
8
crates/burrego/examples/opa/no-default-namespace.rego
Normal file
8
crates/burrego/examples/opa/no-default-namespace.rego
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
package kubernetes.admission
|
||||||
|
|
||||||
|
# RBAC alone would suffice here, but we create a policy just to show
|
||||||
|
# how it can be done as well.
|
||||||
|
deny[msg] {
|
||||||
|
input.request.object.metadata.namespace == "default"
|
||||||
|
msg := "you cannot use the default namespace"
|
||||||
|
}
|
||||||
12
crates/burrego/examples/opa/utility/README.md
Normal file
12
crates/burrego/examples/opa/utility/README.md
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Open Policy Agent utility
|
||||||
|
|
||||||
|
This folder contains the entry point for Open Policy Agent policies.
|
||||||
|
|
||||||
|
Since Open Policy Agent policies have to produce an `AdmissionReview`
|
||||||
|
object, this utility library contains the Rego entry point that
|
||||||
|
generates such `AdmissionReview`, based on whether the `deny` query
|
||||||
|
inside the package `kubernetes.admission` (defined by the policy
|
||||||
|
itself) is evaluated to `true`.
|
||||||
|
|
||||||
|
If `deny` evaluates to true, the produced `AdmissionReview` will
|
||||||
|
reject the request. Otherwise, it will be accepted.
|
||||||
23
crates/burrego/examples/opa/utility/policy.rego
Normal file
23
crates/burrego/examples/opa/utility/policy.rego
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
package policy
|
||||||
|
|
||||||
|
import data.kubernetes.admission
|
||||||
|
|
||||||
|
main = {
|
||||||
|
"apiVersion": "admission.k8s.io/v1",
|
||||||
|
"kind": "AdmissionReview",
|
||||||
|
"response": response,
|
||||||
|
}
|
||||||
|
|
||||||
|
response = {
|
||||||
|
"uid": input.request.uid,
|
||||||
|
"allowed": false,
|
||||||
|
"status": {"message": reason},
|
||||||
|
} {
|
||||||
|
reason = concat(", ", admission.deny)
|
||||||
|
reason != ""
|
||||||
|
} else = {
|
||||||
|
"uid": input.request.uid,
|
||||||
|
"allowed": true,
|
||||||
|
} {
|
||||||
|
true
|
||||||
|
}
|
||||||
39
crates/burrego/src/builtins/builtins_helper.rs
Normal file
39
crates/burrego/src/builtins/builtins_helper.rs
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
use super::{get_builtins, BuiltinFunctionsMap};
use crate::errors::{BurregoError, Result};

use lazy_static::lazy_static;
use std::sync::RwLock;
use tracing::debug;

lazy_static! {
    /// Process-wide registry of the natively implemented Rego builtins,
    /// shared behind an RwLock so evaluations on multiple threads can
    /// dispatch builtin calls concurrently.
    pub(crate) static ref BUILTINS_HELPER: RwLock<BuiltinsHelper> = {
        RwLock::new(BuiltinsHelper {
            builtins: get_builtins(),
        })
    };
}

/// Dispatches builtin invocations coming from the policy to the native
/// Rust implementations registered in `builtins`.
pub(crate) struct BuiltinsHelper {
    // Map of builtin name -> native implementation, populated once via
    // `get_builtins()`.
    builtins: BuiltinFunctionsMap,
}

impl BuiltinsHelper {
    /// Looks up `builtin_name` in the registry and invokes it with `args`.
    ///
    /// Returns `BurregoError::BuiltinNotImplementedError` when the builtin
    /// is not known; otherwise forwards whatever the builtin returns.
    pub(crate) fn invoke(
        &self,
        builtin_name: &str,
        args: &[serde_json::Value],
    ) -> Result<serde_json::Value> {
        let builtin_fn = self
            .builtins
            .get(builtin_name)
            .ok_or_else(|| BurregoError::BuiltinNotImplementedError(builtin_name.to_string()))?;

        // `args` is a slice of JSON values, serialization cannot fail here.
        debug!(
            builtin = builtin_name,
            args = serde_json::to_string(&args)
                .expect("cannot convert builtins args to JSON")
                .as_str(),
            "invoking builtin"
        );
        builtin_fn(args)
    }
}
|
||||||
20
crates/burrego/src/builtins/debugging.rs
Normal file
20
crates/burrego/src/builtins/debugging.rs
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
|
||||||
|
#[tracing::instrument(skip(args))]
|
||||||
|
pub fn trace(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "trace".to_string(),
|
||||||
|
message: "Wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let message_str = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "trace".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
tracing::debug!("{}", message_str);
|
||||||
|
|
||||||
|
Ok(serde_json::Value::Null)
|
||||||
|
}
|
||||||
583
crates/burrego/src/builtins/encoding.rs
Normal file
583
crates/burrego/src/builtins/encoding.rs
Normal file
@@ -0,0 +1,583 @@
|
|||||||
|
pub mod base64url {
    use crate::errors::{BurregoError, Result};
    use base64::{engine::general_purpose, Engine as _};

    /// A base64 engine that uses URL_SAFE alphabet and escapes using no padding
    /// For performance reasons, it's recommended to cache its creation
    pub const BASE64_ENGINE: general_purpose::GeneralPurpose =
        general_purpose::GeneralPurpose::new(&base64::alphabet::URL_SAFE, general_purpose::NO_PAD);

    /// Implementation of the Rego `base64url.encode_no_pad(x)` builtin:
    /// encodes the input string using the URL-safe base64 alphabet,
    /// omitting the trailing `=` padding characters.
    pub fn encode_no_pad(args: &[serde_json::Value]) -> Result<serde_json::Value> {
        // The builtin accepts exactly one argument.
        if args.len() != 1 {
            return Err(BurregoError::BuiltinError {
                name: "base64url.encode_no_pad".to_string(),
                message: "wrong number of arguments".to_string(),
            });
        }

        let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
            name: "base64url.encode_no_pad".to_string(),
            message: "1st parameter is not a string".to_string(),
        })?;

        let res = BASE64_ENGINE.encode(input);

        serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
            name: "base64url.encode_no_pad".to_string(),
            message: format!("cannot convert value into JSON: {:?}", e),
        })
    }

    #[cfg(test)]
    mod test {
        use super::*;
        use serde_json::json;

        #[test]
        fn test_encode_no_pad() {
            let input = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";

            let args: Vec<serde_json::Value> = vec![json!(input)];
            let actual = encode_no_pad(&args);
            assert!(actual.is_ok());

            let actual = actual.unwrap();
            assert_eq!(json!(BASE64_ENGINE.encode(input)), actual);

            // Sanity check: a padded engine must produce a different string.
            let engine_with_pad = general_purpose::GeneralPurpose::new(
                &base64::alphabet::URL_SAFE,
                general_purpose::PAD,
            );

            assert_ne!(json!(engine_with_pad.encode(input)), actual);
        }
    }
}
|
||||||
|
|
||||||
|
pub mod urlquery {
|
||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
pub fn encode(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut url =
|
||||||
|
Url::parse("https://example.com/").map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode".to_string(),
|
||||||
|
message: format!("internal error 1 - {:?}", e),
|
||||||
|
})?;
|
||||||
|
url.set_query(Some(format!("input={}", input).as_str()));
|
||||||
|
|
||||||
|
let res = url.query().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode".to_string(),
|
||||||
|
message: "internal error 2".to_string(),
|
||||||
|
})?;
|
||||||
|
let res = res
|
||||||
|
.strip_prefix("input=")
|
||||||
|
.ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode".to_string(),
|
||||||
|
message: "internal error 3".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn decode(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut url =
|
||||||
|
Url::parse("https://example.com/").map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode".to_string(),
|
||||||
|
message: format!("internal error 1 - {:?}", e),
|
||||||
|
})?;
|
||||||
|
url.set_query(Some(format!("input={}", input).as_str()));
|
||||||
|
|
||||||
|
let mut pairs = url.query_pairs();
|
||||||
|
if pairs.count() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode".to_string(),
|
||||||
|
message: "internal error 2".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let (_, value) = pairs.next().unwrap();
|
||||||
|
serde_json::to_value(value).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn encode_object(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode_object".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let obj = args[0]
|
||||||
|
.as_object()
|
||||||
|
.ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode_object".to_string(),
|
||||||
|
message: "1st parameter is not an object".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut url =
|
||||||
|
Url::parse("https://example.com/").map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode_object".to_string(),
|
||||||
|
message: format!("internal error 1 - {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut queries: Vec<String> = Vec::new();
|
||||||
|
for (key, value) in obj.iter() {
|
||||||
|
let value_str = value.as_str();
|
||||||
|
if value_str.is_none() {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode_object".to_string(),
|
||||||
|
message: format!("the value of key {} is not a string", key),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
queries.push(format!("{}={}", key, value_str.unwrap()));
|
||||||
|
}
|
||||||
|
url.set_query(Some(queries.join("&").as_str()));
|
||||||
|
|
||||||
|
let res = url.query().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode_object".to_string(),
|
||||||
|
message: "internal error 2".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.encode_object".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn decode_object(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode_object".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut url =
|
||||||
|
Url::parse("https://example.com/").map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode_object".to_string(),
|
||||||
|
message: format!("internal error 1 - {:?}", e),
|
||||||
|
})?;
|
||||||
|
url.set_query(Some(input));
|
||||||
|
|
||||||
|
let mut res: HashMap<String, String> = HashMap::new();
|
||||||
|
let pairs = url.query_pairs();
|
||||||
|
for (key, value) in pairs {
|
||||||
|
res.insert(String::from(key), String::from(value));
|
||||||
|
}
|
||||||
|
|
||||||
|
serde_json::to_value(&res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "urlquery.decode_object".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use assert_json_diff::assert_json_eq;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_encode() {
|
||||||
|
let input = "español";
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = encode(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
let actual = actual.unwrap();
|
||||||
|
assert_eq!(json!("espa%C3%B1ol"), actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_decode() {
|
||||||
|
let input = "espa%C3%B1ol";
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = decode(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
let actual = actual.unwrap();
|
||||||
|
assert_eq!(json!("español"), actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_encode_object() {
|
||||||
|
let input = json!(
|
||||||
|
{
|
||||||
|
"language": "español",
|
||||||
|
"name": "Rafael Fernández López"
|
||||||
|
});
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = encode_object(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
assert_json_eq!(
|
||||||
|
json!("language=espa%C3%B1ol&name=Rafael%20Fern%C3%A1ndez%20L%C3%B3pez"),
|
||||||
|
actual.unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_encode_object_does_not_have_string_values() {
|
||||||
|
let input = json!(
|
||||||
|
{
|
||||||
|
"language": "español",
|
||||||
|
"name": "Rafael Fernández López",
|
||||||
|
"awesomeness": 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = encode_object(&args);
|
||||||
|
assert!(actual.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_decode_object() {
|
||||||
|
let expected = json!(
|
||||||
|
{
|
||||||
|
"language": "español",
|
||||||
|
"name": "Rafael Fernández López"
|
||||||
|
});
|
||||||
|
let input = json!("language=espa%C3%B1ol&name=Rafael%20Fern%C3%A1ndez%20L%C3%B3pez");
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = decode_object(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
assert_json_eq!(expected, actual.unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod json {
|
||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
|
||||||
|
pub fn is_valid(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "json.is_valid".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "json.is_valid".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let v: serde_json::Result<serde_json::Value> = serde_json::from_str(input);
|
||||||
|
let res = v.is_ok();
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "json.is_valid".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use assert_json_diff::assert_json_eq;
|
||||||
|
use serde_json::json;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_is_valid() {
|
||||||
|
let mut cases: HashMap<String, bool> = HashMap::new();
|
||||||
|
cases.insert(String::from("[1,2]"), true);
|
||||||
|
cases.insert(String::from("[1,2"), false);
|
||||||
|
|
||||||
|
for (input, expected) in cases.iter() {
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = is_valid(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
let actual = actual.unwrap();
|
||||||
|
assert_json_eq!(json!(expected), actual);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod yaml {
|
||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
|
||||||
|
pub fn marshal(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "yaml.marshal".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input: serde_json::Value = args[0].clone();
|
||||||
|
|
||||||
|
// convert the generic input json value into a generic yaml value
|
||||||
|
let value: serde_yaml::Value =
|
||||||
|
serde_json::from_value(input).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.marshal".to_string(),
|
||||||
|
message: format!(" cannot convert input object to yaml - {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// marshal from yaml to string
|
||||||
|
let res = serde_yaml::to_string(&value).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.marshal".to_string(),
|
||||||
|
|
||||||
|
message: format!("marshal error - {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.marshal".to_string(),
|
||||||
|
message: format!("cannot convert result into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn unmarshal(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "yaml.unmarshal".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.unmarshal".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let res: serde_json::Value =
|
||||||
|
serde_yaml::from_str(input).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.unmarshal".to_string(),
|
||||||
|
message: format!("cannot convert input object to json - {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.unmarshal".to_string(),
|
||||||
|
message: format!("cannot convert result into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_valid(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "yaml.is_valid".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.is_valid".to_string(),
|
||||||
|
message: "parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let v: serde_yaml::Result<serde_yaml::Value> = serde_yaml::from_str(input);
|
||||||
|
let res = v.is_ok();
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "yaml.is_valid".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use assert_json_diff::assert_json_eq;
|
||||||
|
use serde_json::json;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_marshal() {
|
||||||
|
let input = json!({
|
||||||
|
"hello": "world",
|
||||||
|
"number": 42,
|
||||||
|
"list": [1,2,3]
|
||||||
|
});
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = marshal(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
let actual_str = actual.unwrap();
|
||||||
|
let actual_json: serde_json::Value =
|
||||||
|
serde_yaml::from_str(actual_str.as_str().unwrap()).unwrap();
|
||||||
|
|
||||||
|
assert_json_eq!(input, actual_json);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_unmarshal() {
|
||||||
|
let input_str = r#"---
|
||||||
|
hello: world
|
||||||
|
list:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
number: 42
|
||||||
|
"#;
|
||||||
|
|
||||||
|
let input = json!(input_str);
|
||||||
|
|
||||||
|
let expected = json!({
|
||||||
|
"hello": "world",
|
||||||
|
"number": 42,
|
||||||
|
"list": [1,2,3]
|
||||||
|
});
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = unmarshal(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
let actual = actual.unwrap();
|
||||||
|
assert_json_eq!(json!(expected), actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_is_valid() {
|
||||||
|
let mut cases: HashMap<String, bool> = HashMap::new();
|
||||||
|
cases.insert(
|
||||||
|
String::from("some_key: [1,2]\nsome_other_key: [3.0, 4.0]"),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
cases.insert(String::from("some_key: [1,2"), false);
|
||||||
|
|
||||||
|
for (input, expected) in cases.iter() {
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = is_valid(&args);
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
|
||||||
|
let actual = actual.unwrap();
|
||||||
|
assert_json_eq!(json!(expected), actual);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod hex {
|
||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use core::num;
|
||||||
|
|
||||||
|
pub fn encode(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "hex.encode".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "hex.encode".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let res: Vec<String> = input
|
||||||
|
.as_bytes()
|
||||||
|
.iter()
|
||||||
|
.map(|v| format!("{:x?}", v))
|
||||||
|
.collect();
|
||||||
|
let res = res.join("");
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "hex.encode".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn decode(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "hex.decode".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "hex.decode".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let value: std::result::Result<Vec<u8>, num::ParseIntError> = (0..input.len())
|
||||||
|
.step_by(2)
|
||||||
|
.map(|i| u8::from_str_radix(&input[i..i + 2], 16))
|
||||||
|
.collect();
|
||||||
|
let value = value.map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "hex.decode".to_string(),
|
||||||
|
message: format!("cannot parse input - {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let res = String::from_utf8(value).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "hex.decode".to_string(),
|
||||||
|
message: format!("cannot parse string - {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "hex.decode".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_encode() {
|
||||||
|
let input = "hello";
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = encode(&args);
|
||||||
|
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
assert_eq!(json!("68656c6c6f"), actual.unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_decode() {
|
||||||
|
let input = "68656c6c6f";
|
||||||
|
|
||||||
|
let args: Vec<serde_json::Value> = vec![json!(input)];
|
||||||
|
let actual = decode(&args);
|
||||||
|
|
||||||
|
assert!(actual.is_ok());
|
||||||
|
assert_eq!(json!("hello"), actual.unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
57
crates/burrego/src/builtins/glob.rs
Normal file
57
crates/burrego/src/builtins/glob.rs
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
|
||||||
|
pub fn quote_meta(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "glob.quote_meta".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "glob.quote_meta".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(escape(input)).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "glob.quote_meta".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Backslash-escapes every glob metacharacter (`* ? \ [ ] { }`) in `s`,
/// leaving all other characters untouched.
fn escape(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for c in s.chars() {
        if matches!(c, '*' | '?' | '\\' | '[' | ']' | '{' | '}') {
            escaped.push('\\');
        }
        escaped.push(c);
    }
    escaped
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    #[test]
    fn escape() {
        // Glob metacharacters are escaped; plain text is left untouched.
        assert_eq!(super::escape("*.domain.com"), r"\*.domain.com");

        assert_eq!(super::escape("*.domain-*.com"), r"\*.domain-\*.com");

        assert_eq!(super::escape("domain.com"), r"domain.com");

        assert_eq!(super::escape("domain-[ab].com"), r"domain-\[ab\].com");

        assert_eq!(super::escape("nie?ce"), r"nie\?ce");

        // Every metacharacter, including the backslash itself, is escaped.
        assert_eq!(
            super::escape("some *?\\[]{} text"),
            "some \\*\\?\\\\\\[\\]\\{\\} text"
        );
    }
}
|
||||||
59
crates/burrego/src/builtins/json.rs
Normal file
59
crates/burrego/src/builtins/json.rs
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
|
||||||
|
pub fn patch(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 2 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "json.patch".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if !args[0].is_object() {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "json.patch".to_string(),
|
||||||
|
message: "1st parameter is not an object".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let mut obj = args[0].clone();
|
||||||
|
|
||||||
|
if !args[1].is_array() {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "json.patch".to_string(),
|
||||||
|
message: "2nd parameter is not an array".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let patches_str = serde_json::to_string(&args[1]).map_err(|_| BurregoError::BuiltinError {
|
||||||
|
name: "json.patch".to_string(),
|
||||||
|
message: "cannot convert 2nd parameter to string".to_string(),
|
||||||
|
})?;
|
||||||
|
let patches: json_patch::Patch = serde_json::from_str(&patches_str).unwrap();
|
||||||
|
|
||||||
|
json_patch::patch(&mut obj, &patches).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "json.patch".to_string(),
|
||||||
|
message: format!("cannot apply patch: {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(obj).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "json.patch".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use assert_json_diff::assert_json_eq;
    use serde_json::json;

    #[test]
    fn test_patch() {
        // An RFC 6902 "add" operation inserts a new key into a nested object.
        let args: Vec<serde_json::Value> = vec![
            json!({"a": {"foo": 1}}),
            json!([{"op": "add", "path": "/a/bar", "value": 2}]),
        ];

        let actual = patch(&args);
        assert!(actual.is_ok());
        assert_json_eq!(json!({"a": {"foo": 1, "bar": 2}}), actual.unwrap());
    }
}
|
||||||
65
crates/burrego/src/builtins/mod.rs
Normal file
65
crates/burrego/src/builtins/mod.rs
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
use crate::errors::Result;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
pub(crate) mod builtins_helper;
|
||||||
|
mod debugging;
|
||||||
|
mod encoding;
|
||||||
|
mod glob;
|
||||||
|
mod json;
|
||||||
|
mod regex;
|
||||||
|
mod semver;
|
||||||
|
mod strings;
|
||||||
|
mod time;
|
||||||
|
|
||||||
|
pub(crate) use builtins_helper::BUILTINS_HELPER;
|
||||||
|
|
||||||
|
/// Maps a builtin name (as invoked from a Rego policy) to its native Rust
/// implementation.
pub(crate) type BuiltinFunctionsMap =
    HashMap<&'static str, fn(&[serde_json::Value]) -> Result<serde_json::Value>>;
|
||||||
|
|
||||||
|
pub fn get_builtins() -> BuiltinFunctionsMap {
|
||||||
|
let mut functions: BuiltinFunctionsMap = HashMap::new();
|
||||||
|
|
||||||
|
// debugging
|
||||||
|
functions.insert("trace", debugging::trace);
|
||||||
|
|
||||||
|
// encoding
|
||||||
|
functions.insert(
|
||||||
|
"base64url.encode_no_pad",
|
||||||
|
encoding::base64url::encode_no_pad,
|
||||||
|
);
|
||||||
|
functions.insert("urlquery.encode", encoding::urlquery::encode);
|
||||||
|
functions.insert("urlquery.decode", encoding::urlquery::decode);
|
||||||
|
functions.insert("urlquery.encode_object", encoding::urlquery::encode_object);
|
||||||
|
functions.insert("urlquery.decode_object", encoding::urlquery::decode_object);
|
||||||
|
functions.insert("json.is_valid", encoding::json::is_valid);
|
||||||
|
functions.insert("yaml.marshal", encoding::yaml::marshal);
|
||||||
|
functions.insert("yaml.unmarshal", encoding::yaml::unmarshal);
|
||||||
|
functions.insert("yaml.is_valid", encoding::yaml::is_valid);
|
||||||
|
functions.insert("hex.encode", encoding::hex::encode);
|
||||||
|
functions.insert("hex.decode", encoding::hex::decode);
|
||||||
|
|
||||||
|
// glob
|
||||||
|
functions.insert("glob.quote_meta", glob::quote_meta);
|
||||||
|
|
||||||
|
// objects
|
||||||
|
functions.insert("json.patch", json::patch);
|
||||||
|
|
||||||
|
// regex
|
||||||
|
functions.insert("regex.split", regex::split);
|
||||||
|
functions.insert("regex.template_match", regex::template_match);
|
||||||
|
functions.insert("regex.find_n", regex::find_n);
|
||||||
|
|
||||||
|
// semver
|
||||||
|
functions.insert("semver.is_valid", semver::is_valid);
|
||||||
|
functions.insert("semver.compare", semver::compare);
|
||||||
|
|
||||||
|
// strings
|
||||||
|
functions.insert("sprintf", strings::sprintf);
|
||||||
|
|
||||||
|
// time
|
||||||
|
functions.insert("time.now_ns", time::now_ns);
|
||||||
|
functions.insert("parse_rfc3339_ns", time::parse_rfc3339_ns);
|
||||||
|
functions.insert("date", time::date);
|
||||||
|
|
||||||
|
functions
|
||||||
|
}
|
||||||
320
crates/burrego/src/builtins/regex.rs
Normal file
320
crates/burrego/src/builtins/regex.rs
Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use core::fmt::Display;
|
||||||
|
use regex::{escape as regex_escape, Regex};
|
||||||
|
use std::{fmt, str::FromStr};
|
||||||
|
|
||||||
|
pub fn split(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 2 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "regex.split".to_string(),
|
||||||
|
message: "Wrong number of arguments given".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let pattern_str = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.split".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
let string_str = args[1].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.split".to_string(),
|
||||||
|
message: "2nd parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(
|
||||||
|
Regex::new(pattern_str)
|
||||||
|
.map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "regex.split".to_string(),
|
||||||
|
message: format!(
|
||||||
|
"cannot build regex from the given pattern string '{}': {:?}",
|
||||||
|
pattern_str, e
|
||||||
|
),
|
||||||
|
})?
|
||||||
|
.split(string_str)
|
||||||
|
.collect::<String>(),
|
||||||
|
)
|
||||||
|
.map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "regex.split".to_string(),
|
||||||
|
message: format!("cannot convert result into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn template_match(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 4 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "Wrong number of arguments given".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let pattern_str = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
let string_str = args[1].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "2nd parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
let delimiter_start_str = args[2].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "3rd parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
if delimiter_start_str.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "3rd parameter has to be exactly one character long".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let delimiter_end_str = args[3].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "4th parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
if delimiter_end_str.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: "4th parameter has to be exactly one character long".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let computed_regexp = TemplateMatch::regexp_from_template(
|
||||||
|
pattern_str,
|
||||||
|
// safe, since we have ensured that the length is 1
|
||||||
|
delimiter_start_str.chars().next().unwrap(),
|
||||||
|
// safe, since we have ensured that the length is 1
|
||||||
|
delimiter_end_str.chars().next().unwrap(),
|
||||||
|
)?;
|
||||||
|
serde_json::to_value(computed_regexp.is_match(string_str)).map_err(|e| {
|
||||||
|
BurregoError::BuiltinError {
|
||||||
|
name: "regex.template_match".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn find_n(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 3 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: "Wrong number of arguments given to ".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let pattern_str = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
let string_str = args[1].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: "2nd parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
let take_number = args[2].as_i64().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: "3rd parameter is not a number".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let take_n = if take_number != -1 {
|
||||||
|
take_number as usize
|
||||||
|
} else {
|
||||||
|
Regex::new(pattern_str)
|
||||||
|
.map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: format!(
|
||||||
|
"cannot build regex from the given pattern string '{}': {:?}",
|
||||||
|
pattern_str, e
|
||||||
|
),
|
||||||
|
})?
|
||||||
|
.find_iter(string_str)
|
||||||
|
.count()
|
||||||
|
};
|
||||||
|
|
||||||
|
let matches: Vec<String> = Regex::new(pattern_str)
|
||||||
|
.map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: format!(
|
||||||
|
"cannot build regex from the given pattern string '{}': {:?}",
|
||||||
|
pattern_str, e
|
||||||
|
),
|
||||||
|
})?
|
||||||
|
.find_iter(string_str)
|
||||||
|
.take(take_n)
|
||||||
|
.map(|match_| String::from(match_.as_str()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
serde_json::to_value(matches).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "regex.find_n".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// One fragment of a template: either a raw regular expression, or a
/// literal string that must be escaped before being embedded into a regex.
struct Expression {
    // true -> `expression` is a raw regexp fragment; false -> literal text
    is_regexp: bool,
    // the fragment's text, without the surrounding delimiters
    expression: String,
}
|
||||||
|
|
||||||
|
impl Display for Expression {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
if self.is_regexp {
|
||||||
|
write!(f, "{}", &self.expression)
|
||||||
|
} else {
|
||||||
|
write!(f, "{}", ®ex_escape(&self.expression))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Ordered sequence of template fragments; rendering it yields the final
/// regular expression string.
struct ExpressionList(Vec<Expression>);
|
||||||
|
|
||||||
|
impl Display for ExpressionList {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
for expression in self.0.iter() {
|
||||||
|
write!(f, "{}", expression)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Namespace for the template-to-regex conversion used by
/// `regex.template_match`.
struct TemplateMatch {}
|
||||||
|
|
||||||
|
impl TemplateMatch {
|
||||||
|
fn regexp_from_template(
|
||||||
|
template: &str,
|
||||||
|
delimiter_start: char,
|
||||||
|
delimiter_end: char,
|
||||||
|
) -> Result<Regex> {
|
||||||
|
let mut expressions = ExpressionList(Vec::new());
|
||||||
|
let mut current_expression = Expression {
|
||||||
|
is_regexp: false,
|
||||||
|
expression: String::new(),
|
||||||
|
};
|
||||||
|
let mut delimiters_open = 0;
|
||||||
|
|
||||||
|
for c in template.chars() {
|
||||||
|
if c == delimiter_start {
|
||||||
|
delimiters_open += 1;
|
||||||
|
if delimiters_open == 1 {
|
||||||
|
if !current_expression.expression.is_empty() {
|
||||||
|
expressions.0.push(current_expression);
|
||||||
|
}
|
||||||
|
current_expression = Expression {
|
||||||
|
is_regexp: true,
|
||||||
|
expression: String::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if c == delimiter_end {
|
||||||
|
delimiters_open -= 1;
|
||||||
|
if delimiters_open == 0 {
|
||||||
|
if !current_expression.expression.is_empty() {
|
||||||
|
expressions.0.push(current_expression);
|
||||||
|
}
|
||||||
|
current_expression = Expression {
|
||||||
|
is_regexp: false,
|
||||||
|
expression: String::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
current_expression.expression.push(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !current_expression.expression.is_empty() {
|
||||||
|
expressions.0.push(current_expression);
|
||||||
|
}
|
||||||
|
|
||||||
|
Regex::from_str(&format!("{}", expressions)).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "regex".to_string(),
|
||||||
|
message: format!("tried to initialize an invalid regular expression: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn regex_from_template() -> Result<()> {
        // A template without delimiters behaves like a literal match.
        assert!(
            TemplateMatch::regexp_from_template("urn:foo:bar:baz", '{', '}')?
                .is_match("urn:foo:bar:baz")
        );

        // A `{...}` fragment is treated as a raw regular expression.
        assert!(TemplateMatch::regexp_from_template("urn:foo:{.*}", '{', '}')?
            .is_match("urn:foo:bar:baz"));

        // Custom delimiter characters work too.
        assert!(TemplateMatch::regexp_from_template("urn:foo:<.*>", '<', '>')?
            .is_match("urn:foo:bar:baz"));

        // Delimiters that never appear leave the template matched verbatim.
        assert!(TemplateMatch::regexp_from_template("urn:foo:{.*}", '<', '>')?
            .is_match("urn:foo:{.*}"));

        // Multiple regexp fragments inside the same template.
        assert!(TemplateMatch::regexp_from_template(
            "urn:foo:test:section-<[0-9]{2}>:alert-<[0-9]{4}>",
            '<',
            '>',
        )?
        .is_match("urn:foo:test:section-42:alert-1234"));

        Ok(())
    }

    #[test]
    fn find_n() -> Result<()> {
        // Small helper keeping each assertion on one line.
        let find = |pattern: &str, value: &str, n: i64| {
            super::find_n(&[
                serde_json::to_value(pattern).unwrap(),
                serde_json::to_value(value).unwrap(),
                serde_json::to_value(n).unwrap(),
            ])
        };

        assert_eq!(find("a.", "paranormal", 1)?.as_array().unwrap(), &vec!["ar"]);
        assert_eq!(
            find("a.", "paranormal", 2)?.as_array().unwrap(),
            &vec!["ar", "an"]
        );
        // Asking for more matches than exist returns them all.
        assert_eq!(
            find("a.", "paranormal", 10)?.as_array().unwrap(),
            &vec!["ar", "an", "al"]
        );
        // -1 means "return every match".
        assert_eq!(
            find("a.", "paranormal", -1)?.as_array().unwrap(),
            &vec!["ar", "an", "al"]
        );
        // No match at all yields an empty array.
        assert_eq!(
            find("nomatch", "paranormal", -1)?.as_array().unwrap(),
            &vec![] as &Vec<String>
        );

        Ok(())
    }
}
|
||||||
102
crates/burrego/src/builtins/semver.rs
Normal file
102
crates/burrego/src/builtins/semver.rs
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use semver::Version;
|
||||||
|
use std::cmp::Ordering;
|
||||||
|
|
||||||
|
pub fn is_valid(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "semver.is_valid".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "semver.is_valid".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let valid_version = Version::parse(input).map(|_| true).unwrap_or(false);
|
||||||
|
|
||||||
|
serde_json::to_value(valid_version).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "semver.is_valid".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn compare(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 2 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "semver.compare".to_string(),
|
||||||
|
message: "wrong number of arguments".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let version_a = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "semver.compare".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let version_b = args[1].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "semver.compare".to_string(),
|
||||||
|
message: "2nd parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let version_a = Version::parse(version_a).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "semver.compare".to_string(),
|
||||||
|
message: format!("first argument is not a valid semantic version: {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let version_b = Version::parse(version_b).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "semver.compare".to_string(),
|
||||||
|
message: format!("second argument is not a valid semantic version: {:?}", e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let res = match version_a.cmp(&version_b) {
|
||||||
|
Ordering::Less => -1,
|
||||||
|
Ordering::Equal => 0,
|
||||||
|
Ordering::Greater => 1,
|
||||||
|
};
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "semver.compare".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;

    use serde_json::json;

    #[test]
    fn is_valid() -> Result<()> {
        // Plain and pre-release versions are both valid.
        assert_eq!(super::is_valid(&[json!("1.0.0")])?, true);
        assert_eq!(super::is_valid(&[json!("1.0.0-rc1")])?, true);
        // Not a semantic version at all.
        assert_eq!(super::is_valid(&[json!("invalidsemver-1.0.0")])?, false);

        Ok(())
    }

    #[test]
    fn compare() -> Result<()> {
        // Lower / equal / greater.
        assert_eq!(super::compare(&[json!("0.0.1"), json!("0.1.0")])?, -1);
        assert_eq!(
            super::compare(&[json!("1.0.0-rc1"), json!("1.0.0-rc1")])?,
            0
        );
        assert_eq!(super::compare(&[json!("0.1.0"), json!("0.0.1")])?, 1);
        // Pre-release identifiers have an ordering of their own.
        assert_eq!(
            super::compare(&[json!("1.0.0-beta1"), json!("1.0.0-alpha3")])?,
            1
        );
        assert_eq!(
            super::compare(&[json!("1.0.0-rc2"), json!("1.0.0-rc1")])?,
            1
        );
        // Invalid versions are rejected regardless of their position.
        assert!(super::compare(&[json!("invalidsemver-1.0.0"), json!("0.1.0")]).is_err());
        assert!(super::compare(&[json!("0.1.0"), json!("invalidsemver-1.0.0")]).is_err());

        Ok(())
    }
}
|
||||||
104
crates/burrego/src/builtins/strings.rs
Normal file
104
crates/burrego/src/builtins/strings.rs
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use std::{collections::HashMap, convert::From};
|
||||||
|
|
||||||
|
/// Newtype wrapper allowing JSON values to be converted into `gtmpl` values.
struct GoTmplValue(gtmpl::Value);
|
||||||
|
|
||||||
|
impl From<serde_json::Value> for GoTmplValue {
|
||||||
|
fn from(value: serde_json::Value) -> Self {
|
||||||
|
match value {
|
||||||
|
serde_json::Value::String(s) => GoTmplValue(gtmpl::Value::String(s)),
|
||||||
|
serde_json::Value::Number(n) => {
|
||||||
|
let n: i64 = n.as_i64().unwrap();
|
||||||
|
let number: gtmpl_value::Number = n.into();
|
||||||
|
GoTmplValue(gtmpl::Value::Number(number))
|
||||||
|
}
|
||||||
|
serde_json::Value::Bool(b) => GoTmplValue(gtmpl::Value::Bool(b)),
|
||||||
|
serde_json::Value::Array(arr) => {
|
||||||
|
let res: Vec<gtmpl::Value> = arr
|
||||||
|
.iter()
|
||||||
|
.map(|i| {
|
||||||
|
let v: GoTmplValue = i.clone().into();
|
||||||
|
v.0
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
GoTmplValue(gtmpl::Value::Array(res))
|
||||||
|
}
|
||||||
|
serde_json::Value::Object(obj) => {
|
||||||
|
let res: HashMap<String, gtmpl::Value> = obj
|
||||||
|
.iter()
|
||||||
|
.map(|(k, v)| {
|
||||||
|
let val: GoTmplValue = v.clone().into();
|
||||||
|
(k.clone(), val.0)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
GoTmplValue(gtmpl::Value::Map(res))
|
||||||
|
}
|
||||||
|
_ => GoTmplValue(gtmpl::Value::Nil),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn sprintf(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 2 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "sprintf".to_string(),
|
||||||
|
message: "Wrong number of arguments given".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let fmt_str = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "sprintf".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
let fmt_args: Vec<gtmpl::Value> = args[1]
|
||||||
|
.as_array()
|
||||||
|
.ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "sprintf".to_string(),
|
||||||
|
message: "2nd parameter is not an array".to_string(),
|
||||||
|
})?
|
||||||
|
.iter()
|
||||||
|
.map(|i| {
|
||||||
|
let g: GoTmplValue = i.clone().into();
|
||||||
|
g.0
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut index_cmds: Vec<String> = Vec::new();
|
||||||
|
for i in 0..fmt_args.len() {
|
||||||
|
index_cmds.push(format!("(index . {})", i));
|
||||||
|
}
|
||||||
|
|
||||||
|
let template_str = format!(r#"{{{{ printf "{}" {}}}}}"#, fmt_str, index_cmds.join(" "));
|
||||||
|
let res = gtmpl::template(&template_str, fmt_args.as_slice()).map_err(|e| {
|
||||||
|
BurregoError::BuiltinError {
|
||||||
|
name: "sprintf".to_string(),
|
||||||
|
message: format!(
|
||||||
|
"Cannot render go template '{}' with args {:?}: {:?}",
|
||||||
|
template_str, fmt_args, e
|
||||||
|
),
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(res).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "sprintf".to_string(),
|
||||||
|
message: format!("Cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use serde_json::json;

    #[test]
    fn sprintf_mixed_input() {
        // Strings, numbers and nested lists are all rendered through `%v`.
        let args: Vec<serde_json::Value> = vec![
            json!("hello %v %v %v"),
            json!(["world", 42, ["this", "is", "a", "list"]]),
        ];

        let actual = sprintf(&args);

        assert!(actual.is_ok());
        assert_eq!(json!("hello world 42 [this is a list]"), actual.unwrap());
    }
}
|
||||||
195
crates/burrego/src/builtins/time.rs
Normal file
195
crates/burrego/src/builtins/time.rs
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use chrono::{self, DateTime, Datelike, Duration, Local};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
pub fn now_ns(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if !args.is_empty() {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "time.now_ns".to_string(),
|
||||||
|
message: "wrong number of arguments given".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let now = Local::now();
|
||||||
|
serde_json::to_value(now.timestamp_nanos()).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "time.now_ns".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_rfc3339_ns(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "time.parse_rfc3339_ns".to_string(),
|
||||||
|
message: "wrong number of arguments given".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let value = args[0].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "time.parse_rfc3339_ns".to_string(),
|
||||||
|
message: "1st parameter is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let dt = DateTime::parse_from_rfc3339(value).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "time.parse_rfc3339_ns".to_string(),
|
||||||
|
message: format!(": cannot convert {}: {:?}", value, e),
|
||||||
|
})?;
|
||||||
|
|
||||||
|
serde_json::to_value(dt.timestamp_nanos()).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "time.parse_rfc3339_ns".to_string(),
|
||||||
|
message: format!("cannot convert value into JSON: {:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn date(args: &[serde_json::Value]) -> Result<serde_json::Value> {
|
||||||
|
if args.len() != 1 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "wrong number of arguments given".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let nanoseconds: i64;
|
||||||
|
let mut timezone: chrono_tz::Tz = chrono_tz::UTC;
|
||||||
|
|
||||||
|
match args[0].clone() {
|
||||||
|
serde_json::Value::Number(val) => {
|
||||||
|
nanoseconds = val.as_i64().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "1st parameter is not a number".to_string(),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
serde_json::Value::Array(val) => {
|
||||||
|
if val.len() != 2 {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "wrong number of items inside of input array".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
nanoseconds = val[0].as_i64().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "1st array item is not a number".to_string(),
|
||||||
|
})?;
|
||||||
|
let tz_name = val[1].as_str().ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "2nd array item is not a string".to_string(),
|
||||||
|
})?;
|
||||||
|
if tz_name == "Local" {
|
||||||
|
return date_local(nanoseconds);
|
||||||
|
} else {
|
||||||
|
timezone =
|
||||||
|
chrono_tz::Tz::from_str(tz_name).map_err(|e| BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: format!("cannot handle given timezone {}: {:?}", tz_name, e),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "the 1st parameter is neither a number nor an array".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let unix_epoch = DateTime::<chrono::Utc>::from_utc(
|
||||||
|
chrono::NaiveDateTime::from_timestamp_opt(0, 0).ok_or_else(|| {
|
||||||
|
BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "cannot create timestamp".to_string(),
|
||||||
|
}
|
||||||
|
})?,
|
||||||
|
chrono::Utc,
|
||||||
|
);
|
||||||
|
let dt = unix_epoch
|
||||||
|
.checked_add_signed(Duration::nanoseconds(nanoseconds))
|
||||||
|
.ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "overflow when building date".to_string(),
|
||||||
|
})?
|
||||||
|
.with_timezone(&timezone);
|
||||||
|
|
||||||
|
Ok(serde_json::json!([dt.year(), dt.month(), dt.day(),]))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn date_local(ns: i64) -> Result<serde_json::Value> {
|
||||||
|
let unix_epoch = DateTime::<chrono::Utc>::from_utc(
|
||||||
|
chrono::NaiveDateTime::from_timestamp_opt(0, 0).ok_or_else(|| {
|
||||||
|
BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "cannot create timestamp".to_string(),
|
||||||
|
}
|
||||||
|
})?,
|
||||||
|
chrono::Utc,
|
||||||
|
);
|
||||||
|
let dt = unix_epoch
|
||||||
|
.checked_add_signed(Duration::nanoseconds(ns))
|
||||||
|
.ok_or_else(|| BurregoError::BuiltinError {
|
||||||
|
name: "time.date".to_string(),
|
||||||
|
message: "overflow when building date".to_string(),
|
||||||
|
})?
|
||||||
|
.with_timezone(&chrono::Local);
|
||||||
|
|
||||||
|
Ok(serde_json::json!([dt.year(), dt.month(), dt.day(),]))
|
||||||
|
}
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use chrono::TimeZone;
    use serde_json::json;

    #[test]
    fn test_parse_rfc3339_ns() {
        let input_dt = Local::now();

        let actual = parse_rfc3339_ns(&[json!(input_dt.to_rfc3339())]);

        assert!(actual.is_ok());
        assert_eq!(json!(input_dt.timestamp_nanos()), actual.unwrap());
    }

    #[test]
    fn date_with_no_tz() {
        // NOTE(review): comparing against "now" could flake if a UTC day
        // boundary falls between these two statements — confirm acceptable.
        let input_dt = Local::now().naive_utc();

        let actual = date(&[json!(input_dt.timestamp_nanos())]);

        assert!(actual.is_ok());
        assert_eq!(
            json!([input_dt.year(), input_dt.month(), input_dt.day()]),
            actual.unwrap()
        );
    }

    #[test]
    fn date_with_tz() {
        // A fixed, unambiguous point in time in the US/Pacific timezone.
        let input_dt = match chrono_tz::US::Pacific.with_ymd_and_hms(1990, 5, 6, 12, 30, 45) {
            chrono::LocalResult::Single(dt) => dt,
            _ => panic!("didn't get the expected datetime object"),
        };

        let actual = date(&[json!([input_dt.timestamp_nanos(), "US/Pacific"])]);

        assert!(actual.is_ok());
        assert_eq!(
            json!([input_dt.year(), input_dt.month(), input_dt.day()]),
            actual.unwrap()
        );
    }

    #[test]
    fn date_with_local_tz() {
        let input_dt = Local::now().naive_utc();

        let actual = date(&[json!([input_dt.timestamp_nanos(), "Local"])]);

        assert!(actual.is_ok());
        assert_eq!(
            json!([input_dt.year(), input_dt.month(), input_dt.day()]),
            actual.unwrap()
        );
    }
}
|
||||||
31
crates/burrego/src/errors.rs
Normal file
31
crates/burrego/src/errors.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
pub type Result<T> = std::result::Result<T, BurregoError>;
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum BurregoError {
|
||||||
|
#[error("Missing Rego builtins: {0}")]
|
||||||
|
MissingRegoBuiltins(String),
|
||||||
|
|
||||||
|
#[error("wasm engine error: {0}")]
|
||||||
|
WasmEngineError(String),
|
||||||
|
|
||||||
|
#[error("Rego wasm error: {0}")]
|
||||||
|
RegoWasmError(String),
|
||||||
|
|
||||||
|
#[error("JSON error: {0}")]
|
||||||
|
JSONError(String),
|
||||||
|
|
||||||
|
#[error("Evaluator builder error: {0}")]
|
||||||
|
EvaluatorBuilderError(String),
|
||||||
|
|
||||||
|
#[error("Builtin error [{name:?}]: {message:?}")]
|
||||||
|
BuiltinError { name: String, message: String },
|
||||||
|
|
||||||
|
#[error("Builtin not implemented: {0}")]
|
||||||
|
BuiltinNotImplementedError(String),
|
||||||
|
|
||||||
|
/// Wasmtime execution deadline exceeded
|
||||||
|
#[error("guest code interrupted, execution deadline exceeded")]
|
||||||
|
ExecutionDeadlineExceeded,
|
||||||
|
}
|
||||||
244
crates/burrego/src/evaluator.rs
Normal file
244
crates/burrego/src/evaluator.rs
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
use crate::builtins;
|
||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use crate::host_callbacks::HostCallbacks;
|
||||||
|
use crate::opa_host_functions;
|
||||||
|
use crate::policy::Policy;
|
||||||
|
use crate::stack_helper::StackHelper;
|
||||||
|
|
||||||
|
use itertools::Itertools;
|
||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
use tracing::debug;
|
||||||
|
use wasmtime::{Engine, Instance, Linker, Memory, MemoryType, Module, Store};
|
||||||
|
|
||||||
|
/// Sets the wasmtime epoch deadline (when one is configured) right before
/// running the given guest-code block, so that long-running evaluations can
/// be interrupted.
macro_rules! set_epoch_deadline_and_call_guest {
    ($epoch_deadline:expr, $store:expr, $code:block) => {{
        if let Some(deadline) = $epoch_deadline {
            $store.set_epoch_deadline(deadline);
        }
        $code
    }};
}
|
||||||
|
|
||||||
|
struct EvaluatorStack {
|
||||||
|
store: Store<Option<StackHelper>>,
|
||||||
|
instance: Instance,
|
||||||
|
memory: Memory,
|
||||||
|
policy: Policy,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Evaluator {
|
||||||
|
engine: Engine,
|
||||||
|
module: Module,
|
||||||
|
store: Store<Option<StackHelper>>,
|
||||||
|
instance: Instance,
|
||||||
|
memory: Memory,
|
||||||
|
policy: Policy,
|
||||||
|
host_callbacks: HostCallbacks,
|
||||||
|
/// used to tune the [epoch
|
||||||
|
/// interruption](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.epoch_interruption)
|
||||||
|
/// feature of wasmtime
|
||||||
|
epoch_deadline: Option<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Evaluator {
|
||||||
|
pub(crate) fn from_engine_and_module(
|
||||||
|
engine: Engine,
|
||||||
|
module: Module,
|
||||||
|
host_callbacks: HostCallbacks,
|
||||||
|
epoch_deadline: Option<u64>,
|
||||||
|
) -> Result<Evaluator> {
|
||||||
|
let stack = Self::setup(engine.clone(), module.clone(), host_callbacks.clone())?;
|
||||||
|
let mut store = stack.store;
|
||||||
|
let instance = stack.instance;
|
||||||
|
let memory = stack.memory;
|
||||||
|
let policy = stack.policy;
|
||||||
|
|
||||||
|
let used_builtins = set_epoch_deadline_and_call_guest!(epoch_deadline, store, {
|
||||||
|
policy
|
||||||
|
.builtins(&mut store, &memory)?
|
||||||
|
.keys()
|
||||||
|
.cloned()
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
.join(", ")
|
||||||
|
});
|
||||||
|
|
||||||
|
debug!(used = used_builtins.as_str(), "policy builtins");
|
||||||
|
|
||||||
|
let mut evaluator = Evaluator {
|
||||||
|
engine,
|
||||||
|
module,
|
||||||
|
store,
|
||||||
|
instance,
|
||||||
|
memory,
|
||||||
|
policy,
|
||||||
|
host_callbacks,
|
||||||
|
epoch_deadline,
|
||||||
|
};
|
||||||
|
|
||||||
|
let not_implemented_builtins = evaluator.not_implemented_builtins()?;
|
||||||
|
if !not_implemented_builtins.is_empty() {
|
||||||
|
return Err(BurregoError::MissingRegoBuiltins(
|
||||||
|
not_implemented_builtins.iter().join(", "),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(evaluator)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn setup(
|
||||||
|
engine: Engine,
|
||||||
|
module: Module,
|
||||||
|
host_callbacks: HostCallbacks,
|
||||||
|
) -> Result<EvaluatorStack> {
|
||||||
|
let mut linker = Linker::<Option<StackHelper>>::new(&engine);
|
||||||
|
|
||||||
|
let opa_data_helper: Option<StackHelper> = None;
|
||||||
|
let mut store = Store::new(&engine, opa_data_helper);
|
||||||
|
|
||||||
|
let memory_ty = MemoryType::new(5, None);
|
||||||
|
let memory = Memory::new(&mut store, memory_ty)
|
||||||
|
.map_err(|e| BurregoError::WasmEngineError(format!("cannot create memory: {}", e)))?;
|
||||||
|
linker.define("env", "memory", memory).map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("linker cannot define memory: {}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
opa_host_functions::add_to_linker(&mut linker)?;
|
||||||
|
|
||||||
|
let instance = linker.instantiate(&mut store, &module).map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("linker cannot create instance: {}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let stack_helper = StackHelper::new(
|
||||||
|
&instance,
|
||||||
|
&memory,
|
||||||
|
&mut store,
|
||||||
|
host_callbacks.opa_abort,
|
||||||
|
host_callbacks.opa_println,
|
||||||
|
)?;
|
||||||
|
let policy = Policy::new(&instance, &mut store, &memory)?;
|
||||||
|
_ = store.data_mut().insert(stack_helper);
|
||||||
|
|
||||||
|
Ok(EvaluatorStack {
|
||||||
|
memory,
|
||||||
|
store,
|
||||||
|
instance,
|
||||||
|
policy,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reset(&mut self) -> Result<()> {
|
||||||
|
let stack = Self::setup(
|
||||||
|
self.engine.clone(),
|
||||||
|
self.module.clone(),
|
||||||
|
self.host_callbacks.clone(),
|
||||||
|
)?;
|
||||||
|
self.store = stack.store;
|
||||||
|
self.instance = stack.instance;
|
||||||
|
self.memory = stack.memory;
|
||||||
|
self.policy = stack.policy;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn opa_abi_version(&mut self) -> Result<(i32, i32)> {
|
||||||
|
let major = self
|
||||||
|
.instance
|
||||||
|
.get_global(&mut self.store, "opa_wasm_abi_version")
|
||||||
|
.and_then(|g| g.get(&mut self.store).i32())
|
||||||
|
.ok_or_else(|| {
|
||||||
|
BurregoError::RegoWasmError("Cannot find OPA Wasm ABI major version".to_string())
|
||||||
|
})?;
|
||||||
|
let minor = self
|
||||||
|
.instance
|
||||||
|
.get_global(&mut self.store, "opa_wasm_abi_minor_version")
|
||||||
|
.and_then(|g| g.get(&mut self.store).i32())
|
||||||
|
.ok_or_else(|| {
|
||||||
|
BurregoError::RegoWasmError("Cannot find OPA Wasm ABI minor version".to_string())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok((major, minor))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn implemented_builtins() -> HashSet<String> {
|
||||||
|
builtins::get_builtins()
|
||||||
|
.keys()
|
||||||
|
.map(|v| String::from(*v))
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn not_implemented_builtins(&mut self) -> Result<HashSet<String>> {
|
||||||
|
let used_builtins: HashSet<String> = self
|
||||||
|
.policy
|
||||||
|
.builtins(&mut self.store, &self.memory)?
|
||||||
|
.keys()
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
let supported_builtins: HashSet<String> = builtins::get_builtins()
|
||||||
|
.keys()
|
||||||
|
.map(|v| String::from(*v))
|
||||||
|
.collect();
|
||||||
|
Ok(used_builtins
|
||||||
|
.difference(&supported_builtins)
|
||||||
|
.cloned()
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn entrypoint_id(&mut self, entrypoint: &str) -> Result<i32> {
|
||||||
|
let entrypoints = self.policy.entrypoints(&mut self.store, &self.memory)?;
|
||||||
|
entrypoints
|
||||||
|
.iter()
|
||||||
|
.find(|(k, _v)| k == &entrypoint)
|
||||||
|
.map(|(_k, v)| *v)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"Cannot find the specified entrypoint {} inside of {:?}",
|
||||||
|
entrypoint, entrypoints
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
    /// All entrypoints exposed by the policy, mapped `name -> id`.
    ///
    /// The guest call is wrapped by the epoch-deadline guard so a
    /// misbehaving policy cannot hang the host indefinitely.
    pub fn entrypoints(&mut self) -> Result<HashMap<String, i32>> {
        set_epoch_deadline_and_call_guest!(self.epoch_deadline, self.store, {
            self.policy.entrypoints(&mut self.store, &self.memory)
        })
    }
|
||||||
|
|
||||||
|
    /// Evaluate the policy entrypoint identified by `entrypoint_id` against
    /// `input`, using `data` as the policy data document.
    ///
    /// Fails when `entrypoint_id` is not one of the ids the guest reports;
    /// every guest call is wrapped by the epoch-deadline guard.
    pub fn evaluate(
        &mut self,
        entrypoint_id: i32,
        input: &serde_json::Value,
        data: &serde_json::Value,
    ) -> Result<serde_json::Value> {
        // Validate the entrypoint id before touching guest state.
        let entrypoints = self.policy.entrypoints(&mut self.store, &self.memory)?;
        entrypoints
            .iter()
            .find(|(_k, &v)| v == entrypoint_id)
            .ok_or_else(|| {
                BurregoError::RegoWasmError(format!(
                    "Cannot find the specified entrypoint {} inside of {:?}",
                    entrypoint_id, entrypoints
                ))
            })?;

        debug!(
            data = serde_json::to_string(&data)
                .expect("cannot convert data back to json")
                .as_str(),
            "setting policy data"
        );
        // Load the data document into guest memory first; evaluation reads it.
        set_epoch_deadline_and_call_guest!(self.epoch_deadline, self.store, {
            self.policy.set_data(&mut self.store, &self.memory, data)
        })?;

        debug!(
            input = serde_json::to_string(&input)
                .expect("cannot convert input back to JSON")
                .as_str(),
            "attempting evaluation"
        );
        set_epoch_deadline_and_call_guest!(self.epoch_deadline, self.store, {
            self.policy
                .evaluate(entrypoint_id, &mut self.store, &self.memory, input)
        })
    }
|
||||||
|
}
|
||||||
102
crates/burrego/src/evaluator_builder.rs
Normal file
102
crates/burrego/src/evaluator_builder.rs
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use wasmtime::{Engine, Module};
|
||||||
|
|
||||||
|
use crate::{host_callbacks::HostCallbacks, Evaluator};
|
||||||
|
|
||||||
|
/// Builder used to configure and construct an `Evaluator`.
#[derive(Default)]
pub struct EvaluatorBuilder {
    // Path to a `.wasm` policy on disk; mutually exclusive with `module`.
    policy_path: Option<PathBuf>,
    // Pre-compiled wasmtime Module; mutually exclusive with `policy_path`.
    module: Option<Module>,
    // Optional caller-provided Engine; a default one is created otherwise.
    engine: Option<Engine>,
    // Epoch deadline used to interrupt runaway evaluations (optional).
    epoch_deadline: Option<u64>,
    // Host implementations of the OPA imports; must be set before build().
    host_callbacks: Option<HostCallbacks>,
}
|
||||||
|
|
||||||
|
impl EvaluatorBuilder {
|
||||||
|
#[must_use]
|
||||||
|
pub fn policy_path(mut self, path: &Path) -> Self {
|
||||||
|
self.policy_path = Some(path.into());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn module(mut self, module: Module) -> Self {
|
||||||
|
self.module = Some(module);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn engine(mut self, engine: &Engine) -> Self {
|
||||||
|
self.engine = Some(engine.clone());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn enable_epoch_interruptions(mut self, deadline: u64) -> Self {
|
||||||
|
self.epoch_deadline = Some(deadline);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn host_callbacks(mut self, host_callbacks: HostCallbacks) -> Self {
|
||||||
|
self.host_callbacks = Some(host_callbacks);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
fn validate(&self) -> Result<()> {
|
||||||
|
if self.policy_path.is_some() && self.module.is_some() {
|
||||||
|
return Err(BurregoError::EvaluatorBuilderError(
|
||||||
|
"policy_path and module cannot be set at the same time".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if self.policy_path.is_none() && self.module.is_none() {
|
||||||
|
return Err(BurregoError::EvaluatorBuilderError(
|
||||||
|
"Either policy_path or module must be set".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.host_callbacks.is_none() {
|
||||||
|
return Err(BurregoError::EvaluatorBuilderError(
|
||||||
|
"host_callbacks must be set".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build(&self) -> Result<Evaluator> {
|
||||||
|
self.validate()?;
|
||||||
|
|
||||||
|
let engine = match &self.engine {
|
||||||
|
Some(e) => e.clone(),
|
||||||
|
None => {
|
||||||
|
let mut config = wasmtime::Config::default();
|
||||||
|
if self.epoch_deadline.is_some() {
|
||||||
|
config.epoch_interruption(true);
|
||||||
|
}
|
||||||
|
Engine::new(&config).map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("cannot create wasmtime Engine: {:?}", e))
|
||||||
|
})?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let module = match &self.module {
|
||||||
|
Some(m) => m.clone(),
|
||||||
|
None => Module::from_file(
|
||||||
|
&engine,
|
||||||
|
self.policy_path.clone().expect("policy_path should be set"),
|
||||||
|
)
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("cannot create wasmtime Module: {:?}", e))
|
||||||
|
})?,
|
||||||
|
};
|
||||||
|
|
||||||
|
let host_callbacks = self
|
||||||
|
.host_callbacks
|
||||||
|
.clone()
|
||||||
|
.expect("host callbacks should be set");
|
||||||
|
|
||||||
|
Evaluator::from_engine_and_module(engine, module, host_callbacks, self.epoch_deadline)
|
||||||
|
}
|
||||||
|
}
|
||||||
30
crates/burrego/src/host_callbacks.rs
Normal file
30
crates/burrego/src/host_callbacks.rs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
/// HostCallback is a type that references a pointer to a function
|
||||||
|
/// that can be stored and then invoked by burrego when the Open
|
||||||
|
/// Policy Agent Wasm target invokes certain Wasm imports.
|
||||||
|
pub type HostCallback = fn(&str);
|
||||||
|
|
||||||
|
/// HostCallbacks defines a set of pluggable host implementations of
/// OPA documented imports:
/// https://www.openpolicyagent.org/docs/latest/wasm/#imports
#[derive(Clone)]
pub struct HostCallbacks {
    /// Invoked when the policy calls the `opa_abort` import.
    pub opa_abort: HostCallback,
    /// Invoked when the policy calls the `opa_println` import.
    pub opa_println: HostCallback,
}
|
||||||
|
|
||||||
|
impl Default for HostCallbacks {
    /// Default callbacks just print the guest messages to stderr/stdout.
    fn default() -> HostCallbacks {
        HostCallbacks {
            opa_abort: default_opa_abort,
            opa_println: default_opa_println,
        }
    }
}
|
||||||
|
|
||||||
|
/// Default `opa_abort` handler: log the abort message to stderr.
fn default_opa_abort(msg: &str) {
    eprintln!("OPA abort with message: {:?}", msg);
}
|
||||||
|
|
||||||
|
/// Default `opa_println` handler: echo the policy message to stdout.
fn default_opa_println(msg: &str) {
    println!("Message coming from the policy: {:?}", msg);
}
|
||||||
13
crates/burrego/src/lib.rs
Normal file
13
crates/burrego/src/lib.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
mod builtins;
|
||||||
|
pub mod errors;
|
||||||
|
mod evaluator;
|
||||||
|
mod evaluator_builder;
|
||||||
|
pub mod host_callbacks;
|
||||||
|
mod opa_host_functions;
|
||||||
|
mod policy;
|
||||||
|
mod stack_helper;
|
||||||
|
|
||||||
|
pub use builtins::get_builtins;
|
||||||
|
pub use evaluator::Evaluator;
|
||||||
|
pub use evaluator_builder::EvaluatorBuilder;
|
||||||
|
pub use host_callbacks::HostCallbacks;
|
||||||
370
crates/burrego/src/opa_host_functions.rs
Normal file
370
crates/burrego/src/opa_host_functions.rs
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use tracing::{debug, error};
|
||||||
|
use wasmtime::{AsContextMut, Caller, Linker};
|
||||||
|
|
||||||
|
use crate::builtins::BUILTINS_HELPER;
|
||||||
|
use crate::stack_helper::StackHelper;
|
||||||
|
|
||||||
|
/// Add OPA host callbacks to the linker.
/// The callbacks are the ones listed at https://www.openpolicyagent.org/docs/latest/wasm/#imports
pub(crate) fn add_to_linker(linker: &mut Linker<Option<StackHelper>>) -> Result<()> {
    register_opa_abort_func(linker)?;
    register_opa_println_func(linker)?;
    register_opa_builtin0_func(linker)?;
    register_opa_builtin1_func(linker)?;
    register_opa_builtin2_func(linker)?;
    register_opa_builtin3_func(linker)?;
    register_opa_builtin4_func(linker)?;

    Ok(())
}
|
||||||
|
|
||||||
|
/// Register the `env.opa_abort` import.
///
/// The guest calls it with the address of a message inside its linear
/// memory; the message is decoded and forwarded to the host's `opa_abort`
/// callback. Decoding failures never abort the host call: a descriptive
/// fallback message is forwarded instead.
fn register_opa_abort_func(
    linker: &mut Linker<Option<StackHelper>>,
) -> Result<&mut Linker<Option<StackHelper>>> {
    linker
        .func_wrap(
            "env",
            "opa_abort",
            |mut caller: Caller<'_, Option<StackHelper>>, addr: i32| {
                // The StackHelper is installed in the store right after
                // instantiation, so it is expected to be present here.
                let stack_helper = caller.data().as_ref().unwrap();
                let opa_abort_host_callback = stack_helper.opa_abort_host_callback;

                let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
                let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;

                // Best-effort decode: report decode problems via the message
                // itself rather than failing the import call.
                let msg = StackHelper::read_string(caller.as_context_mut(), &memory, addr)
                    .map_or_else(
                        |e| format!("cannot decode opa_abort message: {:?}", e),
                        |data| String::from_utf8(data).unwrap_or_else(|e| format!("cannot decode opa_abort message: didn't read a valid string from memory - {:?}", e)),
                    );
                opa_abort_host_callback(&msg);

                Ok(())
            },
        ).map_err(|e| BurregoError::BuiltinError{
        name: "opa_abort".to_string(),
        message: e.to_string()
    })
}
|
||||||
|
|
||||||
|
/// Register the `env.opa_println` import.
///
/// Like `opa_abort`: decodes a message from guest memory and forwards it to
/// the host's `opa_println` callback, reporting decoding failures through
/// the message itself.
fn register_opa_println_func(
    linker: &mut Linker<Option<StackHelper>>,
) -> Result<&mut Linker<Option<StackHelper>>> {
    linker.func_wrap(
        "env",
        "opa_println",
        |mut caller: Caller<'_, Option<StackHelper>>, addr: i32| {
            // StackHelper is installed in the store right after instantiation.
            let stack_helper = caller.data().as_ref().unwrap();
            let opa_println_host_callback = stack_helper.opa_println_host_callback;

            let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
            let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;

            // Best-effort decode, mirroring opa_abort.
            let msg = StackHelper::read_string(caller.as_context_mut(), &memory, addr)
                .map_or_else(
                    |e| format!("cannot decode opa_println message: {:?}", e),
                    |data| String::from_utf8(data).unwrap_or_else(|e| format!("cannot decode opa_println message: didn't read a valid string from memory - {:?}", e)),
                );
            opa_println_host_callback(&msg);

            Ok(())
        },
    ).map_err(|e| BurregoError::BuiltinError{
        name: "opa_println".to_string(),
        message: e.to_string()
    })
}
|
||||||
|
|
||||||
|
/// env.opa_builtin0 (builtin_id, ctx) addr
/// Called to dispatch the built-in function identified by the builtin_id.
/// The ctx parameter reserved for future use. The result addr must refer to a value in the shared-memory buffer. The function accepts 0 arguments.
fn register_opa_builtin0_func(
    linker: &mut Linker<Option<StackHelper>>,
) -> Result<&mut Linker<Option<StackHelper>>> {
    linker.func_wrap(
        "env",
        "opa_builtin0",
        |mut caller: Caller<'_, Option<StackHelper>>, builtin_id: i32, _ctx: i32| {
            debug!(builtin_id, "opa_builtin0");

            // StackHelper is installed in the store right after
            // instantiation, so it is expected to be present here.
            let stack_helper = caller.data().as_ref().unwrap();
            let opa_malloc_fn = stack_helper.opa_malloc_fn;
            let opa_json_parse_fn = stack_helper.opa_json_parse_fn;
            let builtin_name = stack_helper
                .builtins
                .get(&builtin_id)
                .ok_or_else(|| {
                    error!(builtin_id, builtins =? stack_helper.builtins, "opa_builtin0: cannot find builtin");
                    BurregoError::BuiltinNotImplementedError(format!("opa_builtin0: cannot find builtin {}", builtin_id))
                })?.clone();
            // Zero-argument builtin: nothing to decode from guest memory.
            let args = vec![];

            let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
            let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;

            let builtin_helper = BUILTINS_HELPER
                .read()
                .map_err(|e| BurregoError::RegoWasmError(format!("Cannot access global builtin helper: {:?}", e)))?;

            let builtin_result = builtin_helper
                .invoke(&builtin_name, &args)?;

            // Serialize the result back into guest memory; the returned
            // address is handed to the policy.
            let addr = StackHelper::push_json(
                caller.as_context_mut(),
                &memory,
                opa_malloc_fn,
                opa_json_parse_fn,
                &builtin_result,
            )?;

            Ok(addr)
        },
    ).map_err(|e| BurregoError::BuiltinError{
        name: "opa_builtin0".to_string(),
        message: e.to_string()})
}
|
||||||
|
|
||||||
|
/// env.opa_builtin1(builtin_id, ctx, _1) addr
|
||||||
|
/// Same as previous except the function accepts 1 argument.
|
||||||
|
fn register_opa_builtin1_func(
|
||||||
|
linker: &mut Linker<Option<StackHelper>>,
|
||||||
|
) -> Result<&mut Linker<Option<StackHelper>>> {
|
||||||
|
linker.func_wrap(
|
||||||
|
"env",
|
||||||
|
"opa_builtin1",
|
||||||
|
move |mut caller: Caller<'_, Option<StackHelper>>,
|
||||||
|
builtin_id: i32,
|
||||||
|
_ctx: i32,
|
||||||
|
p1: i32| {
|
||||||
|
debug!(builtin_id, p1, "opa_builtin1");
|
||||||
|
|
||||||
|
let stack_helper = caller.data().as_ref().unwrap();
|
||||||
|
let opa_malloc_fn = stack_helper.opa_malloc_fn;
|
||||||
|
let opa_json_parse_fn = stack_helper.opa_json_parse_fn;
|
||||||
|
let opa_json_dump_fn = stack_helper.opa_json_dump_fn;
|
||||||
|
let builtin_name = stack_helper
|
||||||
|
.builtins
|
||||||
|
.get(&builtin_id)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
error!(builtin_id, builtins =? stack_helper.builtins, "opa_builtin0: cannot find builtin");
|
||||||
|
BurregoError::BuiltinNotImplementedError(
|
||||||
|
format!("opa_bunltin1: cannot find builtin {}", builtin_id))
|
||||||
|
})?.clone();
|
||||||
|
|
||||||
|
let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
|
||||||
|
let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;
|
||||||
|
|
||||||
|
|
||||||
|
let p1 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p1)?;
|
||||||
|
let args = vec![p1];
|
||||||
|
|
||||||
|
let builtin_helper = BUILTINS_HELPER
|
||||||
|
.read()
|
||||||
|
.map_err(|e| BurregoError::RegoWasmError(format!("Cannot access global builtin helper: {:?}", e)))?;
|
||||||
|
|
||||||
|
let builtin_result = builtin_helper
|
||||||
|
.invoke(&builtin_name, &args)?;
|
||||||
|
|
||||||
|
let addr = StackHelper::push_json(
|
||||||
|
caller.as_context_mut(),
|
||||||
|
&memory,
|
||||||
|
opa_malloc_fn,
|
||||||
|
opa_json_parse_fn,
|
||||||
|
&builtin_result,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(addr)
|
||||||
|
},
|
||||||
|
).map_err(|e| BurregoError::BuiltinError{
|
||||||
|
name: "opa_bunltin1".to_string(),
|
||||||
|
message: e.to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// env.opa_builtin2 (builtin_id, ctx, _1, _2) addr
|
||||||
|
/// Same as previous except the function accepts 2 arguments.
|
||||||
|
fn register_opa_builtin2_func(
|
||||||
|
linker: &mut Linker<Option<StackHelper>>,
|
||||||
|
) -> Result<&mut Linker<Option<StackHelper>>> {
|
||||||
|
linker.func_wrap(
|
||||||
|
"env",
|
||||||
|
"opa_builtin2",
|
||||||
|
move |mut caller: Caller<'_, Option<StackHelper>>,
|
||||||
|
builtin_id: i32,
|
||||||
|
_ctx: i32,
|
||||||
|
p1: i32,
|
||||||
|
p2: i32| {
|
||||||
|
debug!(builtin_id, p1, p2, "opa_builtin2");
|
||||||
|
|
||||||
|
let stack_helper = caller.data().as_ref().unwrap();
|
||||||
|
let opa_malloc_fn = stack_helper.opa_malloc_fn;
|
||||||
|
let opa_json_parse_fn = stack_helper.opa_json_parse_fn;
|
||||||
|
let opa_json_dump_fn = stack_helper.opa_json_dump_fn;
|
||||||
|
let builtin_name = stack_helper
|
||||||
|
.builtins
|
||||||
|
.get(&builtin_id)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
error!(builtin_id, builtins =? stack_helper.builtins, "opa_builtin0: cannot find builtin");
|
||||||
|
BurregoError::BuiltinNotImplementedError(format!("opa_builtin2: cannot find builtin {}", builtin_id))
|
||||||
|
})?.clone();
|
||||||
|
|
||||||
|
let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
|
||||||
|
let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;
|
||||||
|
|
||||||
|
let p1 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p1)?;
|
||||||
|
let p2 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p2)?;
|
||||||
|
|
||||||
|
let args = vec![p1, p2];
|
||||||
|
|
||||||
|
let builtin_helper = BUILTINS_HELPER
|
||||||
|
.read()
|
||||||
|
.map_err(|e| BurregoError::RegoWasmError(format!("Cannot access global builtin helper: {:?}", e)))?;
|
||||||
|
|
||||||
|
let builtin_result = builtin_helper.invoke(&builtin_name, &args)?;
|
||||||
|
|
||||||
|
let addr = StackHelper::push_json(
|
||||||
|
caller.as_context_mut(),
|
||||||
|
&memory,
|
||||||
|
opa_malloc_fn,
|
||||||
|
opa_json_parse_fn,
|
||||||
|
&builtin_result,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(addr)
|
||||||
|
},
|
||||||
|
).map_err(|e| BurregoError::BuiltinError{
|
||||||
|
name: "opa_builtin2".to_string(),
|
||||||
|
message: e.to_string()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// env.opa_builtin3 (builtin_id, ctx, _1, _2, _3) addr
|
||||||
|
/// Same as previous except the function accepts 3 arguments.
|
||||||
|
fn register_opa_builtin3_func(
|
||||||
|
linker: &mut Linker<Option<StackHelper>>,
|
||||||
|
) -> Result<&mut Linker<Option<StackHelper>>> {
|
||||||
|
linker.func_wrap(
|
||||||
|
"env",
|
||||||
|
"opa_builtin3",
|
||||||
|
move |mut caller: Caller<'_, Option<StackHelper>>,
|
||||||
|
builtin_id: i32,
|
||||||
|
_ctx: i32,
|
||||||
|
p1: i32,
|
||||||
|
p2: i32,
|
||||||
|
p3: i32| {
|
||||||
|
debug!(builtin_id, p1, p2, p3, "opa_builtin3");
|
||||||
|
|
||||||
|
let stack_helper = caller.data().as_ref().unwrap();
|
||||||
|
let opa_malloc_fn = stack_helper.opa_malloc_fn;
|
||||||
|
let opa_json_parse_fn = stack_helper.opa_json_parse_fn;
|
||||||
|
let opa_json_dump_fn = stack_helper.opa_json_dump_fn;
|
||||||
|
let builtin_name = stack_helper
|
||||||
|
.builtins
|
||||||
|
.get(&builtin_id)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
error!(builtin_id, builtins =? stack_helper.builtins, "opa_builtin0: cannot find builtin");
|
||||||
|
BurregoError::BuiltinNotImplementedError(format!("opa_builtin3: cannot find builtin {}", builtin_id))
|
||||||
|
})?.clone();
|
||||||
|
|
||||||
|
let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
|
||||||
|
let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;
|
||||||
|
|
||||||
|
let p1 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p1)?;
|
||||||
|
let p2 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p2)?;
|
||||||
|
let p3 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p3)?;
|
||||||
|
|
||||||
|
let args = vec![p1, p2, p3];
|
||||||
|
|
||||||
|
let builtin_helper = BUILTINS_HELPER
|
||||||
|
.read()
|
||||||
|
.map_err(|e| BurregoError::RegoWasmError(format!("Cannot access global builtin helper: {:?}", e)))?;
|
||||||
|
|
||||||
|
let builtin_result = builtin_helper.invoke(&builtin_name, &args)?;
|
||||||
|
|
||||||
|
let addr = StackHelper::push_json(
|
||||||
|
caller.as_context_mut(),
|
||||||
|
&memory,
|
||||||
|
opa_malloc_fn,
|
||||||
|
opa_json_parse_fn,
|
||||||
|
&builtin_result,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(addr)
|
||||||
|
},
|
||||||
|
).map_err(|e| BurregoError::BuiltinError{
|
||||||
|
name: "opa_builtin3".to_string(),
|
||||||
|
message: e.to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// env.opa_builtin4 (builtin_id, ctx, _1, _2, _3, _4) addr
|
||||||
|
/// Same as previous except the function accepts 4 arguments.
|
||||||
|
fn register_opa_builtin4_func(
|
||||||
|
linker: &mut Linker<Option<StackHelper>>,
|
||||||
|
) -> Result<&mut Linker<Option<StackHelper>>> {
|
||||||
|
linker.func_wrap(
|
||||||
|
"env",
|
||||||
|
"opa_builtin4",
|
||||||
|
move |mut caller: Caller<'_, Option<StackHelper>>,
|
||||||
|
builtin_id: i32,
|
||||||
|
_ctx: i32,
|
||||||
|
p1: i32,
|
||||||
|
p2: i32,
|
||||||
|
p3: i32,
|
||||||
|
p4: i32| {
|
||||||
|
debug!(builtin_id, p1, p2, p3, p4, "opa_builtin4");
|
||||||
|
|
||||||
|
let stack_helper = caller.data().as_ref().unwrap();
|
||||||
|
let opa_malloc_fn = stack_helper.opa_malloc_fn;
|
||||||
|
let opa_json_parse_fn = stack_helper.opa_json_parse_fn;
|
||||||
|
let opa_json_dump_fn = stack_helper.opa_json_dump_fn;
|
||||||
|
let builtin_name = stack_helper
|
||||||
|
.builtins
|
||||||
|
.get(&builtin_id)
|
||||||
|
.ok_or_else(|| {
|
||||||
|
error!(builtin_id, builtins =? stack_helper.builtins, "opa_builtin0: cannot find builtin");
|
||||||
|
BurregoError::BuiltinNotImplementedError(format!("opa_builtin4: cannot find builtin {}", builtin_id))
|
||||||
|
})?.clone();
|
||||||
|
|
||||||
|
let memory_export = caller.get_export("memory").ok_or_else(|| BurregoError::RegoWasmError("cannot find 'memory' export".to_string()))?;
|
||||||
|
let memory = memory_export.into_memory().ok_or_else(|| BurregoError::RegoWasmError("'memory' export cannot be converted into a memory object".to_string()))?;
|
||||||
|
|
||||||
|
let p1 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p1)?;
|
||||||
|
let p2 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p2)?;
|
||||||
|
let p3 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p3)?;
|
||||||
|
let p4 =
|
||||||
|
StackHelper::pull_json(caller.as_context_mut(), &memory, opa_json_dump_fn, p4)?;
|
||||||
|
|
||||||
|
let args = vec![p1, p2, p3, p4];
|
||||||
|
|
||||||
|
let builtin_helper = BUILTINS_HELPER
|
||||||
|
.read()
|
||||||
|
.map_err(|e| BurregoError::RegoWasmError(format!("Cannot access global builtin helper: {:?}", e)))?;
|
||||||
|
|
||||||
|
let builtin_result = builtin_helper.invoke(&builtin_name, &args)?;
|
||||||
|
|
||||||
|
let addr = StackHelper::push_json(
|
||||||
|
caller.as_context_mut(),
|
||||||
|
&memory,
|
||||||
|
opa_malloc_fn,
|
||||||
|
opa_json_parse_fn,
|
||||||
|
&builtin_result,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(addr)
|
||||||
|
},
|
||||||
|
).map_err(|e| BurregoError::BuiltinError{
|
||||||
|
name: "opa_builtin4".to_string(),
|
||||||
|
message: e.to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
306
crates/burrego/src/policy.rs
Normal file
306
crates/burrego/src/policy.rs
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use crate::stack_helper::StackHelper;
|
||||||
|
use serde_json::json;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::convert::TryFrom;
|
||||||
|
use wasmtime::{AsContextMut, Instance, Memory, TypedFunc};
|
||||||
|
|
||||||
|
/// Handle errors returned when calling a wasmtime function.
/// The macro looks into the error type and, when an epoch interruption
/// happens, maps the error to BurregoError::ExecutionDeadlineExceeded;
/// everything else becomes a WasmEngineError carrying the given message.
macro_rules! map_call_error {
    ($err:expr, $msg:expr) => {{
        match $err.downcast_ref::<wasmtime::Trap>() {
            Some(wasmtime::Trap::Interrupt) => BurregoError::ExecutionDeadlineExceeded,
            _ => BurregoError::WasmEngineError(format!("{}: {:?}", $msg, $err)),
        }
    }};
}
|
||||||
|
|
||||||
|
/// Typed handles to the exports of an OPA-compiled Wasm policy, plus the
/// bookkeeping needed to manage its data document.
pub(crate) struct Policy {
    // Guest exports, as documented by the OPA Wasm ABI
    // (https://www.openpolicyagent.org/docs/latest/wasm/#exports).
    builtins_fn: TypedFunc<(), i32>,
    entrypoints_fn: TypedFunc<(), i32>,
    opa_heap_ptr_get_fn: TypedFunc<(), i32>,
    opa_heap_ptr_set_fn: TypedFunc<i32, ()>,
    opa_eval_ctx_new_fn: TypedFunc<(), i32>,
    opa_eval_ctx_set_input_fn: TypedFunc<(i32, i32), ()>,
    opa_eval_ctx_set_data_fn: TypedFunc<(i32, i32), ()>,
    opa_eval_ctx_set_entrypoint_fn: TypedFunc<(i32, i32), ()>,
    opa_eval_ctx_get_result_fn: TypedFunc<i32, i32>,
    opa_json_dump_fn: TypedFunc<i32, i32>,
    opa_malloc_fn: TypedFunc<i32, i32>,
    opa_json_parse_fn: TypedFunc<(i32, i32), i32>,
    eval_fn: TypedFunc<i32, i32>,

    // Address of the data document inside guest memory (set by `new`).
    data_addr: i32,
    // Guest heap pointer captured right after instantiation; used by
    // set_data to rewind the heap before loading fresh data.
    base_heap_ptr: i32,
    data_heap_ptr: i32,
}
|
||||||
|
|
||||||
|
impl Policy {
|
||||||
|
pub fn new(
|
||||||
|
instance: &Instance,
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
) -> Result<Policy> {
|
||||||
|
let mut policy = Policy {
|
||||||
|
builtins_fn: instance
|
||||||
|
.get_typed_func::<(), i32>(store.as_context_mut(), "builtins")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("cannot get builtins function: {:?}", e))
|
||||||
|
})?,
|
||||||
|
entrypoints_fn: instance
|
||||||
|
.get_typed_func::<(), i32>(store.as_context_mut(), "entrypoints")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("cannot get entrypoints function: {:?}", e))
|
||||||
|
})?,
|
||||||
|
opa_heap_ptr_get_fn: instance
|
||||||
|
.get_typed_func::<(), i32>(store.as_context_mut(), "opa_heap_ptr_get")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_heap_ptr_get function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_heap_ptr_set_fn: instance
|
||||||
|
.get_typed_func::<i32, ()>(store.as_context_mut(), "opa_heap_ptr_set")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_heap_ptr_set function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_eval_ctx_new_fn: instance
|
||||||
|
.get_typed_func::<(), i32>(store.as_context_mut(), "opa_eval_ctx_new")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_eval_ctx_new function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_eval_ctx_set_input_fn: instance
|
||||||
|
.get_typed_func::<(i32, i32), ()>(store.as_context_mut(), "opa_eval_ctx_set_input")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_eval_ctx_set_input function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_eval_ctx_set_data_fn: instance
|
||||||
|
.get_typed_func::<(i32, i32), ()>(store.as_context_mut(), "opa_eval_ctx_set_data")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_eval_ctx_set_data function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_eval_ctx_set_entrypoint_fn: instance
|
||||||
|
.get_typed_func::<(i32, i32), ()>(
|
||||||
|
store.as_context_mut(),
|
||||||
|
"opa_eval_ctx_set_entrypoint",
|
||||||
|
)
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_eval_ctx_set_entrypoint function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_eval_ctx_get_result_fn: instance
|
||||||
|
.get_typed_func::<i32, i32>(store.as_context_mut(), "opa_eval_ctx_get_result")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_eval_ctx_get_result function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_json_dump_fn: instance
|
||||||
|
.get_typed_func::<i32, i32>(store.as_context_mut(), "opa_json_dump")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_json_dump function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
opa_malloc_fn: instance
|
||||||
|
.get_typed_func::<i32, i32>(store.as_context_mut(), "opa_malloc")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("cannot get opa_malloc function: {:?}", e))
|
||||||
|
})?,
|
||||||
|
opa_json_parse_fn: instance
|
||||||
|
.get_typed_func::<(i32, i32), i32>(store.as_context_mut(), "opa_json_parse")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"cannot get opa_json_parse function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?,
|
||||||
|
eval_fn: instance
|
||||||
|
.get_typed_func::<i32, i32>(store.as_context_mut(), "eval")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("cannot get eval function: {:?}", e))
|
||||||
|
})?,
|
||||||
|
data_addr: 0,
|
||||||
|
base_heap_ptr: 0,
|
||||||
|
data_heap_ptr: 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
// init data
|
||||||
|
let initial_data = json!({});
|
||||||
|
policy.data_addr = StackHelper::push_json(
|
||||||
|
store.as_context_mut(),
|
||||||
|
memory,
|
||||||
|
policy.opa_malloc_fn,
|
||||||
|
policy.opa_json_parse_fn,
|
||||||
|
&initial_data,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
policy.base_heap_ptr = policy
|
||||||
|
.opa_heap_ptr_get_fn
|
||||||
|
.call(store.as_context_mut(), ())
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_heap_ptr_get function"))?;
|
||||||
|
policy.data_heap_ptr = policy.base_heap_ptr;
|
||||||
|
|
||||||
|
Ok(policy)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn builtins(
|
||||||
|
&self,
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
) -> Result<HashMap<String, i32>> {
|
||||||
|
let addr = self
|
||||||
|
.builtins_fn
|
||||||
|
.call(store.as_context_mut(), ())
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking builtins function"))?;
|
||||||
|
|
||||||
|
let builtins: HashMap<String, i32> =
|
||||||
|
StackHelper::pull_json(store, memory, self.opa_json_dump_fn, addr)?
|
||||||
|
.as_object()
|
||||||
|
.ok_or_else(|| {
|
||||||
|
BurregoError::RegoWasmError(
|
||||||
|
"OPA builtins didn't return a dictionary".to_string(),
|
||||||
|
)
|
||||||
|
})?
|
||||||
|
.iter()
|
||||||
|
.map(|(k, v)| {
|
||||||
|
let id = v.as_i64().unwrap() as i32;
|
||||||
|
let builtin = String::from(k.as_str());
|
||||||
|
(builtin, id)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
Ok(builtins)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn entrypoints(
|
||||||
|
&self,
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
) -> Result<HashMap<String, i32>> {
|
||||||
|
let addr = self
|
||||||
|
.entrypoints_fn
|
||||||
|
.call(store.as_context_mut(), ())
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking entrypoints function"))?;
|
||||||
|
let res =
|
||||||
|
StackHelper::pull_json(store.as_context_mut(), memory, self.opa_json_dump_fn, addr)?
|
||||||
|
.as_object()
|
||||||
|
.ok_or_else(|| {
|
||||||
|
BurregoError::RegoWasmError(
|
||||||
|
"OPA entrypoints didn't return a dictionary".to_string(),
|
||||||
|
)
|
||||||
|
})?
|
||||||
|
.iter()
|
||||||
|
.map(|(k, v)| {
|
||||||
|
let id = v.as_i64().unwrap();
|
||||||
|
let entrypoint = String::from(k.as_str());
|
||||||
|
(entrypoint, i32::try_from(id).unwrap())
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_data(
|
||||||
|
&mut self,
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
data: &serde_json::Value,
|
||||||
|
) -> Result<()> {
|
||||||
|
self.opa_heap_ptr_set_fn
|
||||||
|
.call(store.as_context_mut(), self.base_heap_ptr)
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_heap_ptr_set function"))?;
|
||||||
|
self.data_addr = StackHelper::push_json(
|
||||||
|
store.as_context_mut(),
|
||||||
|
memory,
|
||||||
|
self.opa_malloc_fn,
|
||||||
|
self.opa_json_parse_fn,
|
||||||
|
data,
|
||||||
|
)?;
|
||||||
|
self.data_heap_ptr = self
|
||||||
|
.opa_heap_ptr_get_fn
|
||||||
|
.call(store.as_context_mut(), ())
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_heap_ptr_get function"))?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn evaluate(
|
||||||
|
&self,
|
||||||
|
entrypoint_id: i32,
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
input: &serde_json::Value,
|
||||||
|
) -> Result<serde_json::Value> {
|
||||||
|
// Reset the heap pointer before each evaluation
|
||||||
|
self.opa_heap_ptr_set_fn
|
||||||
|
.call(store.as_context_mut(), self.data_heap_ptr)
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_heap_ptr_set function"))?;
|
||||||
|
|
||||||
|
// Load the input data
|
||||||
|
let input_addr = StackHelper::push_json(
|
||||||
|
store.as_context_mut(),
|
||||||
|
memory,
|
||||||
|
self.opa_malloc_fn,
|
||||||
|
self.opa_json_parse_fn,
|
||||||
|
input,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Setup the evaluation context
|
||||||
|
let ctx_addr = self
|
||||||
|
.opa_eval_ctx_new_fn
|
||||||
|
.call(store.as_context_mut(), ())
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_eval_ctx_new function"))?;
|
||||||
|
self.opa_eval_ctx_set_input_fn
|
||||||
|
.call(store.as_context_mut(), (ctx_addr, input_addr))
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_eval_ctx_set_input function"))?;
|
||||||
|
self.opa_eval_ctx_set_data_fn
|
||||||
|
.call(store.as_context_mut(), (ctx_addr, self.data_addr))
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_eval_ctx_set_data function"))?;
|
||||||
|
self.opa_eval_ctx_set_entrypoint_fn
|
||||||
|
.call(store.as_context_mut(), (ctx_addr, entrypoint_id))
|
||||||
|
.map_err(|e| {
|
||||||
|
map_call_error!(e, "error invoking opa_eval_ctx_set_entrypoint function")
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Perform evaluation
|
||||||
|
self.eval_fn
|
||||||
|
.call(store.as_context_mut(), ctx_addr)
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_eval function"))?;
|
||||||
|
|
||||||
|
// Retrieve the result
|
||||||
|
let res_addr = self
|
||||||
|
.opa_eval_ctx_get_result_fn
|
||||||
|
.call(store.as_context_mut(), ctx_addr)
|
||||||
|
.map_err(|e| map_call_error!(e, "error invoking opa_eval_ctx_get_result function"))?;
|
||||||
|
|
||||||
|
StackHelper::pull_json(
|
||||||
|
store.as_context_mut(),
|
||||||
|
memory,
|
||||||
|
self.opa_json_dump_fn,
|
||||||
|
res_addr,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
193
crates/burrego/src/stack_helper.rs
Normal file
193
crates/burrego/src/stack_helper.rs
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
use crate::errors::{BurregoError, Result};
|
||||||
|
use crate::host_callbacks;
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::convert::TryInto;
|
||||||
|
use wasmtime::{AsContext, AsContextMut, Instance, Memory, TypedFunc};
|
||||||
|
|
||||||
|
/// StackHelper provides a set of helper methods to share data
|
||||||
|
/// between the host and the Rego Wasm guest
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub(crate) struct StackHelper {
|
||||||
|
pub(crate) opa_json_dump_fn: TypedFunc<i32, i32>,
|
||||||
|
pub(crate) opa_malloc_fn: TypedFunc<i32, i32>,
|
||||||
|
pub(crate) opa_json_parse_fn: TypedFunc<(i32, i32), i32>,
|
||||||
|
|
||||||
|
pub(crate) opa_abort_host_callback: host_callbacks::HostCallback,
|
||||||
|
pub(crate) opa_println_host_callback: host_callbacks::HostCallback,
|
||||||
|
|
||||||
|
pub(crate) builtins: HashMap<i32, String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl StackHelper {
|
||||||
|
pub fn new(
|
||||||
|
instance: &Instance,
|
||||||
|
memory: &Memory,
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
opa_abort_host_callback: host_callbacks::HostCallback,
|
||||||
|
opa_println_host_callback: host_callbacks::HostCallback,
|
||||||
|
) -> Result<StackHelper> {
|
||||||
|
let opa_json_dump_fn = instance
|
||||||
|
.get_typed_func::<i32, i32>(store.as_context_mut(), "opa_json_dump")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("cannot access opa_json_dump fuction: {:?}", e))
|
||||||
|
})?;
|
||||||
|
let opa_malloc_fn = instance
|
||||||
|
.get_typed_func::<i32, i32>(store.as_context_mut(), "opa_malloc")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("Cannot access opa_malloc fuction: {:?}", e))
|
||||||
|
})?;
|
||||||
|
let opa_json_parse_fn = instance
|
||||||
|
.get_typed_func::<(i32, i32), i32>(store.as_context_mut(), "opa_json_parse")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!(
|
||||||
|
"Cannot access opa_json_parse fuction: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let builtins_fn = instance
|
||||||
|
.get_typed_func::<(), i32>(store.as_context_mut(), "builtins")
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::RegoWasmError(format!("cannot access builtins function: {:?}", e))
|
||||||
|
})?;
|
||||||
|
let addr = builtins_fn.call(store.as_context_mut(), ()).map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("cannot invoke builtins function: {:?}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let builtins: HashMap<i32, String> =
|
||||||
|
StackHelper::pull_json(store, memory, opa_json_dump_fn, addr)?
|
||||||
|
.as_object()
|
||||||
|
.ok_or_else(|| {
|
||||||
|
BurregoError::RegoWasmError(
|
||||||
|
"OPA builtins didn't return a dictionary".to_string(),
|
||||||
|
)
|
||||||
|
})?
|
||||||
|
.iter()
|
||||||
|
.map(|(k, v)| {
|
||||||
|
let id = v.as_i64().unwrap() as i32;
|
||||||
|
let builtin = String::from(k.as_str());
|
||||||
|
(id, builtin)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(StackHelper {
|
||||||
|
opa_json_dump_fn,
|
||||||
|
opa_malloc_fn,
|
||||||
|
opa_json_parse_fn,
|
||||||
|
builtins,
|
||||||
|
opa_abort_host_callback,
|
||||||
|
opa_println_host_callback,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a string from the Wasm guest into the host
|
||||||
|
/// # Arguments
|
||||||
|
/// * `store` - the Store associated with the Wasm instance
|
||||||
|
/// * `memory` - the Wasm linear memory used by the Wasm Instance
|
||||||
|
/// * `addr` - address inside of the Wasm linear memory where the value is stored
|
||||||
|
/// # Returns
|
||||||
|
/// * The data read
|
||||||
|
pub fn read_string(store: impl AsContext, memory: &Memory, addr: i32) -> Result<Vec<u8>> {
|
||||||
|
let mut buffer: [u8; 1] = [0u8];
|
||||||
|
let mut data: Vec<u8> = vec![];
|
||||||
|
let mut raw_addr = addr;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
memory
|
||||||
|
.read(&store, raw_addr.try_into().unwrap(), &mut buffer)
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("cannot read from memory: {:?}", e))
|
||||||
|
})?;
|
||||||
|
if buffer[0] == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
data.push(buffer[0]);
|
||||||
|
raw_addr += 1;
|
||||||
|
}
|
||||||
|
Ok(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Pull a JSON data from the Wasm guest into the host
|
||||||
|
/// # Arguments
|
||||||
|
/// * `store` - the Store associated with the Wasm instance
|
||||||
|
/// * `memory` - the Wasm linear memory used by the Wasm Instance
|
||||||
|
/// * `opa_json_dump_fn` - the `opa_json_dump` function exported by the wasm guest
|
||||||
|
/// * `addr` - address inside of the Wasm linear memory where the value is stored
|
||||||
|
/// # Returns
|
||||||
|
/// * The JSON data read
|
||||||
|
pub fn pull_json(
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
opa_json_dump_fn: TypedFunc<i32, i32>,
|
||||||
|
addr: i32,
|
||||||
|
) -> Result<serde_json::Value> {
|
||||||
|
let raw_addr = opa_json_dump_fn
|
||||||
|
.call(store.as_context_mut(), addr)
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!(
|
||||||
|
"cannot invoke opa_json_dump function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
let data = StackHelper::read_string(store, memory, raw_addr)?;
|
||||||
|
|
||||||
|
serde_json::from_slice(&data).map_err(|e| {
|
||||||
|
BurregoError::JSONError(format!(
|
||||||
|
"cannot convert data read from memory into utf8 String: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Push a JSON data from the host into the Wasm guest
|
||||||
|
/// # Arguments
|
||||||
|
/// * `store` - the Store associated with the Wasm instance
|
||||||
|
/// * `memory` - the Wasm linear memory used by the Wasm Instance
|
||||||
|
/// * `opa_malloc_fn` - the `opa_malloc` function exported by the wasm guest
|
||||||
|
/// * `opa_json_parse_fn` - the `opa_json_parse` function exported by the wasm guest
|
||||||
|
/// * `value` - the JSON data to push into the Wasm guest
|
||||||
|
/// # Returns
|
||||||
|
/// * Address inside of the Wasm linear memory where the value has been stored
|
||||||
|
pub fn push_json(
|
||||||
|
mut store: impl AsContextMut,
|
||||||
|
memory: &Memory,
|
||||||
|
opa_malloc_fn: TypedFunc<i32, i32>,
|
||||||
|
opa_json_parse_fn: TypedFunc<(i32, i32), i32>,
|
||||||
|
value: &serde_json::Value,
|
||||||
|
) -> Result<i32> {
|
||||||
|
let data = serde_json::to_vec(&value).map_err(|e| {
|
||||||
|
BurregoError::JSONError(format!("push_json: cannot convert value to json: {:?}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let data_size: i32 = data.len().try_into().map_err(|e| {
|
||||||
|
BurregoError::JSONError(format!("push_json: cannot convert size to json: {:?}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// allocate memory to fit the value
|
||||||
|
let raw_addr = opa_malloc_fn
|
||||||
|
.call(store.as_context_mut(), data_size)
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!(
|
||||||
|
"push_json: cannot invoke opa_malloc function: {:?}",
|
||||||
|
e
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
memory
|
||||||
|
.write(store.as_context_mut(), raw_addr.try_into().unwrap(), &data)
|
||||||
|
.map_err(|e| {
|
||||||
|
BurregoError::WasmEngineError(format!("push_json: cannot write to memory: {:?}", e))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
match opa_json_parse_fn.call(store.as_context_mut(), (raw_addr, data_size)) {
|
||||||
|
Ok(0) => Err(BurregoError::RegoWasmError(
|
||||||
|
"Failed to load json in memory".to_string(),
|
||||||
|
)),
|
||||||
|
Ok(addr) => Ok(addr),
|
||||||
|
Err(e) => Err(BurregoError::RegoWasmError(format!(
|
||||||
|
"Cannot get memory address: {:?}",
|
||||||
|
e
|
||||||
|
))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
10
crates/burrego/test_data/gatekeeper/Makefile
Normal file
10
crates/burrego/test_data/gatekeeper/Makefile
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
test: policy.wasm
|
||||||
|
bats e2e.bats
|
||||||
|
|
||||||
|
policy.wasm: policy.rego
|
||||||
|
opa build -t wasm -e policy/violation -o policy.tar.gz policy.rego
|
||||||
|
tar -xf policy.tar.gz /policy.wasm
|
||||||
|
rm policy.tar.gz
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -f *.wasm *.tar.gz
|
||||||
21
crates/burrego/test_data/gatekeeper/e2e.bats
Normal file
21
crates/burrego/test_data/gatekeeper/e2e.bats
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
@test "[accept in namespace]: valid namespace" {
|
||||||
|
run cargo run --example cli -- -v eval policy.wasm --input-path request-valid.json
|
||||||
|
# this prints the output when one the checks below fails
|
||||||
|
echo "output = ${output}"
|
||||||
|
|
||||||
|
# request accepted
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[ $(expr "$output" : '.*"result":.*\[\]') -ne 0 ]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "[accept in namespace]: not valid namespace" {
|
||||||
|
run cargo run --example cli -- -v eval policy.wasm --input-path request-not-valid.json
|
||||||
|
# this prints the output when one the checks below fails
|
||||||
|
echo "output = ${output}"
|
||||||
|
|
||||||
|
# request accepted
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[ $(expr "$output" : '.*"msg": "object created under an invalid namespace kube-system; allowed namespaces are \[default test\]"') -ne 0 ]
|
||||||
|
}
|
||||||
8
crates/burrego/test_data/gatekeeper/policy.rego
Normal file
8
crates/burrego/test_data/gatekeeper/policy.rego
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
package policy
|
||||||
|
|
||||||
|
violation[{"msg": msg}] {
|
||||||
|
object_namespace := input.review.object.metadata.namespace
|
||||||
|
satisfied := [allowed_namespace | namespace = input.parameters.allowed_namespaces[_]; allowed_namespace = object_namespace == namespace]
|
||||||
|
not any(satisfied)
|
||||||
|
msg := sprintf("object created under an invalid namespace %s; allowed namespaces are %v", [object_namespace, input.parameters.allowed_namespaces])
|
||||||
|
}
|
||||||
26
crates/burrego/test_data/gatekeeper/request-not-valid.json
Normal file
26
crates/burrego/test_data/gatekeeper/request-not-valid.json
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
{
|
||||||
|
"parameters": {
|
||||||
|
"allowed_namespaces": [
|
||||||
|
"default",
|
||||||
|
"test"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"review": {
|
||||||
|
"uid": "1299d386-525b-4032-98ae-1949f69f9cfc",
|
||||||
|
"kind": {
|
||||||
|
"group": "networking.k8s.io",
|
||||||
|
"kind": "Ingress",
|
||||||
|
"version": "v1"
|
||||||
|
},
|
||||||
|
"object": {
|
||||||
|
"apiVersion": "networking.k8s.io/v1",
|
||||||
|
"kind": "Ingress",
|
||||||
|
"metadata": {
|
||||||
|
"name": "ingress-wildcard-host",
|
||||||
|
"namespace": "kube-system"
|
||||||
|
},
|
||||||
|
"spec": {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
26
crates/burrego/test_data/gatekeeper/request-valid.json
Normal file
26
crates/burrego/test_data/gatekeeper/request-valid.json
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
{
|
||||||
|
"parameters": {
|
||||||
|
"allowed_namespaces": [
|
||||||
|
"default",
|
||||||
|
"test"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"review": {
|
||||||
|
"uid": "1299d386-525b-4032-98ae-1949f69f9cfc",
|
||||||
|
"kind": {
|
||||||
|
"group": "networking.k8s.io",
|
||||||
|
"kind": "Ingress",
|
||||||
|
"version": "v1"
|
||||||
|
},
|
||||||
|
"object": {
|
||||||
|
"apiVersion": "networking.k8s.io/v1",
|
||||||
|
"kind": "Ingress",
|
||||||
|
"metadata": {
|
||||||
|
"name": "ingress-wildcard-host",
|
||||||
|
"namespace": "default"
|
||||||
|
},
|
||||||
|
"spec": {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
10
crates/burrego/test_data/trace/Makefile
Normal file
10
crates/burrego/test_data/trace/Makefile
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
test: policy.wasm
|
||||||
|
bats e2e.bats
|
||||||
|
|
||||||
|
policy.wasm: policy.rego
|
||||||
|
opa build -t wasm -e policy/main -o policy.tar.gz policy.rego
|
||||||
|
tar -xf policy.tar.gz /policy.wasm
|
||||||
|
rm policy.tar.gz
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -f *.wasm *.tar.gz
|
||||||
23
crates/burrego/test_data/trace/e2e.bats
Normal file
23
crates/burrego/test_data/trace/e2e.bats
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
#!/usr/bin/env bats
|
||||||
|
|
||||||
|
@test "input message is not valid" {
|
||||||
|
run cargo run --example cli -- -v eval policy.wasm -i '{ "message": "mondo" }'
|
||||||
|
# this prints the output when one the checks below fails
|
||||||
|
echo "output = ${output}"
|
||||||
|
|
||||||
|
# request rejected
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[ $(expr "$output" : '.*"result":.*false') -ne 0 ]
|
||||||
|
[ $(expr "$output" : ".*input\.message has been set to 'mondo'") -ne 0 ]
|
||||||
|
}
|
||||||
|
|
||||||
|
@test "input message is valid" {
|
||||||
|
run cargo run --example cli -- -v eval policy.wasm -i '{ "message": "world" }'
|
||||||
|
# this prints the output when one the checks below fails
|
||||||
|
echo "output = ${output}"
|
||||||
|
|
||||||
|
# request rejected
|
||||||
|
[ "$status" -eq 0 ]
|
||||||
|
[ $(expr "$output" : '.*"result":.*true') -ne 0 ]
|
||||||
|
[ $(expr "$output" : ".*input\.message has been set to 'world'") -ne 0 ]
|
||||||
|
}
|
||||||
9
crates/burrego/test_data/trace/policy.rego
Normal file
9
crates/burrego/test_data/trace/policy.rego
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
package policy
|
||||||
|
|
||||||
|
default main = false
|
||||||
|
|
||||||
|
main {
|
||||||
|
trace(sprintf("input.message has been set to '%v'", [input.message]));
|
||||||
|
m := input.message;
|
||||||
|
m == "world"
|
||||||
|
}
|
||||||
24
renovate.json
Normal file
24
renovate.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"extends": [
|
||||||
|
"config:base",
|
||||||
|
"schedule:earlyMondays"
|
||||||
|
],
|
||||||
|
"labels": ["dependencies"],
|
||||||
|
"lockFileMaintenance": { "enabled": true },
|
||||||
|
"major": { "enabled": false },
|
||||||
|
"minor": { "enabled": false },
|
||||||
|
"patch": { "enabled": true },
|
||||||
|
"packageRules": [
|
||||||
|
{
|
||||||
|
"matchPackagePatterns": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"matchUpdateTypes": [
|
||||||
|
"patch"
|
||||||
|
],
|
||||||
|
"groupName": "all patchlevel dependencies",
|
||||||
|
"groupSlug": "all-patch"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"rebaseWhen": "behind-base-branch"
|
||||||
|
}
|
||||||
302
src/admission_response.rs
Normal file
302
src/admission_response.rs
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use base64::{engine::general_purpose, Engine as _};
|
||||||
|
use kubewarden_policy_sdk::response::ValidationResponse as PolicyValidationResponse;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
/// This models the admission/v1/AdmissionResponse object of Kubernetes
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Eq, Clone)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct AdmissionResponse {
|
||||||
|
/// UID is an identifier for the individual request/response.
|
||||||
|
/// This must be copied over from the corresponding AdmissionRequest.
|
||||||
|
pub uid: String,
|
||||||
|
|
||||||
|
/// Allowed indicates whether or not the admission request was permitted.
|
||||||
|
pub allowed: bool,
|
||||||
|
|
||||||
|
/// The type of Patch. Currently we only allow "JSONPatch".
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub patch_type: Option<String>,
|
||||||
|
|
||||||
|
/// The patch body. Currently we only support "JSONPatch" which implements RFC 6902.
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub patch: Option<String>,
|
||||||
|
|
||||||
|
/// Status contains extra details into why an admission request was denied.
|
||||||
|
/// This field IS NOT consulted in any way if "Allowed" is "true".
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub status: Option<AdmissionResponseStatus>,
|
||||||
|
|
||||||
|
/// AuditAnnotations is an unstructured key value map set by remote admission controller (e.g. error=image-blacklisted).
|
||||||
|
/// MutatingAdmissionWebhook and ValidatingAdmissionWebhook admission controller will prefix the keys with
|
||||||
|
/// admission webhook name (e.g. imagepolicy.example.com/error=image-blacklisted). AuditAnnotations will be provided by
|
||||||
|
/// the admission webhook to add additional context to the audit log for this request.
|
||||||
|
pub audit_annotations: Option<HashMap<String, String>>,
|
||||||
|
|
||||||
|
/// warnings is a list of warning messages to return to the requesting API client.
|
||||||
|
/// Warning messages describe a problem the client making the API request should correct or be aware of.
|
||||||
|
/// Limit warnings to 120 characters if possible.
|
||||||
|
/// Warnings over 256 characters and large numbers of warnings may be truncated.
|
||||||
|
pub warnings: Option<Vec<String>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Eq, Clone)]
|
||||||
|
pub struct AdmissionResponseStatus {
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub message: Option<String>,
|
||||||
|
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub code: Option<u16>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AdmissionResponse {
|
||||||
|
pub fn reject(uid: String, message: String, code: u16) -> AdmissionResponse {
|
||||||
|
AdmissionResponse {
|
||||||
|
uid,
|
||||||
|
allowed: false,
|
||||||
|
status: Some(AdmissionResponseStatus {
|
||||||
|
message: Some(message),
|
||||||
|
code: Some(code),
|
||||||
|
}),
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reject_internal_server_error(uid: String, message: String) -> AdmissionResponse {
|
||||||
|
AdmissionResponse::reject(uid, format!("internal server error: {}", message), 500)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_policy_validation_response(
|
||||||
|
uid: String,
|
||||||
|
req_obj: Option<&serde_json::Value>,
|
||||||
|
pol_val_resp: &PolicyValidationResponse,
|
||||||
|
) -> Result<AdmissionResponse> {
|
||||||
|
if pol_val_resp.mutated_object.is_some() && req_obj.is_none() {
|
||||||
|
let message = "Incoming object is null, which happens only with DELETE operations, but the policy is attempting a mutation. This is not allowed";
|
||||||
|
|
||||||
|
return Ok(AdmissionResponse {
|
||||||
|
uid,
|
||||||
|
allowed: false,
|
||||||
|
warnings: None,
|
||||||
|
audit_annotations: None,
|
||||||
|
patch_type: None,
|
||||||
|
patch: None,
|
||||||
|
status: Some(AdmissionResponseStatus {
|
||||||
|
message: Some(message.to_string()),
|
||||||
|
code: None,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let patch = match pol_val_resp.mutated_object.clone() {
|
||||||
|
Some(mut_obj) => {
|
||||||
|
let diff = json_patch::diff(req_obj.unwrap(), &mut_obj);
|
||||||
|
let empty_patch = json_patch::Patch(Vec::<json_patch::PatchOperation>::new());
|
||||||
|
if diff == empty_patch {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
let diff_str = serde_json::to_string(&diff)
|
||||||
|
.map(|s| general_purpose::STANDARD.encode(s))
|
||||||
|
.map_err(|e| anyhow!("cannot serialize JSONPatch: {:?}", e))?;
|
||||||
|
Some(diff_str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let patch_type: Option<String> = if patch.is_some() {
|
||||||
|
Some(String::from("JSONPatch"))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let status = if pol_val_resp.message.is_some() || pol_val_resp.code.is_some() {
|
||||||
|
Some(AdmissionResponseStatus {
|
||||||
|
message: pol_val_resp.message.clone(),
|
||||||
|
code: pol_val_resp.code,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(AdmissionResponse {
|
||||||
|
uid,
|
||||||
|
allowed: pol_val_resp.accepted,
|
||||||
|
warnings: pol_val_resp.warnings.clone(),
|
||||||
|
audit_annotations: pol_val_resp.audit_annotations.clone(),
|
||||||
|
patch_type,
|
||||||
|
patch,
|
||||||
|
status,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_reject_response() {
|
||||||
|
let uid = String::from("UID");
|
||||||
|
let message = String::from("test message");
|
||||||
|
let code: u16 = 500;
|
||||||
|
|
||||||
|
let response = AdmissionResponse::reject(uid.clone(), message.clone(), code);
|
||||||
|
assert_eq!(response.uid, uid);
|
||||||
|
assert_eq!(response.allowed, false);
|
||||||
|
assert_eq!(response.patch, None);
|
||||||
|
assert_eq!(response.patch_type, None);
|
||||||
|
|
||||||
|
let status = response.status.unwrap();
|
||||||
|
assert_eq!(status.code, Some(code));
|
||||||
|
assert_eq!(status.message, Some(message));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_from_policy_validation_response_and_mutated_object_is_none() {
|
||||||
|
let uid = String::from("UID");
|
||||||
|
let message = String::from("test message");
|
||||||
|
let code: u16 = 500;
|
||||||
|
|
||||||
|
let mut audit_annotations: HashMap<String, String> = HashMap::new();
|
||||||
|
audit_annotations.insert(String::from("key"), String::from("value"));
|
||||||
|
|
||||||
|
let warnings = vec![String::from("hello"), String::from("world")];
|
||||||
|
|
||||||
|
let pol_val_resp = PolicyValidationResponse {
|
||||||
|
accepted: false,
|
||||||
|
message: Some(message.clone()),
|
||||||
|
code: Some(code),
|
||||||
|
mutated_object: None,
|
||||||
|
audit_annotations: Some(audit_annotations.clone()),
|
||||||
|
warnings: Some(warnings.clone()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let req_obj = Some(json!({"hello": "world"}));
|
||||||
|
|
||||||
|
let response = AdmissionResponse::from_policy_validation_response(
|
||||||
|
uid.clone(),
|
||||||
|
req_obj.as_ref(),
|
||||||
|
&pol_val_resp,
|
||||||
|
);
|
||||||
|
assert!(response.is_ok());
|
||||||
|
let response = response.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(response.uid, uid);
|
||||||
|
assert_eq!(response.allowed, false);
|
||||||
|
assert_eq!(response.patch, None);
|
||||||
|
assert_eq!(response.patch_type, None);
|
||||||
|
assert_eq!(response.audit_annotations, Some(audit_annotations));
|
||||||
|
assert_eq!(response.warnings, Some(warnings));
|
||||||
|
|
||||||
|
let status = response.status.unwrap();
|
||||||
|
assert_eq!(status.code, Some(code));
|
||||||
|
assert_eq!(status.message, Some(message));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_from_policy_validation_response_and_mutated_object_is_not_different_from_original_one(
|
||||||
|
) {
|
||||||
|
// The Mutated Object should be `Some` only when the policy performs an actual
|
||||||
|
// mutation. However we have to play safe and ensure we can handle the case
|
||||||
|
// where a policy has a bug and by mistake returns a `mutated_object` that is
|
||||||
|
// equal to the original one
|
||||||
|
|
||||||
|
let uid = String::from("UID");
|
||||||
|
let req_obj = Some(json!({"hello": "world"}));
|
||||||
|
|
||||||
|
let pol_val_resp = PolicyValidationResponse {
|
||||||
|
accepted: true,
|
||||||
|
message: None,
|
||||||
|
code: None,
|
||||||
|
mutated_object: req_obj.clone(),
|
||||||
|
warnings: None,
|
||||||
|
audit_annotations: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let response = AdmissionResponse::from_policy_validation_response(
|
||||||
|
uid.clone(),
|
||||||
|
req_obj.as_ref(),
|
||||||
|
&pol_val_resp,
|
||||||
|
);
|
||||||
|
assert!(response.is_ok());
|
||||||
|
let response = response.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(response.uid, uid);
|
||||||
|
assert!(response.allowed);
|
||||||
|
assert!(response.status.is_none());
|
||||||
|
assert!(response.patch.is_none());
|
||||||
|
assert!(response.patch_type.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mutation_on_delete_operation_is_not_allowed() {
|
||||||
|
let uid = String::from("UID");
|
||||||
|
// DELETE operation have a null 'object'
|
||||||
|
let req_obj = None;
|
||||||
|
|
||||||
|
let pol_val_resp = PolicyValidationResponse {
|
||||||
|
accepted: true,
|
||||||
|
message: None,
|
||||||
|
code: None,
|
||||||
|
mutated_object: Some(json!({"hello": "world"})),
|
||||||
|
warnings: None,
|
||||||
|
audit_annotations: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let response =
|
||||||
|
AdmissionResponse::from_policy_validation_response(uid.clone(), req_obj, &pol_val_resp);
|
||||||
|
assert!(response.is_ok());
|
||||||
|
let response = response.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(response.uid, uid);
|
||||||
|
assert!(!response.allowed);
|
||||||
|
assert!(response.status.is_some());
|
||||||
|
assert!(response.patch.is_none());
|
||||||
|
assert!(response.patch_type.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_from_policy_validation_response_with_mutation() {
|
||||||
|
let uid = String::from("UID");
|
||||||
|
let req_obj = json!({"hello": "world"});
|
||||||
|
let mutated_obj = json!({
|
||||||
|
"hello": "world",
|
||||||
|
"ciao": "mondo",
|
||||||
|
});
|
||||||
|
let expected_diff = json_patch::diff(&req_obj, &mutated_obj);
|
||||||
|
|
||||||
|
let pol_val_resp = PolicyValidationResponse {
|
||||||
|
accepted: true,
|
||||||
|
message: None,
|
||||||
|
code: None,
|
||||||
|
mutated_object: Some(mutated_obj),
|
||||||
|
audit_annotations: None,
|
||||||
|
warnings: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let response = AdmissionResponse::from_policy_validation_response(
|
||||||
|
uid.clone(),
|
||||||
|
Some(&req_obj),
|
||||||
|
&pol_val_resp,
|
||||||
|
);
|
||||||
|
assert!(response.is_ok());
|
||||||
|
let response = response.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(response.uid, uid);
|
||||||
|
assert!(response.allowed);
|
||||||
|
assert!(response.status.is_none());
|
||||||
|
assert_eq!(response.patch_type, Some(String::from("JSONPatch")));
|
||||||
|
|
||||||
|
let patch_decoded_str = general_purpose::STANDARD
|
||||||
|
.decode(response.patch.unwrap())
|
||||||
|
.unwrap();
|
||||||
|
let patch: json_patch::Patch =
|
||||||
|
serde_json::from_slice(patch_decoded_str.as_slice()).unwrap();
|
||||||
|
assert_eq!(patch, expected_diff);
|
||||||
|
}
|
||||||
|
}
|
||||||
375
src/callback_handler/crypto.rs
Normal file
375
src/callback_handler/crypto.rs
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use chrono::{DateTime, FixedOffset, Utc};
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::crypto::{
|
||||||
|
BoolWithReason, Certificate, CertificateEncoding,
|
||||||
|
};
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::crypto_v1::CertificateVerificationRequest;
|
||||||
|
use tracing::debug;
|
||||||
|
|
||||||
|
/// A collection of trusted root certificates
#[derive(Default, Debug)]
struct CertificatePool {
    // Root CAs: each one can terminate a verification chain on its own.
    trusted_roots: Vec<picky::x509::Cert>,
    // Intermediate CAs: used as the middle link of a two-element chain
    // (see `create_chains_for_all_certificates`).
    intermediates: Vec<picky::x509::Cert>,
}
|
||||||
|
|
||||||
|
/// verify_certificate verifies the validity of the certificate, and if it is
|
||||||
|
/// trusted with the provided certificate chain.
|
||||||
|
/// If the provided certificate chain is empty, it is treated as trusted.
|
||||||
|
pub fn verify_certificate(req: CertificateVerificationRequest) -> Result<BoolWithReason> {
|
||||||
|
// verify validity:
|
||||||
|
let pc = match req.cert.encoding {
|
||||||
|
CertificateEncoding::Pem => {
|
||||||
|
let pem_str = String::from_utf8(req.cert.data)
|
||||||
|
.map_err(|_| anyhow!("Certificate PEM data is not UTF8 encoded"))?;
|
||||||
|
picky::x509::Cert::from_pem_str(&pem_str)
|
||||||
|
}
|
||||||
|
CertificateEncoding::Der => picky::x509::Cert::from_der(&req.cert.data),
|
||||||
|
}?;
|
||||||
|
match req.not_after {
|
||||||
|
Some(not_after_string) => {
|
||||||
|
// picky deals with UTCTime as defined in:
|
||||||
|
// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
|
||||||
|
|
||||||
|
// Convert RFC 3339 not_after string from the request to chrono's
|
||||||
|
// DateTime<Utc>, to ensure Zulu:
|
||||||
|
let dt_not_after: DateTime<FixedOffset> =
|
||||||
|
DateTime::parse_from_rfc3339(not_after_string.as_str())
|
||||||
|
.map_err(|_| anyhow!("Timestamp not_after is not in RFC3339 format"))?;
|
||||||
|
let zulu_not_after: DateTime<Utc> = dt_not_after.with_timezone(&Utc);
|
||||||
|
|
||||||
|
// Convert from chrono's DateTime<Utc> to picky's UtcDate to perform
|
||||||
|
// check:
|
||||||
|
let p_not_after: picky::x509::date::UtcDate =
|
||||||
|
picky::x509::date::UtcDate::from(zulu_not_after);
|
||||||
|
|
||||||
|
if pc.valid_not_after().lt(&p_not_after) {
|
||||||
|
return Ok(BoolWithReason::False(
|
||||||
|
"Certificate is being used after its expiration date".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => debug!(
|
||||||
|
"No current time provided to check expiration; certificate is assumed never expired"
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
let now = picky::x509::date::UtcDate::now();
|
||||||
|
if pc.valid_not_before().gt(&now) {
|
||||||
|
return Ok(BoolWithReason::False(
|
||||||
|
"Certificate is being used before its validity date".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// verify trust with cert chain:
|
||||||
|
if let Some(mut certch) = req.cert_chain {
|
||||||
|
let mut certs = vec![];
|
||||||
|
certs.append(&mut certch);
|
||||||
|
let cert_pool = CertificatePool::from_certificates(&certs)?;
|
||||||
|
if !cert_pool.verify(&pc) {
|
||||||
|
return Ok(BoolWithReason::False(
|
||||||
|
"Certificate is not trusted by the provided cert chain".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(BoolWithReason::True)
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CertificatePool {
    /// Build a `CertificatePool` instance using the provided list of [`Certificate`]
    ///
    /// Certificates are decoded according to their declared encoding and
    /// sorted into roots and intermediates based on `picky`'s classification.
    /// Any certificate that is neither root nor intermediate (e.g. a leaf)
    /// is rejected with an error.
    fn from_certificates(certs: &[Certificate]) -> Result<Self> {
        let mut trusted_roots = vec![];
        let mut intermediates = vec![];

        for c in certs {
            // Decode each chain certificate (PEM requires UTF-8 text).
            let pc = match c.encoding {
                CertificateEncoding::Pem => {
                    let pem_str = String::from_utf8(c.data.clone())
                        .map_err(|_| anyhow!("Certificate PEM data is not UTF8 encoded"))?;
                    picky::x509::Cert::from_pem_str(&pem_str)
                }
                CertificateEncoding::Der => picky::x509::Cert::from_der(&c.data),
            }?;

            // Classify by certificate type as reported by picky.
            match pc.ty() {
                picky::x509::certificate::CertType::Root => {
                    trusted_roots.push(pc);
                }
                picky::x509::certificate::CertType::Intermediate => {
                    intermediates.push(pc);
                }
                _ => {
                    return Err(anyhow!(
                        "Cannot add a certificate that is not root nor intermediate"
                    ));
                }
            }
        }

        Ok(CertificatePool {
            trusted_roots,
            intermediates,
        })
    }

    /// Returns true when `cert` verifies against at least one chain that can
    /// be assembled from the pool.
    ///
    /// NOTE(review): the verification date is pinned to the candidate's own
    /// notBefore (`exact_date(&cert.valid_not_before())`), i.e. validity of
    /// the chain is evaluated at the moment the candidate became valid —
    /// confirm this is intentional rather than using the current time.
    fn verify(&self, cert: &picky::x509::Cert) -> bool {
        self.create_chains_for_all_certificates()
            .iter()
            .any(|chain| {
                cert.verifier()
                    .chain(chain.iter().copied())
                    .exact_date(&cert.valid_not_before())
                    .verify()
                    .is_ok()
            })
    }

    /// Enumerate candidate chains: every root alone, plus every
    /// (intermediate, root) pair where the root signed the intermediate.
    /// Chains are at most two certificates deep; intermediate-to-intermediate
    /// links are not explored.
    fn create_chains_for_all_certificates(&self) -> Vec<Vec<&picky::x509::Cert>> {
        let mut chains: Vec<Vec<&picky::x509::Cert>> = vec![];
        self.trusted_roots.iter().for_each(|trusted_root| {
            chains.push([trusted_root].to_vec());
        });
        self.intermediates.iter().for_each(|intermediate| {
            for root in self.trusted_roots.iter() {
                // Only pair an intermediate with a root that actually signed it.
                if root.is_parent_of(intermediate).is_ok() {
                    chains.push([intermediate, root].to_vec());
                }
            }
        });

        chains
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use crate::callback_handler::verify_certificate;
    use chrono::Utc;
    use kubewarden_policy_sdk::host_capabilities::crypto::{
        BoolWithReason, Certificate, CertificateEncoding,
    };
    use kubewarden_policy_sdk::host_capabilities::crypto_v1::CertificateVerificationRequest;

    const ROOT_CA1_PEM: &str = "-----BEGIN CERTIFICATE-----
MIICSTCCAfCgAwIBAgIUQS1sQWI6HCOK5vsO2DDHqWZER7swCgYIKoZIzj0EAwIw
gYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdCYXZhcmlhMRIwEAYDVQQHEwlOdXJl
bWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4xGzAZBgNVBAsTEkt1YmV3YXJkZW4g
Um9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRlbiBSb290IENBMB4XDTIyMTEyNTE2
MTcwMFoXDTI3MTEyNDE2MTcwMFowgYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdC
YXZhcmlhMRIwEAYDVQQHEwlOdXJlbWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4x
GzAZBgNVBAsTEkt1YmV3YXJkZW4gUm9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRl
biBSb290IENBMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEaCb4QEa4/4rTYBoK
Bqfjiuc7bzGbOPox4WIA9UJaTRbdD9vEaxCKDztvAZfv8txr6rJJE/mkFqkXJZoP
NADD2aNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
BBYEFPuoSG9XuAy5MN3cpZmptH8pfu0PMAoGCCqGSM49BAMCA0cAMEQCIH6foAtH
M1glopoEWuk7LbCR5Zsg7Yhv+otAWbP8uQunAiB7bXV4HbW9Y5dDVn4uHvJ3j9Jc
6gBcoi4XVyawLUiZkQ==
-----END CERTIFICATE-----";

    // this intermediate certificate was built using ROOT_CA1_PEM
    const INTERMEDIATE_CA1_PEM: &str = "-----BEGIN CERTIFICATE-----
MIIClDCCAjmgAwIBAgIUAzsJl3TEWqsFlWPNbJgt0X5heawwCgYIKoZIzj0EAwIw
gYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdCYXZhcmlhMRIwEAYDVQQHEwlOdXJl
bWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4xGzAZBgNVBAsTEkt1YmV3YXJkZW4g
Um9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRlbiBSb290IENBMB4XDTIyMTEyNTE2
MTcwMFoXDTMyMTEyMjE2MTcwMFowgZIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdC
YXZhcmlhMRIwEAYDVQQHEwlOdXJlbWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4x
IzAhBgNVBAsTGkt1YmV3YXJkZW4gSW50ZXJtZWRpYXRlIENBMSMwIQYDVQQDExpL
dWJld2FyZGVuIEludGVybWVkaWF0ZSBDQTBZMBMGByqGSM49AgEGCCqGSM49AwEH
A0IABO9YOVQTb1GgIgYprNIfqDNwGHfXc0PJ7Nmf/+zypBGOoGeldLA44aVWQyAj
VXbEHR27G4LdtYhwMmLUyk1iqrqjezB5MA4GA1UdDwEB/wQEAwIBBjATBgNVHSUE
DDAKBggrBgEFBQcDAzASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBRxoNzy
5uxNFY0wnkUe73yehMn5kzAfBgNVHSMEGDAWgBT7qEhvV7gMuTDd3KWZqbR/KX7t
DzAKBggqhkjOPQQDAgNJADBGAiEAk2kTo4YrCNuUhCsV/3ziu8PHX+b6Rf8G6Nkz
3jKQjYsCIQDpKd/2J7gKujk2mtWZkNiEvmP1JspVjR+OumHpWBLV+Q==
-----END CERTIFICATE-----";

    const ROOT_CA2_PEM: &str = "-----BEGIN CERTIFICATE-----
MIICSzCCAfCgAwIBAgIUOZnBI4X6K3lySVpSwViYgIQwii0wCgYIKoZIzj0EAwIw
gYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdCYXZhcmlhMRIwEAYDVQQHEwlOdXJl
bWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4xGzAZBgNVBAsTEkt1YmV3YXJkZW4g
Um9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRlbiBSb290IENBMB4XDTIyMTEyNTE2
MTgwMFoXDTI3MTEyNDE2MTgwMFowgYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdC
YXZhcmlhMRIwEAYDVQQHEwlOdXJlbWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4x
GzAZBgNVBAsTEkt1YmV3YXJkZW4gUm9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRl
biBSb290IENBMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0+9UZU48ZVwDyJel
ti1DseAdbHngQwcouX9eSb9yDe1JCcDWA3VttgoHA3D85lZ4x6eIgNiiId1x3Qcm
8etlpqNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
BBYEFGiLnKXIbexCZ6hgSfI78yti0XBeMAoGCCqGSM49BAMCA0kAMEYCIQCUT5FU
Ig4B8SE3NuUhOTpsO6NUJBSuj73tHU7o6BQrIwIhAJzPeTZWJK10gO7aG6jjI4io
rwDBTtan3a2vXpmAbOmg
-----END CERTIFICATE-----";

    // cert with notAfter=Nov 25 16:19:00 2022 GMT
    const INTERMEDIATE_CA2_EXPIRED_PEM: &str = "-----BEGIN CERTIFICATE-----
MIICkzCCAjmgAwIBAgIUNVpbvakL2qlht3uMDUg2iHnV50cwCgYIKoZIzj0EAwIw
gYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdCYXZhcmlhMRIwEAYDVQQHEwlOdXJl
bWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4xGzAZBgNVBAsTEkt1YmV3YXJkZW4g
Um9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRlbiBSb290IENBMB4XDTIyMTEyNTE3
MDQwMFoXDTIyMTEyNTE3MDUwMFowgZIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdC
YXZhcmlhMRIwEAYDVQQHEwlOdXJlbWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4x
IzAhBgNVBAsTGkt1YmV3YXJkZW4gSW50ZXJtZWRpYXRlIENBMSMwIQYDVQQDExpL
dWJld2FyZGVuIEludGVybWVkaWF0ZSBDQTBZMBMGByqGSM49AgEGCCqGSM49AwEH
A0IABMrPXVqh2LOLdE/J2fZIcDWZe6xaLGb61AOykiyN3yd1hwL2PSYL6vFGhrZ4
oMFvodJKdC2tXFjyrRQeI5tJdPujezB5MA4GA1UdDwEB/wQEAwIBBjATBgNVHSUE
DDAKBggrBgEFBQcDAzASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBT0OaT5
auXyLvYjL9T9tJejtfAYMTAfBgNVHSMEGDAWgBRoi5ylyG3sQmeoYEnyO/MrYtFw
XjAKBggqhkjOPQQDAgNIADBFAiEAvs57i6LNa44NntViOfyPIDEPtjzuGR1tWThL
1Hs3KgYCIFDHSvzZkIk1LtW+oHdiWzd7nWrcZcdfsTbMK5NIR2B4
-----END CERTIFICATE-----";

    // cert with not_before=2035-01-05T00:00:00Z
    const INTERMEDIATE_CA_NOT_BEFORE_PEM: &str = "-----BEGIN CERTIFICATE-----
MIICkzCCAjmgAwIBAgIUWzgNojMNxpg7g23KELyQzv4vE1MwCgYIKoZIzj0EAwIw
gYIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdCYXZhcmlhMRIwEAYDVQQHEwlOdXJl
bWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4xGzAZBgNVBAsTEkt1YmV3YXJkZW4g
Um9vdCBDQTEbMBkGA1UEAxMSS3ViZXdhcmRlbiBSb290IENBMB4XDTM1MDEwNTAw
MDAwMFoXDTM2MDEwNTAwMDAwMFowgZIxCzAJBgNVBAYTAkRFMRAwDgYDVQQIEwdC
YXZhcmlhMRIwEAYDVQQHEwlOdXJlbWJlcmcxEzARBgNVBAoTCkt1YmV3YXJkZW4x
IzAhBgNVBAsTGkt1YmV3YXJkZW4gSW50ZXJtZWRpYXRlIENBMSMwIQYDVQQDExpL
dWJld2FyZGVuIEludGVybWVkaWF0ZSBDQTBZMBMGByqGSM49AgEGCCqGSM49AwEH
A0IABOU504/MZROTH4Ybl8pmQV8TYymk/c51bQS9kqyWyeI19s2G12UvXvb0yfjn
gvLZaM/S3k4rv2HA8uBsu7dfvu6jezB5MA4GA1UdDwEB/wQEAwIBBjATBgNVHSUE
DDAKBggrBgEFBQcDAzASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBReXEAv
EHuCFAQE5thiOSoEqilZAzAfBgNVHSMEGDAWgBR1uDPhKH7EjlGO2axbPKlTgy8j
iDAKBggqhkjOPQQDAgNIADBFAiEArSsdE5dDXqAU2vM3ThT8GvTnjkWhER3l9v1j
3ka2eiMCIBIMXVLY+XGEHNdarxDj8XKQurNf6Nngs0nU+5ggyF4F
-----END CERTIFICATE-----";

    /// Assert that `verify_certificate` rejects the request with exactly
    /// `expected_reason`.
    ///
    /// BUG FIX: the previous code used
    /// `matches!(verify_certificate(req), Ok(BoolWithReason::False(_reason)))`.
    /// In a pattern, `_reason` is a *fresh binding* that matches any string —
    /// it does not compare against the local variable of the same name, so the
    /// reason text was never actually checked. An explicit `match` with
    /// `assert_eq!` performs the intended comparison.
    fn assert_rejected_with_reason(req: CertificateVerificationRequest, expected_reason: &str) {
        match verify_certificate(req) {
            Ok(BoolWithReason::False(reason)) => assert_eq!(reason, expected_reason),
            Ok(_) => panic!("certificate was unexpectedly accepted"),
            Err(e) => panic!("verification failed with error: {}", e),
        }
    }

    #[test]
    fn certificate_is_trusted() {
        // use the correct CA chain
        let ca_cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: ROOT_CA1_PEM.as_bytes().to_vec(),
        };
        let cert_chain = vec![ca_cert];
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA1_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: Some(cert_chain),
            not_after: None,
        };
        assert!(matches!(verify_certificate(req), Ok(BoolWithReason::True)));
    }

    #[test]
    fn certificate_is_not_trusted() {
        // Use a CA chain unrelated to the cert
        let ca_cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: ROOT_CA2_PEM.as_bytes().to_vec(),
        };
        let cert_chain = vec![ca_cert];
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA1_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: Some(cert_chain),
            not_after: None,
        };

        assert_rejected_with_reason(req, "Certificate is not trusted by the provided cert chain");
    }

    #[test]
    fn certificate_is_trusted_no_chain() {
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA1_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: None,
            not_after: None,
        };
        assert!(matches!(verify_certificate(req), Ok(BoolWithReason::True)));
    }

    #[test]
    fn certificate_is_expired_but_we_dont_check() {
        let ca_cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: ROOT_CA2_PEM.as_bytes().to_vec(),
        };
        let cert_chain = vec![ca_cert];
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA2_EXPIRED_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: Some(cert_chain),
            not_after: None, // not checking expiration
        };
        assert!(matches!(verify_certificate(req), Ok(BoolWithReason::True)));
    }

    #[test]
    fn certificate_malformed_not_after() {
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA2_EXPIRED_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: None,
            not_after: Some("malformed".to_string()),
        };
        assert_eq!(
            verify_certificate(req).unwrap_err().to_string(),
            "Timestamp not_after is not in RFC3339 format"
        );
    }

    #[test]
    fn certificate_is_expired() {
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA2_EXPIRED_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: None,
            not_after: Some(Utc::now().to_rfc3339()),
        };

        assert_rejected_with_reason(req, "Certificate is being used after its expiration date");
    }

    #[test]
    fn certificate_is_used_before_notbefore_date() {
        let cert = Certificate {
            encoding: CertificateEncoding::Pem,
            data: INTERMEDIATE_CA_NOT_BEFORE_PEM.as_bytes().to_vec(),
        };
        let req = CertificateVerificationRequest {
            cert,
            cert_chain: None,
            not_after: None,
        };

        assert_rejected_with_reason(req, "Certificate is being used before its validity date");
    }
}
|
||||||
491
src/callback_handler/mod.rs
Normal file
491
src/callback_handler/mod.rs
Normal file
@@ -0,0 +1,491 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use cached::proc_macro::cached;
|
||||||
|
use itertools::Itertools;
|
||||||
|
use policy_fetcher::sources::Sources;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use tokio::sync::{mpsc, oneshot};
|
||||||
|
use tracing::{debug, warn};
|
||||||
|
|
||||||
|
use crate::callback_requests::{CallbackRequest, CallbackRequestType, CallbackResponse};
|
||||||
|
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::verification::{KeylessInfo, KeylessPrefixInfo};
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::{
|
||||||
|
net::LookupResponse, oci::ManifestDigestResponse, verification::VerificationResponse,
|
||||||
|
};
|
||||||
|
use policy_fetcher::verify::FulcioAndRekorData;
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
|
||||||
|
mod crypto;
|
||||||
|
mod oci;
|
||||||
|
mod sigstore_verification;
|
||||||
|
|
||||||
|
pub use crypto::verify_certificate;
|
||||||
|
|
||||||
|
// Default capacity of the request channel between sync callers and the handler.
const DEFAULT_CHANNEL_BUFF_SIZE: usize = 100;

/// Helper struct that creates CallbackHandler objects
pub struct CallbackHandlerBuilder<'a> {
    // Configuration used to reach OCI registries; None means defaults.
    oci_sources: Option<Sources>,
    // Capacity of the mpsc channel created by `build`.
    channel_buffer_size: usize,
    // One-shot receiver that terminates the handler's loop; mandatory at build time.
    shutdown_channel: Option<oneshot::Receiver<()>>,
    // Fulcio/Rekor trust material consumed by the sigstore client.
    fulcio_and_rekor_data: Option<&'a FulcioAndRekorData>,
}
|
||||||
|
|
||||||
|
impl<'a> Default for CallbackHandlerBuilder<'a> {
|
||||||
|
fn default() -> Self {
|
||||||
|
CallbackHandlerBuilder {
|
||||||
|
oci_sources: None,
|
||||||
|
shutdown_channel: None,
|
||||||
|
channel_buffer_size: DEFAULT_CHANNEL_BUFF_SIZE,
|
||||||
|
fulcio_and_rekor_data: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> CallbackHandlerBuilder<'a> {
    #![allow(dead_code)]

    /// Provide all the information needed to access OCI registries. Optional
    pub fn registry_config(mut self, sources: Option<Sources>) -> Self {
        self.oci_sources = sources;
        self
    }

    /// Provide the Fulcio and Rekor trust data used by sigstore
    /// verifications. Optional
    pub fn fulcio_and_rekor_data(
        mut self,
        fulcio_and_rekor_data: Option<&'a FulcioAndRekorData>,
    ) -> Self {
        self.fulcio_and_rekor_data = fulcio_and_rekor_data;
        self
    }

    /// Set the size of the channel used by the sync world to communicate with
    /// the CallbackHandler. Optional
    pub fn channel_buffer_size(mut self, size: usize) -> Self {
        self.channel_buffer_size = size;
        self
    }

    /// Set the onetime channel used to stop the endless loop of
    /// CallbackHandler. Mandatory
    pub fn shutdown_channel(mut self, shutdown_channel: oneshot::Receiver<()>) -> Self {
        self.shutdown_channel = Some(shutdown_channel);
        self
    }

    /// Create a CallbackHandler object
    ///
    /// Fails when the mandatory shutdown channel was not provided, or when
    /// the sigstore client cannot be created.
    pub fn build(self) -> Result<CallbackHandler> {
        // Channel shared between the sync world and the async handler loop.
        let (tx, rx) = mpsc::channel::<CallbackRequest>(self.channel_buffer_size);
        let shutdown_channel = self
            .shutdown_channel
            .ok_or_else(|| anyhow!("shutdown_channel_rx not provided"))?;

        let oci_client = oci::Client::new(self.oci_sources.clone());
        let sigstore_client = sigstore_verification::Client::new(
            self.oci_sources.clone(),
            self.fulcio_and_rekor_data,
        )?;

        Ok(CallbackHandler {
            oci_client,
            sigstore_client,
            tx,
            rx,
            shutdown_channel,
        })
    }
}
|
||||||
|
|
||||||
|
/// Struct that computes request coming from a Wasm guest.
/// This should be used only to handle the requests that need some async
/// code in order to be fulfilled.
pub struct CallbackHandler {
    // Client used to query remote OCI registries.
    oci_client: oci::Client,
    // Client used to perform sigstore verifications.
    sigstore_client: sigstore_verification::Client,
    // Receiving side of the request channel.
    rx: mpsc::Receiver<CallbackRequest>,
    // Sender handed out to the sync world via `sender_channel()`.
    tx: mpsc::Sender<CallbackRequest>,
    // One-shot signal that stops `loop_eval`.
    shutdown_channel: oneshot::Receiver<()>,
}
|
||||||
|
|
||||||
|
impl CallbackHandler {
|
||||||
|
/// Returns the sender side of the channel that can be used by the sync code
|
||||||
|
/// (like the `host_callback` function of PolicyEvaluator)
|
||||||
|
/// to request the computation of async code.
|
||||||
|
///
|
||||||
|
/// Can be invoked as many times as wanted.
|
||||||
|
pub fn sender_channel(&self) -> mpsc::Sender<CallbackRequest> {
|
||||||
|
self.tx.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enter an endless loop that:
|
||||||
|
/// 1. Waits for requests to be evaluated
|
||||||
|
/// 2. Evaluate the request
|
||||||
|
/// 3. Send back the result of the evaluation
|
||||||
|
///
|
||||||
|
/// The loop is interrupted only when a message is sent over the
|
||||||
|
/// `shutdown_channel`.
|
||||||
|
pub async fn loop_eval(&mut self) {
|
||||||
|
loop {
|
||||||
|
tokio::select! {
|
||||||
|
// place the shutdown check before the message evaluation,
|
||||||
|
// as recommended by tokio's documentation about select!
|
||||||
|
_ = &mut self.shutdown_channel => {
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
maybe_req = self.rx.recv() => {
|
||||||
|
if let Some(req) = maybe_req {
|
||||||
|
match req.request {
|
||||||
|
CallbackRequestType::OciManifestDigest {
|
||||||
|
image,
|
||||||
|
} => {
|
||||||
|
let response = get_oci_digest_cached(&self.oci_client, &image)
|
||||||
|
.await
|
||||||
|
.map(|response| {
|
||||||
|
if response.was_cached {
|
||||||
|
debug!(?image, "Got image digest from cache");
|
||||||
|
} else {
|
||||||
|
debug!(?image, "Got image digest by querying remote registry");
|
||||||
|
}
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&response.value).unwrap(),
|
||||||
|
}});
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
CallbackRequestType::SigstorePubKeyVerify {
|
||||||
|
image,
|
||||||
|
pub_keys,
|
||||||
|
annotations,
|
||||||
|
} => {
|
||||||
|
let response = get_sigstore_pub_key_verification_cached(&mut self.sigstore_client, image.clone(), pub_keys, annotations)
|
||||||
|
.await
|
||||||
|
.map(|response| {
|
||||||
|
if response.was_cached {
|
||||||
|
debug!(?image, "Got sigstore pub keys verification from cache");
|
||||||
|
} else {
|
||||||
|
debug!(?image, "Got sigstore pub keys verification by querying remote registry");
|
||||||
|
}
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&response.value).unwrap()
|
||||||
|
}});
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
CallbackRequestType::SigstoreKeylessVerify {
|
||||||
|
image,
|
||||||
|
keyless,
|
||||||
|
annotations,
|
||||||
|
} => {
|
||||||
|
let response = get_sigstore_keyless_verification_cached(&mut self.sigstore_client, image.clone(), keyless, annotations)
|
||||||
|
.await
|
||||||
|
.map(|response| {
|
||||||
|
if response.was_cached {
|
||||||
|
debug!(?image, "Got sigstore keyless verification from cache");
|
||||||
|
} else {
|
||||||
|
debug!(?image, "Got sigstore keylesss verification by querying remote registry");
|
||||||
|
}
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&response.value).unwrap()
|
||||||
|
}});
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
CallbackRequestType::SigstoreKeylessPrefixVerify {
|
||||||
|
image,
|
||||||
|
keyless_prefix,
|
||||||
|
annotations,
|
||||||
|
} => {
|
||||||
|
let response = get_sigstore_keyless_prefix_verification_cached(&mut self.sigstore_client, image.clone(), keyless_prefix, annotations)
|
||||||
|
.await
|
||||||
|
.map(|response| {
|
||||||
|
if response.was_cached {
|
||||||
|
debug!(?image, "Got sigstore keyless prefix verification from cache");
|
||||||
|
} else {
|
||||||
|
debug!(?image, "Got sigstore keylesss prefix verification by querying remote registry");
|
||||||
|
}
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&response.value).unwrap()
|
||||||
|
}});
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
CallbackRequestType::SigstoreGithubActionsVerify {
|
||||||
|
image,
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
annotations,
|
||||||
|
} => {
|
||||||
|
let response = get_sigstore_github_actions_verification_cached(&mut self.sigstore_client, image.clone(), owner, repo, annotations)
|
||||||
|
.await
|
||||||
|
.map(|response| {
|
||||||
|
if response.was_cached {
|
||||||
|
debug!(?image, "Got sigstore GHA verification from cache");
|
||||||
|
} else {
|
||||||
|
debug!(?image, "Got sigstore GHA verification by querying remote registry");
|
||||||
|
}
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&response.value).unwrap()
|
||||||
|
}});
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
CallbackRequestType::SigstoreCertificateVerify {
|
||||||
|
image,
|
||||||
|
certificate,
|
||||||
|
certificate_chain,
|
||||||
|
require_rekor_bundle,
|
||||||
|
annotations
|
||||||
|
} => {
|
||||||
|
let response = get_sigstore_certificate_verification_cached(&mut self.sigstore_client, &image, &certificate, certificate_chain.as_deref(), require_rekor_bundle, annotations)
|
||||||
|
.await
|
||||||
|
.map(|response| {
|
||||||
|
if response.was_cached {
|
||||||
|
debug!(?image, "Got sigstore certificate verification from cache");
|
||||||
|
} else {
|
||||||
|
debug!(?image, "Computed sigstore certificate verification");
|
||||||
|
}
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&response.value).unwrap()
|
||||||
|
}});
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
CallbackRequestType::DNSLookupHost {
|
||||||
|
host,
|
||||||
|
} => {
|
||||||
|
let response = dns_lookup::lookup_host(&host).map(|ips| {
|
||||||
|
let res = LookupResponse {
|
||||||
|
ips: ips
|
||||||
|
.iter()
|
||||||
|
.map(|ip| ip.to_string())
|
||||||
|
.collect(),
|
||||||
|
};
|
||||||
|
CallbackResponse {
|
||||||
|
payload: serde_json::to_vec(&res).unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).map_err(anyhow::Error::new);
|
||||||
|
|
||||||
|
if let Err(e) = req.response_channel.send(response) {
|
||||||
|
warn!("callback handler: cannot send response back: {:?}", e);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interacting with a remote OCI registry is time expensive, this can cause a massive slow down
// of policy evaluations, especially inside of PolicyServer.
// Because of that we will keep a cache of the digests results.
//
// Details about this cache:
// * only the image "url" is used as key. oci::Client is not hashable, plus
//   the client is always the same
// * the cache is time bound: cached values are purged after 60 seconds
// * only successful results are cached (`result = true`)
#[cached(
    time = 60,
    result = true,
    sync_writes = true,
    key = "String",
    convert = r#"{ format!("{}", img) }"#,
    with_cached_flag = true
)]
async fn get_oci_digest_cached(
    oci_client: &oci::Client,
    img: &str,
) -> Result<cached::Return<ManifestDigestResponse>> {
    // Fetch the manifest digest and wrap it in cached::Return so callers can
    // tell cache hits from fresh lookups via `was_cached`.
    oci_client
        .digest(img)
        .await
        .map(|digest| ManifestDigestResponse { digest })
        .map(cached::Return::new)
}
|
||||||
|
|
||||||
|
// Sigstore verifications are time expensive, this can cause a massive slow down
// of policy evaluations, especially inside of PolicyServer.
// Because of that we will keep a cache of the verification results.
//
// Details about this cache:
// * the key is built from the image, the public keys and the annotations
// * the cache is time bound: cached values are purged after 60 seconds
// * only successful results are cached (`result = true`)
#[cached(
    time = 60,
    result = true,
    sync_writes = true,
    key = "String",
    convert = r#"{ format!("{}{:?}{:?}", image, pub_keys, annotations)}"#,
    with_cached_flag = true
)]
async fn get_sigstore_pub_key_verification_cached(
    client: &mut sigstore_verification::Client,
    image: String,
    pub_keys: Vec<String>,
    annotations: Option<HashMap<String, String>>,
) -> Result<cached::Return<VerificationResponse>> {
    client
        .verify_public_key(image, pub_keys, annotations)
        .await
        .map(cached::Return::new)
}
|
||||||
|
|
||||||
|
// Sigstore verifications are time expensive, this can cause a massive slow down
// of policy evaluations, especially inside of PolicyServer.
// Because of that we will keep a cache of the verification results.
//
// Details about this cache:
// * the key is built from the image, the keyless settings and the annotations
// * the cache is time bound: cached values are purged after 60 seconds
// * only successful results are cached (`result = true`)
#[cached(
    time = 60,
    result = true,
    sync_writes = true,
    key = "String",
    convert = r#"{ format!("{}{:?}{:?}", image, keyless, annotations)}"#,
    with_cached_flag = true
)]
async fn get_sigstore_keyless_verification_cached(
    client: &mut sigstore_verification::Client,
    image: String,
    keyless: Vec<KeylessInfo>,
    annotations: Option<HashMap<String, String>>,
) -> Result<cached::Return<VerificationResponse>> {
    client
        .verify_keyless(image, keyless, annotations)
        .await
        .map(cached::Return::new)
}
|
||||||
|
|
||||||
|
// Sigstore verifications are time expensive, this can cause a massive slow down
// of policy evaluations, especially inside of PolicyServer.
// Because of that we will keep a cache of the verification results.
//
// Details about this cache:
// * the key is built from the image, the keyless-prefix settings and the annotations
// * the cache is time bound: cached values are purged after 60 seconds
// * only successful results are cached (`result = true`)
#[cached(
    time = 60,
    result = true,
    sync_writes = true,
    key = "String",
    convert = r#"{ format!("{}{:?}{:?}", image, keyless_prefix, annotations)}"#,
    with_cached_flag = true
)]
async fn get_sigstore_keyless_prefix_verification_cached(
    client: &mut sigstore_verification::Client,
    image: String,
    keyless_prefix: Vec<KeylessPrefixInfo>,
    annotations: Option<HashMap<String, String>>,
) -> Result<cached::Return<VerificationResponse>> {
    client
        .verify_keyless_prefix(image, keyless_prefix, annotations)
        .await
        .map(cached::Return::new)
}
|
||||||
|
|
||||||
|
// Sigstore verifications are time expensive, this can cause a massive slow down
// of policy evaluations, especially inside of PolicyServer.
// Because of that we will keep a cache of the digests results.
//
// Details about this cache:
// * the cache is time bound: cached values are purged after 60 seconds
// * only successful results are cached
//
// Cached wrapper around `Client::verify_github_actions`. The cache key is
// built from the image reference, the repository owner, the optional
// repository name and the optional annotations.
#[cached(
    time = 60,
    result = true,
    sync_writes = true,
    key = "String",
    convert = r#"{ format!("{}{:?}{:?}{:?}", image, owner, repo, annotations)}"#,
    with_cached_flag = true
)]
async fn get_sigstore_github_actions_verification_cached(
    client: &mut sigstore_verification::Client,
    image: String,
    owner: String,
    repo: Option<String>,
    annotations: Option<HashMap<String, String>>,
) -> Result<cached::Return<VerificationResponse>> {
    client
        .verify_github_actions(image, owner, repo, annotations)
        .await
        .map(cached::Return::new)
}
|
||||||
|
|
||||||
|
fn get_sigstore_certificate_verification_cache_key(
|
||||||
|
image: &str,
|
||||||
|
certificate: &[u8],
|
||||||
|
certificate_chain: Option<&[Vec<u8>]>,
|
||||||
|
require_rekor_bundle: bool,
|
||||||
|
annotations: Option<&HashMap<String, String>>,
|
||||||
|
) -> String {
|
||||||
|
let mut hasher = Sha256::new();
|
||||||
|
|
||||||
|
hasher.update(image);
|
||||||
|
hasher.update(certificate);
|
||||||
|
|
||||||
|
if let Some(certs) = certificate_chain {
|
||||||
|
for c in certs {
|
||||||
|
hasher.update(c);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if require_rekor_bundle {
|
||||||
|
hasher.update(b"1");
|
||||||
|
} else {
|
||||||
|
hasher.update(b"0");
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(a) = annotations {
|
||||||
|
for key in a.keys().sorted() {
|
||||||
|
hasher.update(key);
|
||||||
|
hasher.update(b"\n");
|
||||||
|
hasher.update(a.get(key).expect("key not found"));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
format!("{:x}", hasher.finalize())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cached wrapper around `Client::verify_certificate`. Same caching policy
// as the other Sigstore helpers: entries are purged after 60 seconds and
// only successful results are cached. The key is a SHA-256 digest of all
// the inputs (see `get_sigstore_certificate_verification_cache_key`).
#[cached(
    time = 60,
    result = true,
    sync_writes = true,
    key = "String",
    convert = r#"{ format!("{}", get_sigstore_certificate_verification_cache_key(image, certificate, certificate_chain, require_rekor_bundle, annotations.as_ref()))}"#,
    with_cached_flag = true
)]
async fn get_sigstore_certificate_verification_cached(
    client: &mut sigstore_verification::Client,
    image: &str,
    certificate: &[u8],
    certificate_chain: Option<&[Vec<u8>]>,
    require_rekor_bundle: bool,
    annotations: Option<HashMap<String, String>>,
) -> Result<cached::Return<VerificationResponse>> {
    client
        .verify_certificate(
            image,
            certificate,
            certificate_chain,
            require_rekor_bundle,
            annotations,
        )
        .await
        .map(cached::Return::new)
}
|
||||||
31
src/callback_handler/oci.rs
Normal file
31
src/callback_handler/oci.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use policy_fetcher::oci_distribution::Reference;
|
||||||
|
use policy_fetcher::{registry::Registry, sources::Sources};
|
||||||
|
|
||||||
|
/// Helper struct to interact with an OCI registry
pub(crate) struct Client {
    // Optional registry sources configuration, forwarded to every
    // registry operation.
    sources: Option<Sources>,
    // Low-level client that performs the actual registry requests.
    registry: Registry,
}
|
||||||
|
|
||||||
|
impl Client {
|
||||||
|
pub fn new(sources: Option<Sources>) -> Self {
|
||||||
|
let registry = Registry {};
|
||||||
|
Client { sources, registry }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch the manifest digest of the OCI resource referenced via `image`
|
||||||
|
pub async fn digest(&self, image: &str) -> Result<String> {
|
||||||
|
// this is needed to expand names as `busybox` into
|
||||||
|
// fully resolved references like `docker.io/library/busybox`
|
||||||
|
let image_ref: Reference = image.parse()?;
|
||||||
|
|
||||||
|
let image_with_proto = format!("registry://{}", image_ref.whole());
|
||||||
|
let image_digest = self
|
||||||
|
.registry
|
||||||
|
.manifest_digest(&image_with_proto, self.sources.as_ref())
|
||||||
|
.await?;
|
||||||
|
serde_json::to_string(&image_digest)
|
||||||
|
.map_err(|e| anyhow!("Cannot serialize response to json: {}", e))
|
||||||
|
}
|
||||||
|
}
|
||||||
263
src/callback_handler/sigstore_verification.rs
Normal file
263
src/callback_handler/sigstore_verification.rs
Normal file
@@ -0,0 +1,263 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::verification::{
|
||||||
|
KeylessInfo, KeylessPrefixInfo, VerificationResponse,
|
||||||
|
};
|
||||||
|
use policy_fetcher::sigstore;
|
||||||
|
use policy_fetcher::sources::Sources;
|
||||||
|
use policy_fetcher::verify::config::{LatestVerificationConfig, Signature, Subject};
|
||||||
|
use policy_fetcher::verify::{fetch_sigstore_remote_data, FulcioAndRekorData, Verifier};
|
||||||
|
use sigstore::cosign::verification_constraint::{
|
||||||
|
AnnotationVerifier, CertificateVerifier, VerificationConstraintVec,
|
||||||
|
};
|
||||||
|
use sigstore::registry::{Certificate, CertificateEncoding};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
use tracing::warn;
|
||||||
|
|
||||||
|
/// Helper struct that performs the Sigstore signature verifications.
pub(crate) struct Client {
    // Shared cosign client; it is also handed over to the `Verifier`,
    // hence the `Arc<Mutex<...>>` wrapper.
    cosign_client: Arc<Mutex<sigstore::cosign::Client>>,
    // High-level verifier built on top of the cosign client.
    verifier: Verifier,
}
|
||||||
|
|
||||||
|
impl Client {
|
||||||
|
pub fn new(
|
||||||
|
sources: Option<Sources>,
|
||||||
|
fulcio_and_rekor_data: Option<&FulcioAndRekorData>,
|
||||||
|
) -> Result<Self> {
|
||||||
|
let cosign_client = Arc::new(Mutex::new(Self::build_cosign_client(
|
||||||
|
sources.clone(),
|
||||||
|
fulcio_and_rekor_data,
|
||||||
|
)?));
|
||||||
|
let verifier = Verifier::new_from_cosign_client(cosign_client.clone(), sources);
|
||||||
|
|
||||||
|
Ok(Client {
|
||||||
|
cosign_client,
|
||||||
|
verifier,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_cosign_client(
|
||||||
|
sources: Option<Sources>,
|
||||||
|
fulcio_and_rekor_data: Option<&FulcioAndRekorData>,
|
||||||
|
) -> Result<sigstore::cosign::Client> {
|
||||||
|
let client_config: sigstore::registry::ClientConfig = sources.unwrap_or_default().into();
|
||||||
|
let mut cosign_client_builder =
|
||||||
|
sigstore::cosign::ClientBuilder::default().with_oci_client_config(client_config);
|
||||||
|
match fulcio_and_rekor_data {
|
||||||
|
Some(FulcioAndRekorData::FromTufRepository { repo }) => {
|
||||||
|
cosign_client_builder = cosign_client_builder
|
||||||
|
.with_rekor_pub_key(repo.rekor_pub_key())
|
||||||
|
.with_fulcio_certs(repo.fulcio_certs());
|
||||||
|
}
|
||||||
|
Some(FulcioAndRekorData::FromCustomData {
|
||||||
|
rekor_public_key,
|
||||||
|
fulcio_certs,
|
||||||
|
}) => {
|
||||||
|
if let Some(pk) = rekor_public_key {
|
||||||
|
cosign_client_builder = cosign_client_builder.with_rekor_pub_key(pk);
|
||||||
|
}
|
||||||
|
if !fulcio_certs.is_empty() {
|
||||||
|
let certs: Vec<sigstore::registry::Certificate> = fulcio_certs
|
||||||
|
.iter()
|
||||||
|
.map(|c| {
|
||||||
|
let sc: sigstore::registry::Certificate = c.into();
|
||||||
|
sc
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
cosign_client_builder = cosign_client_builder.with_fulcio_certs(&certs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
warn!("Sigstore Verifier created without Fulcio data: keyless signatures are going to be discarded because they cannot be verified");
|
||||||
|
warn!("Sigstore Verifier created without Rekor data: transparency log data won't be used");
|
||||||
|
warn!("Sigstore capabilities are going to be limited");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cosign_client_builder = cosign_client_builder.enable_registry_caching();
|
||||||
|
cosign_client_builder
|
||||||
|
.build()
|
||||||
|
.map_err(|e| anyhow!("could not build a cosign client: {}", e))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn verify_public_key(
|
||||||
|
&mut self,
|
||||||
|
image: String,
|
||||||
|
pub_keys: Vec<String>,
|
||||||
|
annotations: Option<HashMap<String, String>>,
|
||||||
|
) -> Result<VerificationResponse> {
|
||||||
|
if pub_keys.is_empty() {
|
||||||
|
return Err(anyhow!("Must provide at least one pub key"));
|
||||||
|
}
|
||||||
|
let mut signatures_all_of: Vec<Signature> = Vec::new();
|
||||||
|
for k in pub_keys.iter() {
|
||||||
|
let signature = Signature::PubKey {
|
||||||
|
owner: None,
|
||||||
|
key: k.clone(),
|
||||||
|
annotations: annotations.clone(),
|
||||||
|
};
|
||||||
|
signatures_all_of.push(signature);
|
||||||
|
}
|
||||||
|
let verification_config = LatestVerificationConfig {
|
||||||
|
all_of: Some(signatures_all_of),
|
||||||
|
any_of: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = self.verifier.verify(&image, &verification_config).await;
|
||||||
|
match result {
|
||||||
|
Ok(digest) => Ok(VerificationResponse {
|
||||||
|
digest,
|
||||||
|
is_trusted: true,
|
||||||
|
}),
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn verify_keyless(
|
||||||
|
&mut self,
|
||||||
|
image: String,
|
||||||
|
keyless: Vec<KeylessInfo>,
|
||||||
|
annotations: Option<HashMap<String, String>>,
|
||||||
|
) -> Result<VerificationResponse> {
|
||||||
|
if keyless.is_empty() {
|
||||||
|
return Err(anyhow!("Must provide keyless info"));
|
||||||
|
}
|
||||||
|
// Build interim VerificationConfig:
|
||||||
|
//
|
||||||
|
let mut signatures_all_of: Vec<Signature> = Vec::new();
|
||||||
|
for k in keyless.iter() {
|
||||||
|
let signature = Signature::GenericIssuer {
|
||||||
|
issuer: k.issuer.clone(),
|
||||||
|
subject: Subject::Equal(k.subject.clone()),
|
||||||
|
annotations: annotations.clone(),
|
||||||
|
};
|
||||||
|
signatures_all_of.push(signature);
|
||||||
|
}
|
||||||
|
let verification_config = LatestVerificationConfig {
|
||||||
|
all_of: Some(signatures_all_of),
|
||||||
|
any_of: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = self.verifier.verify(&image, &verification_config).await;
|
||||||
|
match result {
|
||||||
|
Ok(digest) => Ok(VerificationResponse {
|
||||||
|
digest,
|
||||||
|
is_trusted: true,
|
||||||
|
}),
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn verify_keyless_prefix(
|
||||||
|
&mut self,
|
||||||
|
image: String,
|
||||||
|
keyless_prefix: Vec<KeylessPrefixInfo>,
|
||||||
|
annotations: Option<HashMap<String, String>>,
|
||||||
|
) -> Result<VerificationResponse> {
|
||||||
|
if keyless_prefix.is_empty() {
|
||||||
|
return Err(anyhow!("Must provide keyless info"));
|
||||||
|
}
|
||||||
|
// Build interim VerificationConfig:
|
||||||
|
//
|
||||||
|
let mut signatures_all_of: Vec<Signature> = Vec::new();
|
||||||
|
for k in keyless_prefix.iter() {
|
||||||
|
let prefix = url::Url::parse(&k.url_prefix).expect("Cannot build url prefix");
|
||||||
|
let signature = Signature::GenericIssuer {
|
||||||
|
issuer: k.issuer.clone(),
|
||||||
|
subject: Subject::UrlPrefix(prefix),
|
||||||
|
annotations: annotations.clone(),
|
||||||
|
};
|
||||||
|
signatures_all_of.push(signature);
|
||||||
|
}
|
||||||
|
let verification_config = LatestVerificationConfig {
|
||||||
|
all_of: Some(signatures_all_of),
|
||||||
|
any_of: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = self.verifier.verify(&image, &verification_config).await;
|
||||||
|
match result {
|
||||||
|
Ok(digest) => Ok(VerificationResponse {
|
||||||
|
digest,
|
||||||
|
is_trusted: true,
|
||||||
|
}),
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn verify_github_actions(
|
||||||
|
&mut self,
|
||||||
|
image: String,
|
||||||
|
owner: String,
|
||||||
|
repo: Option<String>,
|
||||||
|
annotations: Option<HashMap<String, String>>,
|
||||||
|
) -> Result<VerificationResponse> {
|
||||||
|
if owner.is_empty() {
|
||||||
|
return Err(anyhow!("Must provide owner info"));
|
||||||
|
}
|
||||||
|
// Build interim VerificationConfig:
|
||||||
|
//
|
||||||
|
let mut signatures_all_of: Vec<Signature> = Vec::new();
|
||||||
|
let signature = Signature::GithubAction {
|
||||||
|
owner: owner.clone(),
|
||||||
|
repo: repo.clone(),
|
||||||
|
annotations: annotations.clone(),
|
||||||
|
};
|
||||||
|
signatures_all_of.push(signature);
|
||||||
|
let verification_config = LatestVerificationConfig {
|
||||||
|
all_of: Some(signatures_all_of),
|
||||||
|
any_of: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = self.verifier.verify(&image, &verification_config).await;
|
||||||
|
match result {
|
||||||
|
Ok(digest) => Ok(VerificationResponse {
|
||||||
|
digest,
|
||||||
|
is_trusted: true,
|
||||||
|
}),
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn verify_certificate(
|
||||||
|
&mut self,
|
||||||
|
image: &str,
|
||||||
|
certificate: &[u8],
|
||||||
|
certificate_chain: Option<&[Vec<u8>]>,
|
||||||
|
require_rekor_bundle: bool,
|
||||||
|
annotations: Option<HashMap<String, String>>,
|
||||||
|
) -> Result<VerificationResponse> {
|
||||||
|
let (source_image_digest, trusted_layers) =
|
||||||
|
fetch_sigstore_remote_data(&self.cosign_client, image).await?;
|
||||||
|
let chain: Option<Vec<Certificate>> = certificate_chain.map(|certs| {
|
||||||
|
certs
|
||||||
|
.iter()
|
||||||
|
.map(|cert_data| Certificate {
|
||||||
|
data: cert_data.to_owned(),
|
||||||
|
encoding: CertificateEncoding::Pem,
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
});
|
||||||
|
|
||||||
|
let cert_verifier =
|
||||||
|
CertificateVerifier::from_pem(certificate, require_rekor_bundle, chain.as_deref())?;
|
||||||
|
|
||||||
|
let mut verification_constraints: VerificationConstraintVec = vec![Box::new(cert_verifier)];
|
||||||
|
if let Some(a) = annotations {
|
||||||
|
let annotations_verifier = AnnotationVerifier { annotations: a };
|
||||||
|
verification_constraints.push(Box::new(annotations_verifier));
|
||||||
|
}
|
||||||
|
|
||||||
|
let result =
|
||||||
|
sigstore::cosign::verify_constraints(&trusted_layers, verification_constraints.iter())
|
||||||
|
.map(|_| source_image_digest)
|
||||||
|
.map_err(|e| anyhow!("verification failed: {}", e));
|
||||||
|
match result {
|
||||||
|
Ok(digest) => Ok(VerificationResponse {
|
||||||
|
digest,
|
||||||
|
is_trusted: true,
|
||||||
|
}),
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
193
src/callback_requests.rs
Normal file
193
src/callback_requests.rs
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::verification::{KeylessInfo, KeylessPrefixInfo};
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::{
|
||||||
|
SigstoreVerificationInputV1, SigstoreVerificationInputV2,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use tokio::sync::oneshot;
|
||||||
|
|
||||||
|
/// Holds the response to a waPC evaluation request
#[derive(Debug)]
pub struct CallbackResponse {
    /// The data to be given back to the waPC guest
    pub payload: Vec<u8>,
}
|
||||||
|
|
||||||
|
/// A request sent by some synchronous code (usually waPC's host_callback)
/// that can be evaluated only inside of asynchronous code.
#[derive(Debug)]
pub struct CallbackRequest {
    /// The actual request to be evaluated
    pub request: CallbackRequestType,
    /// A tokio oneshot channel over which the evaluation response has to be sent
    pub response_channel: oneshot::Sender<Result<CallbackResponse>>,
}
|
||||||
|
|
||||||
|
/// Describes the different kinds of request a waPC guest can make to
/// our host.
#[derive(Serialize, Deserialize, Debug)]
pub enum CallbackRequestType {
    /// Require the computation of the manifest digest of an OCI object (be
    /// it an image or anything else that can be stored into an OCI registry)
    OciManifestDigest {
        /// String pointing to the object (e.g.: `registry.testing.lan/busybox:1.0.0`)
        image: String,
    },

    /// Require the verification of the manifest digest of an OCI object (be
    /// it an image or anything else that can be stored into an OCI registry)
    /// to be signed by Sigstore, using public keys mode
    SigstorePubKeyVerify {
        /// String pointing to the object (e.g.: `registry.testing.lan/busybox:1.0.0`)
        image: String,
        /// List of PEM encoded keys that must have been used to sign the OCI object
        pub_keys: Vec<String>,
        /// Optional - Annotations that must have been provided by all signers when they signed the OCI artifact
        annotations: Option<HashMap<String, String>>,
    },

    /// Require the verification of the manifest digest of an OCI object to be
    /// signed by Sigstore, using keyless mode
    SigstoreKeylessVerify {
        /// String pointing to the object (e.g.: `registry.testing.lan/busybox:1.0.0`)
        image: String,
        /// List of keyless signatures that must be found
        keyless: Vec<KeylessInfo>,
        /// Optional - Annotations that must have been provided by all signers when they signed the OCI artifact
        annotations: Option<HashMap<String, String>>,
    },

    /// Require the verification of the manifest digest of an OCI object to be
    /// signed by Sigstore using keyless mode, where the passed subject is a URL
    /// prefix of the subject to match
    SigstoreKeylessPrefixVerify {
        /// String pointing to the object (e.g.: `registry.testing.lan/busybox:1.0.0`)
        image: String,
        /// List of keyless signatures that must be found
        keyless_prefix: Vec<KeylessPrefixInfo>,
        /// Optional - Annotations that must have been provided by all signers when they signed the OCI artifact
        annotations: Option<HashMap<String, String>>,
    },

    /// Require the verification of the manifest digest of an OCI object to be
    /// signed by Sigstore using keyless mode and performed in GitHub Actions
    SigstoreGithubActionsVerify {
        /// String pointing to the object (e.g.: `registry.testing.lan/busybox:1.0.0`)
        image: String,
        /// owner of the repository. E.g: octocat
        owner: String,
        /// Optional - Repo of the GH Action workflow that signed the artifact. E.g: example-repo
        repo: Option<String>,
        /// Optional - Annotations that must have been provided by all signers when they signed the OCI artifact
        annotations: Option<HashMap<String, String>>,
    },

    /// Require the verification of the manifest digest of an OCI object
    /// using the user provided certificate
    SigstoreCertificateVerify {
        /// String pointing to the object (e.g.: `registry.testing.lan/busybox:1.0.0`)
        image: String,
        /// PEM encoded certificate used to verify the signature
        certificate: Vec<u8>,
        /// Optional - the certificate chain that is used to verify the provided
        /// certificate. When not specified, the certificate is assumed to be trusted
        certificate_chain: Option<Vec<Vec<u8>>>,
        /// Require the signature layer to have a Rekor bundle.
        /// Having a Rekor bundle allows further checks to be performed,
        /// like ensuring the signature has been produced during the validity
        /// time frame of the certificate.
        ///
        /// It is recommended to set this value to `true` to have a more secure
        /// verification process.
        require_rekor_bundle: bool,
        /// Optional - Annotations that must have been provided by all signers when they signed the OCI artifact
        annotations: Option<HashMap<String, String>>,
    },

    /// Lookup the addresses for a given hostname via DNS
    DNSLookupHost { host: String },
}
|
||||||
|
|
||||||
|
// Field-by-field mapping from the SDK's V2 Sigstore verification input to
// the internal callback request type: the variants are structurally
// identical, so every field is moved across unchanged.
impl From<SigstoreVerificationInputV2> for CallbackRequestType {
    fn from(val: SigstoreVerificationInputV2) -> Self {
        match val {
            SigstoreVerificationInputV2::SigstorePubKeyVerify {
                image,
                pub_keys,
                annotations,
            } => CallbackRequestType::SigstorePubKeyVerify {
                image,
                pub_keys,
                annotations,
            },
            SigstoreVerificationInputV2::SigstoreKeylessVerify {
                image,
                keyless,
                annotations,
            } => CallbackRequestType::SigstoreKeylessVerify {
                image,
                keyless,
                annotations,
            },
            SigstoreVerificationInputV2::SigstoreKeylessPrefixVerify {
                image,
                keyless_prefix,
                annotations,
            } => CallbackRequestType::SigstoreKeylessPrefixVerify {
                image,
                keyless_prefix,
                annotations,
            },
            SigstoreVerificationInputV2::SigstoreGithubActionsVerify {
                image,
                owner,
                repo,
                annotations,
            } => CallbackRequestType::SigstoreGithubActionsVerify {
                image,
                owner,
                repo,
                annotations,
            },
            SigstoreVerificationInputV2::SigstoreCertificateVerify {
                image,
                certificate,
                certificate_chain,
                require_rekor_bundle,
                annotations,
            } => CallbackRequestType::SigstoreCertificateVerify {
                image,
                certificate,
                certificate_chain,
                require_rekor_bundle,
                annotations,
            },
        }
    }
}
|
||||||
|
|
||||||
|
// Mapping from the SDK's V1 Sigstore verification input. V1 only exposes
// the pub-key and keyless variants; the fields are moved across unchanged.
impl From<SigstoreVerificationInputV1> for CallbackRequestType {
    fn from(val: SigstoreVerificationInputV1) -> Self {
        match val {
            SigstoreVerificationInputV1::SigstorePubKeyVerify {
                image,
                pub_keys,
                annotations,
            } => CallbackRequestType::SigstorePubKeyVerify {
                image,
                pub_keys,
                annotations,
            },
            SigstoreVerificationInputV1::SigstoreKeylessVerify {
                image,
                keyless,
                annotations,
            } => CallbackRequestType::SigstoreKeylessVerify {
                image,
                keyless,
                annotations,
            },
        }
    }
}
|
||||||
83
src/cluster_context.rs
Normal file
83
src/cluster_context.rs
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use kube::{
|
||||||
|
api::{ListParams, Request},
|
||||||
|
Client,
|
||||||
|
};
|
||||||
|
use std::sync::RwLock;
|
||||||
|
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
|
||||||
|
lazy_static! {
    // Process-wide singleton, accessed through `ClusterContext::get`.
    static ref CLUSTER_CONTEXT: ClusterContext = ClusterContext::default();
}
|
||||||
|
|
||||||
|
// ClusterContext represents a structure that can be used to retrieve
// information about a running Kubernetes cluster.
//
// Each field caches the raw textual response of the last `list` request
// performed by `refresh` for that resource kind.
#[derive(Default)]
pub struct ClusterContext {
    ingresses: RwLock<String>,
    namespaces: RwLock<String>,
    services: RwLock<String>,
}
|
||||||
|
|
||||||
|
impl ClusterContext {
|
||||||
|
pub fn get<'a>() -> &'a ClusterContext {
|
||||||
|
&CLUSTER_CONTEXT
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ingresses(&self) -> String {
|
||||||
|
(*self.ingresses.read().unwrap()).clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn namespaces(&self) -> String {
|
||||||
|
(*self.namespaces.read().unwrap()).clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn services(&self) -> String {
|
||||||
|
(*self.services.read().unwrap()).clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn refresh(&self, kubernetes_client: &Client) -> Result<()> {
|
||||||
|
{
|
||||||
|
let namespace_list = kubernetes_client
|
||||||
|
.request_text(
|
||||||
|
Request::new("/api/v1/namespaces")
|
||||||
|
.list(&ListParams::default())
|
||||||
|
.map_err(|err| anyhow!("could not list namespaces: {:?}", err))?,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Ok(mut namespaces) = self.namespaces.write() {
|
||||||
|
*namespaces = namespace_list
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let service_list = kubernetes_client
|
||||||
|
.request_text(
|
||||||
|
Request::new("/api/v1/services")
|
||||||
|
.list(&ListParams::default())
|
||||||
|
.map_err(|err| anyhow!("could not list services: {:?}", err))?,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Ok(mut services) = self.services.write() {
|
||||||
|
*services = service_list
|
||||||
|
};
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let ingress_list = kubernetes_client
|
||||||
|
.request_text(
|
||||||
|
Request::new("/apis/networking.k8s.io/v1/ingresses")
|
||||||
|
.list(&ListParams::default())
|
||||||
|
.map_err(|err| anyhow!("could not list ingresses: {:?}", err))?,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Ok(mut ingresses) = self.ingresses.write() {
|
||||||
|
*ingresses = ingress_list
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
10
src/constants.rs
Normal file
10
src/constants.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
// Name of the custom section that carries the Kubewarden policy metadata.
pub const KUBEWARDEN_CUSTOM_SECTION_METADATA: &str = "io.kubewarden.metadata";

// Annotation keys used inside of the policy metadata.
pub const KUBEWARDEN_ANNOTATION_POLICY_TITLE: &str = "io.kubewarden.policy.title";
pub const KUBEWARDEN_ANNOTATION_POLICY_DESCRIPTION: &str = "io.kubewarden.policy.description";
pub const KUBEWARDEN_ANNOTATION_POLICY_AUTHOR: &str = "io.kubewarden.policy.author";
pub const KUBEWARDEN_ANNOTATION_POLICY_URL: &str = "io.kubewarden.policy.url";
pub const KUBEWARDEN_ANNOTATION_POLICY_SOURCE: &str = "io.kubewarden.policy.source";
pub const KUBEWARDEN_ANNOTATION_POLICY_LICENSE: &str = "io.kubewarden.policy.license";
pub const KUBEWARDEN_ANNOTATION_POLICY_USAGE: &str = "io.kubewarden.policy.usage";

// Annotation recording the kwctl version (presumably the one used to
// annotate the policy — confirm against the metadata writer).
pub const KUBEWARDEN_ANNOTATION_KWCTL_VERSION: &str = "io.kubewarden.kwctl";
|
||||||
31
src/lib.rs
Normal file
31
src/lib.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
// Crate root: module declarations and re-exports.
pub extern crate burrego;
extern crate wasmparser;

pub mod admission_response;
pub mod callback_handler;
pub mod callback_requests;
pub mod cluster_context;
pub mod constants;
pub(crate) mod policy;
pub mod policy_evaluator;
pub mod policy_evaluator_builder;
pub mod policy_metadata;
mod policy_tracing;
pub mod runtimes;

// API's that expose other crate types (such as Kubewarden Policy SDK
// or `policy_fetcher`) can either implement their own exposed types,
// and means to convert those types internally to their dependencies
// types, or depending on the specific case, re-export dependencies
// API's directly.
//
// Re-exporting specific crates that belong to us is easier for common
// consumers of these libraries along with the `policy-evaluator`, so
// they can access these crates through the `policy-evaluator` itself,
// streamlining their dependencies as well.
pub use kube;
pub use kubewarden_policy_sdk;
pub use kubewarden_policy_sdk::metadata::ProtocolVersion;
pub use policy_fetcher;
pub use validator;
pub use wasmtime_provider::wasmtime;
|
||||||
76
src/policy.rs
Normal file
76
src/policy.rs
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use std::clone::Clone;
|
||||||
|
use std::fmt;
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
|
||||||
|
use crate::callback_requests::CallbackRequest;
|
||||||
|
|
||||||
|
/// Minimal amount of information about a policy that need to
/// be always accessible at runtime.
///
/// This struct is used extensively inside of the `host_callback`
/// function to obtain information about the policy that is invoking
/// a host waPC function, and handle the request.
#[derive(Clone)]
pub struct Policy {
    /// The policy identifier. This is mostly relevant for Policy Server,
    /// which uses the identifier provided by the user inside of the `policy.yml`
    /// file
    pub id: String,

    /// This is relevant only for waPC-based policies. This is the unique ID
    /// associated to the waPC policy.
    /// Burrego policies have this field set to `None`
    instance_id: Option<u64>,

    /// Channel used by the synchronous world (the `host_callback` waPC function),
    /// to request the computation of code that can only be run inside of an
    /// asynchronous block
    pub callback_channel: Option<mpsc::Sender<CallbackRequest>>,
}
|
||||||
|
|
||||||
|
impl fmt::Debug for Policy {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
let callback_channel = match self.callback_channel {
|
||||||
|
Some(_) => "Some(...)",
|
||||||
|
None => "None",
|
||||||
|
};
|
||||||
|
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
r#"Policy {{ id: "{}", instance_id: {:?}, callback_channel: {} }}"#,
|
||||||
|
self.id, self.instance_id, callback_channel,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for Policy {
    // Equality is based on `id` and `instance_id` only; the callback
    // channel is not part of the comparison.
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id && self.instance_id == other.instance_id
    }
}
|
||||||
|
|
||||||
|
// Test-only convenience constructor: empty id, no instance id, no channel.
#[cfg(test)]
impl Default for Policy {
    fn default() -> Self {
        Policy {
            id: String::default(),
            instance_id: None,
            callback_channel: None,
        }
    }
}
|
||||||
|
|
||||||
|
impl Policy {
    // Builds a new Policy. As currently written this can never fail; the
    // `Result` return type is kept for interface stability.
    pub(crate) fn new(
        id: String,
        policy_id: Option<u64>,
        callback_channel: Option<mpsc::Sender<CallbackRequest>>,
    ) -> Result<Policy> {
        Ok(Policy {
            id,
            instance_id: policy_id,
            callback_channel,
        })
    }
}
|
||||||
204
src/policy_evaluator.rs
Normal file
204
src/policy_evaluator.rs
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use kubewarden_policy_sdk::metadata::ProtocolVersion;
|
||||||
|
use kubewarden_policy_sdk::settings::SettingsValidationResponse;
|
||||||
|
use serde::Serialize;
|
||||||
|
use serde_json::value;
|
||||||
|
use std::{convert::TryFrom, fmt};
|
||||||
|
|
||||||
|
use crate::admission_response::AdmissionResponse;
|
||||||
|
use crate::policy::Policy;
|
||||||
|
use crate::runtimes::burrego::Runtime as BurregoRuntime;
|
||||||
|
use crate::runtimes::wapc::Runtime as WapcRuntime;
|
||||||
|
use crate::runtimes::Runtime;
|
||||||
|
|
||||||
|
/// The runtime flavor a policy must be executed with.
///
/// Serializes to/from the lowercase names used inside of policy metadata
/// (`kubewarden-wapc`, `opa`, `gatekeeper`).
#[derive(Copy, Clone, PartialEq, Eq, serde::Deserialize, serde::Serialize, Debug)]
pub enum PolicyExecutionMode {
    /// A policy built against the Kubewarden waPC SDK
    #[serde(rename = "kubewarden-wapc")]
    KubewardenWapc,
    /// A plain Open Policy Agent policy compiled to WebAssembly
    #[serde(rename = "opa")]
    Opa,
    /// An OPA Gatekeeper policy compiled to WebAssembly
    #[serde(rename = "gatekeeper")]
    OpaGatekeeper,
}
|
||||||
|
|
||||||
|
impl Default for PolicyExecutionMode {
|
||||||
|
fn default() -> Self {
|
||||||
|
PolicyExecutionMode::KubewardenWapc
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for PolicyExecutionMode {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
let json = serde_json::to_string(self).map_err(|_| fmt::Error {})?;
|
||||||
|
write!(f, "{}", json.replace('"', ""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The raw JSON payload of an admission request, wrapped as the value that
/// is handed over to the policy at evaluation time.
#[derive(Debug, Serialize)]
pub struct ValidateRequest(pub(crate) serde_json::Value);
|
||||||
|
|
||||||
|
impl ValidateRequest {
|
||||||
|
pub fn new(request: serde_json::Value) -> Self {
|
||||||
|
ValidateRequest(request)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn uid(&self) -> &str {
|
||||||
|
if let Some(uid) = self.0.get("uid").and_then(value::Value::as_str) {
|
||||||
|
uid
|
||||||
|
} else {
|
||||||
|
""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The two Rego flavors the burrego runtime can evaluate.
#[derive(Clone)]
pub(crate) enum RegoPolicyExecutionMode {
    /// A plain Open Policy Agent policy
    Opa,
    /// An OPA Gatekeeper policy
    Gatekeeper,
}
|
||||||
|
|
||||||
|
impl TryFrom<PolicyExecutionMode> for RegoPolicyExecutionMode {
|
||||||
|
type Error = anyhow::Error;
|
||||||
|
|
||||||
|
fn try_from(execution_mode: PolicyExecutionMode) -> Result<RegoPolicyExecutionMode> {
|
||||||
|
match execution_mode {
|
||||||
|
PolicyExecutionMode::Opa => Ok(RegoPolicyExecutionMode::Opa),
|
||||||
|
PolicyExecutionMode::OpaGatekeeper => Ok(RegoPolicyExecutionMode::Gatekeeper),
|
||||||
|
PolicyExecutionMode::KubewardenWapc => Err(anyhow!(
|
||||||
|
"execution mode not convertible to a Rego based executon mode"
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The settings a policy is configured with, as a JSON object.
pub(crate) type PolicySettings = serde_json::Map<String, serde_json::Value>;

/// The operations every policy evaluator must provide, regardless of the
/// runtime backing it.
pub trait Evaluator {
    /// Evaluates an admission request against the policy.
    fn validate(&mut self, request: ValidateRequest) -> AdmissionResponse;
    /// Asks the policy to validate the settings it was configured with.
    fn validate_settings(&mut self) -> SettingsValidationResponse;
    /// Returns the waPC protocol version spoken by the policy.
    fn protocol_version(&mut self) -> Result<ProtocolVersion>;
    /// Returns the identifier of the policy.
    fn policy_id(&self) -> String;
}
|
||||||
|
|
||||||
|
/// Evaluates a single policy: pairs the runtime that executes the
/// WebAssembly module with the settings the policy was configured with.
pub struct PolicyEvaluator {
    // The runtime (waPC or burrego) backing this evaluator
    pub(crate) runtime: Runtime,
    // The settings handed to the policy at evaluation time
    pub(crate) settings: PolicySettings,
    /// The policy being evaluated
    pub policy: Policy,
}
|
||||||
|
|
||||||
|
impl fmt::Debug for PolicyEvaluator {
    /// Debug representation exposing only the policy id and the settings;
    /// the runtime is deliberately left out.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("PolicyEvaluator");
        builder.field("id", &self.policy.id);
        builder.field("settings", &self.settings);
        builder.finish()
    }
}
|
||||||
|
|
||||||
|
impl Evaluator for PolicyEvaluator {
    /// Returns the identifier of the policy backing this evaluator.
    fn policy_id(&self) -> String {
        self.policy.id.clone()
    }

    /// Evaluates the given admission request, dispatching to whichever
    /// runtime (waPC or burrego) backs this evaluator.
    #[tracing::instrument(skip(request))]
    fn validate(&mut self, request: ValidateRequest) -> AdmissionResponse {
        match self.runtime {
            Runtime::Wapc(ref mut wapc_host) => {
                WapcRuntime(wapc_host).validate(&self.settings, &request)
            }
            Runtime::Burrego(ref mut burrego_evaluator) => {
                BurregoRuntime(burrego_evaluator).validate(&self.settings, &request)
            }
        }
    }

    /// Asks the policy to validate the settings it was configured with.
    ///
    /// The settings are marshalled to a JSON string before being handed to
    /// the runtime; a marshalling failure is reported as an invalid-settings
    /// response instead of an error/panic.
    #[tracing::instrument]
    fn validate_settings(&mut self) -> SettingsValidationResponse {
        let settings_str = match serde_json::to_string(&self.settings) {
            Ok(settings) => settings,
            Err(err) => {
                // Cannot even serialize the settings: report them as invalid
                return SettingsValidationResponse {
                    valid: false,
                    message: Some(format!("could not marshal settings: {}", err)),
                }
            }
        };

        match self.runtime {
            Runtime::Wapc(ref mut wapc_host) => {
                WapcRuntime(wapc_host).validate_settings(settings_str)
            }
            Runtime::Burrego(ref mut burrego_evaluator) => {
                BurregoRuntime(burrego_evaluator).validate_settings(settings_str)
            }
        }
    }

    /// Returns the waPC protocol version spoken by the policy.
    ///
    /// Only meaningful for waPC-backed policies; any other runtime yields
    /// an error.
    fn protocol_version(&mut self) -> Result<ProtocolVersion> {
        match &mut self.runtime {
            Runtime::Wapc(ref mut wapc_host) => WapcRuntime(wapc_host).protocol_version(),
            _ => Err(anyhow!(
                "protocol_version is only applicable to a Kubewarden policy"
            )),
        }
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    use serde_json::json;
    use std::collections::HashMap;

    // Each execution mode must serialize to its serde rename
    // (e.g. `PolicyExecutionMode::Opa` -> `"opa"`).
    #[test]
    fn serialize_policy_execution_mode() {
        let mut test_data: HashMap<String, PolicyExecutionMode> = HashMap::new();
        test_data.insert(
            serde_json::to_string(&json!("kubewarden-wapc")).unwrap(),
            PolicyExecutionMode::KubewardenWapc,
        );
        test_data.insert(
            serde_json::to_string(&json!("opa")).unwrap(),
            PolicyExecutionMode::Opa,
        );
        test_data.insert(
            serde_json::to_string(&json!("gatekeeper")).unwrap(),
            PolicyExecutionMode::OpaGatekeeper,
        );

        for (expected, mode) in &test_data {
            let actual = serde_json::to_string(&mode);
            assert!(actual.is_ok());
            assert_eq!(expected, &actual.unwrap());
        }
    }

    // Each serde rename must deserialize back to the matching execution
    // mode; unknown strings must be rejected.
    #[test]
    fn deserialize_policy_execution_mode() {
        let mut test_data: HashMap<String, PolicyExecutionMode> = HashMap::new();
        test_data.insert(
            serde_json::to_string(&json!("kubewarden-wapc")).unwrap(),
            PolicyExecutionMode::KubewardenWapc,
        );
        test_data.insert(
            serde_json::to_string(&json!("opa")).unwrap(),
            PolicyExecutionMode::Opa,
        );
        test_data.insert(
            serde_json::to_string(&json!("gatekeeper")).unwrap(),
            PolicyExecutionMode::OpaGatekeeper,
        );

        for (mode_str, expected) in &test_data {
            let actual: std::result::Result<PolicyExecutionMode, serde_json::Error> =
                serde_json::from_str(&mode_str);
            assert_eq!(expected, &actual.unwrap());
        }

        // an unknown policy mode should not be deserializable
        let actual: std::result::Result<PolicyExecutionMode, serde_json::Error> =
            serde_json::from_str("hello world");
        assert!(actual.is_err());
    }
}
|
||||||
373
src/policy_evaluator_builder.rs
Normal file
373
src/policy_evaluator_builder.rs
Normal file
@@ -0,0 +1,373 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use std::convert::TryInto;
|
||||||
|
use std::path::Path;
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use wasmtime_provider::wasmtime;
|
||||||
|
|
||||||
|
use crate::callback_requests::CallbackRequest;
|
||||||
|
use crate::policy::Policy;
|
||||||
|
use crate::policy_evaluator::{PolicyEvaluator, PolicyExecutionMode};
|
||||||
|
use crate::runtimes::wapc::WAPC_POLICY_MAPPING;
|
||||||
|
use crate::runtimes::{burrego::BurregoStack, wapc::WapcStack, Runtime};
|
||||||
|
|
||||||
|
/// Configure behavior of wasmtime [epoch-based interruptions](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.epoch_interruption)
///
/// There are two kind of deadlines that apply to waPC modules:
///
/// * waPC initialization code: this is the code defined by the module inside
///   of the `wapc_init` or the `_start` functions
/// * user function: the actual waPC guest function written by an user
///
/// Both deadlines are expressed as a number of epoch ticks; the tick
/// duration is defined by the embedder.
#[derive(Clone, Copy, Debug)]
pub(crate) struct EpochDeadlines {
    /// Deadline for waPC initialization code. Expressed in number of epoch ticks
    pub wapc_init: u64,

    /// Deadline for user-defined waPC function computation. Expressed in number of epoch ticks
    pub wapc_func: u64,
}
|
||||||
|
|
||||||
|
/// Helper Struct that creates a `PolicyEvaluator` object
#[derive(Default)]
pub struct PolicyEvaluatorBuilder {
    // Optional pre-built wasmtime engine; when missing, one is created at build time
    engine: Option<wasmtime::Engine>,
    // Identifier of the policy being built
    policy_id: String,
    // Path of the Wasm file on disk (mutually exclusive with the two fields below)
    policy_file: Option<String>,
    // Raw bytes of the Wasm module (mutually exclusive with policy_file/policy_module)
    policy_contents: Option<Vec<u8>>,
    // Pre-compiled wasmtime Module; requires `engine` to be set as well
    policy_module: Option<wasmtime::Module>,
    // Mandatory: how the policy has to be executed
    execution_mode: Option<PolicyExecutionMode>,
    // Settings handed to the policy at evaluation time
    settings: Option<serde_json::Map<String, serde_json::Value>>,
    // Channel used by the waPC host callbacks to reach the async world
    callback_channel: Option<mpsc::Sender<CallbackRequest>>,
    // Whether the wasmtime cache has to be enabled
    wasmtime_cache: bool,
    // Epoch-based interruption deadlines, when enabled
    epoch_deadlines: Option<EpochDeadlines>,
}
|
||||||
|
|
||||||
|
impl PolicyEvaluatorBuilder {
    /// Create a new PolicyEvaluatorBuilder object. The `policy_id` must be
    /// specified.
    pub fn new(policy_id: String) -> PolicyEvaluatorBuilder {
        PolicyEvaluatorBuilder {
            policy_id,
            ..Default::default()
        }
    }

    /// [`wasmtime::Engine`] instance to be used when creating the
    /// policy evaluator
    ///
    /// **Warning:** when used, all the [`wasmtime::Engine`] specific settings
    /// must be set by the caller when creating the engine.
    /// This includes options like: cache, epoch counter
    pub fn engine(mut self, engine: wasmtime::Engine) -> Self {
        self.engine = Some(engine);
        self
    }

    /// Build the policy by reading the Wasm file from disk.
    /// Cannot be used at the same time as `policy_contents`
    pub fn policy_file(mut self, path: &Path) -> Result<PolicyEvaluatorBuilder> {
        let filename = path
            .to_str()
            .map(|s| s.to_string())
            .ok_or_else(|| anyhow!("Cannot convert given path to String"))?;
        self.policy_file = Some(filename);
        Ok(self)
    }

    /// Build the policy by using the Wasm object given via the `data` array.
    /// Cannot be used at the same time as `policy_file`
    pub fn policy_contents(mut self, data: &[u8]) -> PolicyEvaluatorBuilder {
        self.policy_contents = Some(data.to_owned());
        self
    }

    /// Use a pre-built [`wasmtime::Module`] instance.
    /// **Warning:** you must provide also the [`wasmtime::Engine`] used
    /// to allocate the `Module`, otherwise the code will panic at runtime
    pub fn policy_module(mut self, module: wasmtime::Module) -> Self {
        self.policy_module = Some(module);
        self
    }

    /// Sets the policy execution mode
    pub fn execution_mode(mut self, mode: PolicyExecutionMode) -> PolicyEvaluatorBuilder {
        self.execution_mode = Some(mode);
        self
    }

    /// Enable Wasmtime cache feature
    pub fn enable_wasmtime_cache(mut self) -> PolicyEvaluatorBuilder {
        self.wasmtime_cache = true;
        self
    }

    /// Set the settings the policy will use at evaluation time
    pub fn settings(
        mut self,
        s: Option<serde_json::Map<String, serde_json::Value>>,
    ) -> PolicyEvaluatorBuilder {
        self.settings = s;
        self
    }

    /// Enable Wasmtime [epoch-based interruptions](wasmtime::Config::epoch_interruption) and set
    /// the deadlines to be enforced
    ///
    /// Two kind of deadlines have to be set:
    ///
    /// * `wapc_init_deadline`: the number of ticks the waPC initialization code can take before the
    ///   code is interrupted. This is the code usually defined inside of the `wapc_init`/`_start`
    ///   functions
    /// * `wapc_func_deadline`: the number of ticks any regular waPC guest function can run before
    ///   its terminated by the host
    ///
    /// Both these limits are expressed using the number of ticks that are allowed before the
    /// WebAssembly execution is interrupted.
    /// It's up to the embedder of waPC to define how much time a single tick is granted. This could
    /// be 1 second, 10 nanoseconds, or whatever the user prefers.
    ///
    /// **Warning:** when providing an instance of `wasmtime::Engine` via the
    /// `WasmtimeEngineProvider::engine` helper, ensure the `wasmtime::Engine`
    /// has been created with the `epoch_interruption` feature enabled
    #[must_use]
    pub fn enable_epoch_interruptions(
        mut self,
        wapc_init_deadline: u64,
        wapc_func_deadline: u64,
    ) -> Self {
        self.epoch_deadlines = Some(EpochDeadlines {
            wapc_init: wapc_init_deadline,
            wapc_func: wapc_func_deadline,
        });
        self
    }

    /// Specify the channel that is used by the synchronous world (the waPC `host_callback`
    /// function) to obtain information that can be computed only from within a
    /// tokio runtime.
    ///
    /// Note well: if no channel is given, the policy will still be created, but
    /// some waPC functions exposed by the host will not be available at runtime.
    /// The policy evaluation will not fail because of that, but the guest will
    /// get an error instead of the expected result.
    pub fn callback_channel(
        mut self,
        channel: mpsc::Sender<CallbackRequest>,
    ) -> PolicyEvaluatorBuilder {
        self.callback_channel = Some(channel);
        self
    }

    /// Ensure the configuration provided to the build is correct
    fn validate_user_input(&self) -> Result<()> {
        // Exactly one policy source (file, raw contents or pre-built module)
        // must be provided.
        if self.policy_file.is_some() && self.policy_contents.is_some() {
            return Err(anyhow!(
                "Cannot specify 'policy_file' and 'policy_contents' at the same time"
            ));
        }
        if self.policy_file.is_some() && self.policy_module.is_some() {
            return Err(anyhow!(
                "Cannot specify 'policy_file' and 'policy_module' at the same time"
            ));
        }
        if self.policy_contents.is_some() && self.policy_module.is_some() {
            return Err(anyhow!(
                "Cannot specify 'policy_contents' and 'policy_module' at the same time"
            ));
        }

        if self.policy_file.is_none()
            && self.policy_contents.is_none()
            && self.policy_module.is_none()
        {
            return Err(anyhow!(
                "Must specify one among: `policy_file`, `policy_contents` and `policy_module`"
            ));
        }

        // A pre-built module can only be used together with the engine that
        // compiled it.
        if self.engine.is_none() && self.policy_module.is_some() {
            return Err(anyhow!(
                "You must provide the `engine` that was used to instantiate the given `policy_module`"
            ));
        }

        if self.execution_mode.is_none() {
            return Err(anyhow!("Must specify execution mode"));
        }

        Ok(())
    }

    /// Create the instance of `PolicyEvaluator` to be used
    pub fn build(&self) -> Result<PolicyEvaluator> {
        self.validate_user_input()?;

        // Reuse the caller-provided engine when available; otherwise build a
        // fresh one configured with the cache/epoch options requested above.
        let engine = self
            .engine
            .as_ref()
            .map_or_else(
                || {
                    let mut wasmtime_config = wasmtime::Config::new();
                    if self.wasmtime_cache {
                        wasmtime_config.cache_config_load_default()?;
                    }
                    if self.epoch_deadlines.is_some() {
                        wasmtime_config.epoch_interruption(true);
                    }

                    wasmtime::Engine::new(&wasmtime_config)
                },
                |e| Ok(e.clone()),
            )
            .map_err(|e| anyhow!("cannot create wasmtime engine: {:?}", e))?;

        // Obtain the wasmtime Module from whichever source was configured.
        let module: wasmtime::Module = if let Some(m) = &self.policy_module {
            // it's fine to clone a Module, this is a cheap operation that just
            // copies its internal reference. See wasmtime docs
            m.clone()
        } else {
            match &self.policy_file {
                Some(file) => wasmtime::Module::from_file(&engine, file),
                None => wasmtime::Module::new(&engine, self.policy_contents.as_ref().unwrap()),
            }?
        };

        // Safe: validate_user_input() guarantees execution_mode is Some
        let execution_mode = self.execution_mode.unwrap();

        let (policy, runtime) = match execution_mode {
            PolicyExecutionMode::KubewardenWapc => {
                let wapc_stack = WapcStack::new(engine, module, self.epoch_deadlines)?;

                // waPC policies get the host id as their instance id, which
                // also registers them inside of WAPC_POLICY_MAPPING.
                let policy = Self::from_contents_internal(
                    self.policy_id.clone(),
                    self.callback_channel.clone(),
                    || Some(wapc_stack.wapc_host_id()),
                    Policy::new,
                    execution_mode,
                )?;

                let policy_runtime = Runtime::Wapc(wapc_stack);
                (policy, policy_runtime)
            }
            PolicyExecutionMode::Opa | PolicyExecutionMode::OpaGatekeeper => {
                // Rego policies have no waPC host, hence no instance id.
                let policy = Self::from_contents_internal(
                    self.policy_id.clone(),
                    self.callback_channel.clone(),
                    || None,
                    Policy::new,
                    execution_mode,
                )?;

                let mut builder = burrego::EvaluatorBuilder::default()
                    .engine(&engine)
                    .module(module)
                    .host_callbacks(crate::runtimes::burrego::new_host_callbacks());

                if let Some(deadlines) = self.epoch_deadlines {
                    builder = builder.enable_epoch_interruptions(deadlines.wapc_func);
                }
                let evaluator = builder.build()?;

                let policy_runtime = Runtime::Burrego(BurregoStack {
                    evaluator,
                    entrypoint_id: 0, // currently hardcoded to this value
                    policy_execution_mode: execution_mode.try_into()?,
                });

                (policy, policy_runtime)
            }
        };

        Ok(PolicyEvaluator {
            runtime,
            policy,
            settings: self.settings.clone().unwrap_or_default(),
        })
    }

    /// Creates the `Policy` object and, for waPC policies, registers it
    /// inside of the global `WAPC_POLICY_MAPPING`.
    ///
    /// `engine_initializer` produces the optional instance id (the waPC host
    /// id); `policy_initializer` builds the `Policy` itself. Both are
    /// injectable so the test suite can provide mocks.
    fn from_contents_internal<E, P>(
        id: String,
        callback_channel: Option<mpsc::Sender<CallbackRequest>>,
        engine_initializer: E,
        policy_initializer: P,
        policy_execution_mode: PolicyExecutionMode,
    ) -> Result<Policy>
    where
        E: Fn() -> Option<u64>,
        P: Fn(String, Option<u64>, Option<mpsc::Sender<CallbackRequest>>) -> Result<Policy>,
    {
        let instance_id = engine_initializer();
        let policy = policy_initializer(id, instance_id, callback_channel)?;
        if policy_execution_mode == PolicyExecutionMode::KubewardenWapc {
            // waPC policies must be reachable from host_callback via their
            // instance id, hence the global registration.
            WAPC_POLICY_MAPPING
                .write()
                .expect("cannot write to global WAPC_POLICY_MAPPING")
                .insert(
                    instance_id.ok_or_else(|| anyhow!("invalid policy id"))?,
                    policy.clone(),
                );
        }
        Ok(policy)
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // A waPC policy must end up registered inside of the global
    // WAPC_POLICY_MAPPING once created.
    #[test]
    fn policy_is_registered_in_the_mapping() -> Result<()> {
        let policy_name = "policy_is_registered_in_the_mapping";

        // We cannot set policy.id at build time, because some attributes
        // of Policy are private.
        let mut policy = Policy::default();
        policy.id = policy_name.to_string();

        let policy_id = 1;

        PolicyEvaluatorBuilder::from_contents_internal(
            "mock_policy".to_string(),
            None,
            || Some(policy_id),
            |_, _, _| Ok(policy.clone()),
            PolicyExecutionMode::KubewardenWapc,
        )?;

        let policy_mapping = WAPC_POLICY_MAPPING.read().unwrap();
        let found = policy_mapping
            .iter()
            .find(|(_id, policy)| policy.id == policy_name);

        assert!(found.is_some());

        Ok(())
    }

    // Rego policies do not speak waPC, hence they must not be registered
    // inside of WAPC_POLICY_MAPPING.
    #[test]
    fn policy_is_not_registered_in_the_mapping_if_not_wapc() -> Result<()> {
        let policy_name = "policy_is_not_registered_in_the_mapping_if_not_wapc";

        // We cannot set policy.id at build time, because some attributes
        // of Policy are private.
        let mut policy = Policy::default();
        policy.id = policy_name.to_string();

        let policy_id = 1;

        PolicyEvaluatorBuilder::from_contents_internal(
            policy_name.to_string(),
            None,
            || Some(policy_id),
            |_, _, _| Ok(policy.clone()),
            PolicyExecutionMode::OpaGatekeeper,
        )?;

        let policy_mapping = WAPC_POLICY_MAPPING.read().unwrap();
        let found = policy_mapping
            .iter()
            .find(|(_id, policy)| policy.id == policy_name);

        assert!(found.is_none());
        Ok(())
    }
}
|
||||||
574
src/policy_metadata.rs
Normal file
574
src/policy_metadata.rs
Normal file
@@ -0,0 +1,574 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use kubewarden_policy_sdk::metadata::ProtocolVersion;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::{HashMap, HashSet};
|
||||||
|
use std::path::Path;
|
||||||
|
use validator::{Validate, ValidationError};
|
||||||
|
use wasmparser::{Parser, Payload};
|
||||||
|
|
||||||
|
use crate::policy_evaluator::PolicyExecutionMode;
|
||||||
|
|
||||||
|
/// The admission operations a rule can subscribe to. Serialized with the
/// uppercase names used by Kubernetes admission registration.
#[derive(Deserialize, Serialize, Debug, Clone, Hash, Eq, PartialEq)]
pub enum Operation {
    #[serde(rename = "CREATE")]
    Create,
    #[serde(rename = "UPDATE")]
    Update,
    #[serde(rename = "DELETE")]
    Delete,
    #[serde(rename = "CONNECT")]
    Connect,
    /// Wildcard: matches every operation; cannot be combined with others
    #[serde(rename = "*")]
    All,
}
|
||||||
|
|
||||||
|
/// An admission registration rule: which API groups/versions, resources and
/// operations a policy is interested in. Each list must be non-empty and the
/// `*` wildcard cannot be mixed with other entries.
#[derive(Deserialize, Serialize, Debug, Clone, Validate)]
#[serde(rename_all = "camelCase")]
pub struct Rule {
    /// Non-empty; `*` must be the sole element when used
    #[validate(length(min = 1), custom = "validate_asterisk_usage")]
    pub api_groups: Vec<String>,
    /// Non-empty; `*` must be the sole element when used
    #[validate(length(min = 1), custom = "validate_asterisk_usage")]
    pub api_versions: Vec<String>,
    /// Non-empty; validated with the Kubernetes `resource[/subresource]`
    /// wildcard rules (see validate_resources)
    #[validate(length(min = 1), custom = "validate_resources")]
    pub resources: Vec<String>,
    /// Non-empty; `Operation::All` must be the sole element when used
    #[validate(
        length(min = 1),
        custom = "validate_asterisk_usage_inside_of_operations"
    )]
    pub operations: Vec<Operation>,
}
|
||||||
|
|
||||||
|
fn validate_asterisk_usage(data: &[String]) -> Result<(), ValidationError> {
|
||||||
|
if data.contains(&String::from("*")) && data.len() > 1 {
|
||||||
|
return Err(ValidationError::new(
|
||||||
|
"No other elements can be defined when '*' is used",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn validate_asterisk_usage_inside_of_operations(data: &[Operation]) -> Result<(), ValidationError> {
|
||||||
|
if data.contains(&Operation::All) && data.len() > 1 {
|
||||||
|
return Err(ValidationError::new(
|
||||||
|
"No other elements can be defined when '*' is used",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn validate_resources(data: &[String]) -> Result<(), ValidationError> {
|
||||||
|
// This method is a transposition of the check done by Kubernetes
|
||||||
|
// see https://github.com/kubernetes/kubernetes/blob/09268c16853b233ebaedcd6a877eac23690b5190/pkg/apis/admissionregistration/validation/validation.go#L44
|
||||||
|
|
||||||
|
// */x
|
||||||
|
let mut resources_with_wildcard_subresources: HashSet<String> = HashSet::new();
|
||||||
|
// x/*
|
||||||
|
let mut subresources_with_wildcard_resource: HashSet<String> = HashSet::new();
|
||||||
|
// */*
|
||||||
|
let mut has_double_wildcard = false;
|
||||||
|
// *
|
||||||
|
let mut has_single_wildcard = false;
|
||||||
|
// x
|
||||||
|
let mut has_resource_without_subresource = false;
|
||||||
|
|
||||||
|
for resource in data.iter() {
|
||||||
|
if resource.is_empty() {
|
||||||
|
return Err(ValidationError::new("empty resource is not allowed"));
|
||||||
|
}
|
||||||
|
match resource.as_str() {
|
||||||
|
"*/*" => has_double_wildcard = true,
|
||||||
|
"*" => has_single_wildcard = true,
|
||||||
|
_ => {}
|
||||||
|
};
|
||||||
|
|
||||||
|
let parts: Vec<&str> = resource.splitn(2, '/').collect();
|
||||||
|
if parts.len() == 1 {
|
||||||
|
has_resource_without_subresource = resource.as_str() != "*";
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let res = parts[0];
|
||||||
|
let sub = parts[1];
|
||||||
|
|
||||||
|
if resources_with_wildcard_subresources.contains(res) {
|
||||||
|
let msg = format!("if '{}/*' is present, must not specify {}", resource, res);
|
||||||
|
return Err(ValidationError::new(Box::leak(msg.into_boxed_str())));
|
||||||
|
}
|
||||||
|
if subresources_with_wildcard_resource.contains(sub) {
|
||||||
|
let msg = format!("if '*/{}' is present, must not specify {}", sub, resource);
|
||||||
|
return Err(ValidationError::new(Box::leak(msg.into_boxed_str())));
|
||||||
|
}
|
||||||
|
if sub == "*" {
|
||||||
|
resources_with_wildcard_subresources.insert(String::from(res));
|
||||||
|
}
|
||||||
|
if res == "*" {
|
||||||
|
subresources_with_wildcard_resource.insert(String::from(sub));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if data.len() > 1 && has_double_wildcard {
|
||||||
|
return Err(ValidationError::new(
|
||||||
|
"if '*/*' is present, must not specify other resources",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if has_single_wildcard && has_resource_without_subresource {
|
||||||
|
return Err(ValidationError::new(
|
||||||
|
"if '*' is present, must not specify other resources without subresources",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The metadata attached to a Kubewarden policy; read from the dedicated
/// custom section of the policy's Wasm module (see `Metadata::from_contents`).
#[derive(Deserialize, Serialize, Debug, Clone, Validate)]
#[serde(rename_all = "camelCase")]
#[validate(schema(function = "validate_metadata", skip_on_field_errors = false))]
pub struct Metadata {
    /// The waPC protocol version spoken by the policy; required
    #[validate(required)]
    pub protocol_version: Option<ProtocolVersion>,
    /// The admission rules the policy subscribes to
    #[validate]
    pub rules: Vec<Rule>,
    /// Free-form key/value annotations; omitted from the serialized form when None
    #[serde(skip_serializing_if = "Option::is_none")]
    pub annotations: Option<HashMap<String, String>>,
    /// Whether the policy can mutate the objects it evaluates
    pub mutating: bool,
    /// Defaults to true when missing from the serialized metadata
    /// (presumably controls eligibility for background audits — confirm against consumers)
    #[serde(default = "_default_true")]
    pub background_audit: bool,
    /// Defaults to false when missing from the serialized metadata
    #[serde(default)]
    pub context_aware: bool,
    /// The runtime flavor needed to execute the policy; defaults to waPC
    #[serde(default)]
    pub execution_mode: PolicyExecutionMode,
}
|
||||||
|
|
||||||
|
/// serde default helper: makes `Metadata::background_audit` default to `true`.
const fn _default_true() -> bool {
    true
}
|
||||||
|
|
||||||
|
impl Default for Metadata {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
protocol_version: None,
|
||||||
|
rules: vec![],
|
||||||
|
annotations: Some(HashMap::new()),
|
||||||
|
mutating: false,
|
||||||
|
background_audit: true,
|
||||||
|
context_aware: false,
|
||||||
|
execution_mode: PolicyExecutionMode::KubewardenWapc,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Metadata {
    /// Reads the policy metadata from the Wasm module stored at `path`.
    /// Returns `Ok(None)` when the module carries no metadata section.
    pub fn from_path(path: &Path) -> Result<Option<Metadata>> {
        Metadata::from_contents(&std::fs::read(path)?)
    }

    /// Extracts the metadata from the raw bytes of a Wasm module by scanning
    /// its custom sections for the Kubewarden metadata one and deserializing
    /// its JSON payload. Returns `Ok(None)` when no such section exists.
    pub fn from_contents(policy: &[u8]) -> Result<Option<Metadata>> {
        for payload in Parser::new(0).parse_all(policy) {
            if let Payload::CustomSection(reader) = payload? {
                if reader.name() == crate::constants::KUBEWARDEN_CUSTOM_SECTION_METADATA {
                    return Ok(Some(serde_json::from_slice(reader.data())?));
                }
            }
        }
        Ok(None)
    }
}
|
||||||
|
|
||||||
|
fn validate_metadata(metadata: &Metadata) -> Result<(), ValidationError> {
|
||||||
|
if metadata.execution_mode == PolicyExecutionMode::KubewardenWapc
|
||||||
|
&& metadata.protocol_version == Some(ProtocolVersion::Unknown)
|
||||||
|
{
|
||||||
|
return Err(ValidationError::new(
|
||||||
|
"Must specifify a valid protocol version",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use assert_json_diff::assert_json_eq;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn metadata_validation_pass() -> Result<(), ()> {
|
||||||
|
let pod_rule = Rule {
|
||||||
|
api_groups: vec![String::from("")],
|
||||||
|
api_versions: vec![String::from("v1")],
|
||||||
|
resources: vec![String::from("pods")],
|
||||||
|
operations: vec![Operation::Create],
|
||||||
|
};
|
||||||
|
let metadata = Metadata {
|
||||||
|
protocol_version: Some(ProtocolVersion::V1),
|
||||||
|
rules: vec![pod_rule],
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
assert!(metadata.validate().is_ok());
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn metadata_validation_failure() -> Result<(), ()> {
    // Fail because api_groups contains both '*' and another value.
    let mut pod_rule = Rule {
        api_groups: vec![String::from(""), String::from("*")],
        api_versions: vec![String::from("v1")],
        resources: vec![String::from("pods")],
        operations: vec![Operation::Create],
    };
    let protocol_version = Some(ProtocolVersion::V1);

    let mut metadata = Metadata {
        protocol_version,
        annotations: None,
        rules: vec![pod_rule],
        mutating: false,
        ..Default::default()
    };
    assert!(metadata.validate().is_err());

    // Fail because api_groups is empty.
    pod_rule = Rule {
        api_groups: vec![],
        api_versions: vec![String::from("v1")],
        resources: vec![String::from("pods")],
        operations: vec![Operation::Create],
    };
    metadata.rules = vec![pod_rule];
    assert!(metadata.validate().is_err());

    // Fail because operations contains both '*' and another value.
    pod_rule = Rule {
        api_groups: vec![String::from("")],
        api_versions: vec![String::from("v1")],
        resources: vec![String::from("pods")],
        operations: vec![Operation::All, Operation::Create],
    };
    metadata.rules = vec![pod_rule];
    assert!(metadata.validate().is_err());

    // Fail because there's no valid protocol version defined.
    // NOTE(review): the original test repeated this exact case twice with
    // identical inputs and assertion; the redundant copy was removed.
    pod_rule = Rule {
        api_groups: vec![String::from("")],
        api_versions: vec![String::from("v1")],
        resources: vec![String::from("pods")],
        operations: vec![Operation::Create],
    };
    metadata = Metadata {
        rules: vec![pod_rule],
        ..Default::default()
    };
    assert!(metadata.validate().is_err());

    // Fail because the protocol cannot be None for waPC policies.
    metadata = Metadata {
        protocol_version: None,
        execution_mode: PolicyExecutionMode::KubewardenWapc,
        ..Default::default()
    };

    assert!(metadata.validate().is_err());

    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn metadata_with_kubewarden_execution_mode_must_have_a_valid_protocol() {
    // waPC policies speak the Kubewarden protocol: an unknown protocol
    // version must be rejected, a known one must be accepted.
    let invalid = Metadata {
        protocol_version: Some(ProtocolVersion::Unknown),
        execution_mode: PolicyExecutionMode::KubewardenWapc,
        ..Default::default()
    };
    assert!(invalid.validate().is_err());

    let valid = Metadata {
        protocol_version: Some(ProtocolVersion::V1),
        execution_mode: PolicyExecutionMode::KubewardenWapc,
        ..Default::default()
    };
    assert!(valid.validate().is_ok());
}
|
||||||
|
|
||||||
|
#[test]
fn metadata_with_rego_execution_mode_must_have_a_valid_protocol() {
    // Rego policies (OPA / Gatekeeper) do not speak the Kubewarden waPC
    // protocol, hence an unknown protocol version must not fail validation.
    // Idiom fix: iterate the array directly instead of allocating a Vec
    // (clippy::useless_vec).
    for mode in [PolicyExecutionMode::Opa, PolicyExecutionMode::OpaGatekeeper] {
        let metadata = Metadata {
            protocol_version: Some(ProtocolVersion::Unknown),
            execution_mode: mode,
            ..Default::default()
        };

        assert!(metadata.validate().is_ok());
    }
}
|
||||||
|
|
||||||
|
#[test]
fn metadata_without_rules() -> Result<(), ()> {
    // A Metadata without rules must serialize to an empty `rules` array
    // plus the default values of every other field, in camelCase.
    let metadata = Metadata {
        protocol_version: Some(ProtocolVersion::V1),
        annotations: None,
        ..Default::default()
    };

    let want = json!({
        "protocolVersion": "v1",
        "rules": [ ],
        "mutating": false,
        "backgroundAudit": true,
        "contextAware": false,
        "executionMode": "kubewarden-wapc",
    });

    let got = serde_json::to_value(&metadata).unwrap();
    assert_json_eq!(want, got);
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn metadata_backwards_compatibility() -> Result<(), ()> {
    // `backgroundAudit` is intentionally absent from the JSON document:
    // metadata produced by older policies did not carry it, and it must
    // default to `true` on deserialization.
    let raw = json!({
        "protocolVersion": "v1",
        "rules": [ ],
        "mutating": false,
        "contextAware": false,
        "executionMode": "kubewarden-wapc",
    });

    let want = Metadata {
        protocol_version: Some(ProtocolVersion::V1),
        annotations: None,
        background_audit: true,
        ..Default::default()
    };

    let got: Metadata = serde_json::from_value(raw).unwrap();
    assert_json_eq!(want, got);
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn metadata_init() -> Result<(), ()> {
    // A fully populated Metadata must serialize to the documented
    // camelCase JSON layout.
    let rule = Rule {
        api_groups: vec![String::from("")],
        api_versions: vec![String::from("v1")],
        resources: vec![String::from("pods")],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    let want = json!(
    {
        "protocolVersion": "v1",
        "rules": [
            {
                "apiGroups":[""],
                "apiVersions":["v1"],
                "resources":["pods"],
                "operations":["CREATE"]
            }
        ],
        "annotations": {
            "io.kubewarden.policy.author": "Flavio Castelli"
        },
        "mutating": false,
        "backgroundAudit": true,
        "contextAware": false,
        "executionMode": "kubewarden-wapc",
    });

    let got = serde_json::to_value(&metadata).unwrap();
    assert_json_eq!(want, got);
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn validate_resource_asterisk_can_coexist_with_resources_that_have_subresources(
) -> Result<(), ()> {
    // '*' matches plain resources only, so it may be listed together
    // with subresource patterns such as `a/b`, `a/*` and `*/b`.
    let rule = Rule {
        api_groups: vec![String::from("a")],
        api_versions: vec![String::from("a")],
        resources: vec![
            String::from("*"),
            String::from("a/b"),
            String::from("a/*"),
            String::from("*/b"),
        ],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    assert!(metadata.validate().is_ok());
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn validate_resource_asterisk_cannot_mix_with_resources_that_do_not_have_subresources(
) -> Result<(), ()> {
    // '*' already covers every plain resource, so listing "a" next to it
    // is redundant and must be rejected.
    let rule = Rule {
        api_groups: vec![String::from("a")],
        api_versions: vec![String::from("a")],
        resources: vec![String::from("*"), String::from("a")],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    assert!(metadata.validate().is_err());
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn validate_resource_foo_slash_asterisk_subresource_cannot_mix_with_foo_slash_bar(
) -> Result<(), ()> {
    // `a/*` already covers every subresource of `a`, so also listing
    // `a/x` is redundant and must be rejected.
    let rule = Rule {
        api_groups: vec![String::from("a")],
        api_versions: vec![String::from("a")],
        resources: vec![String::from("a/*"), String::from("a/x")],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    assert!(metadata.validate().is_err());
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn validate_resource_foo_slash_asterisk_can_mix_with_foo() -> Result<(), ()> {
    // `a/*` matches subresources only, while `a` matches the plain
    // resource: the two do not overlap and may be listed together.
    let rule = Rule {
        api_groups: vec![String::from("a")],
        api_versions: vec![String::from("a")],
        resources: vec![String::from("a/*"), String::from("a")],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    assert!(metadata.validate().is_ok());
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn validate_resource_asterisk_slash_bar_cannot_mix_with_foo_slash_bar() -> Result<(), ()> {
    // `*/a` already covers the `a` subresource of every resource, so
    // also listing `x/a` is redundant and must be rejected.
    let rule = Rule {
        api_groups: vec![String::from("a")],
        api_versions: vec![String::from("a")],
        resources: vec![String::from("*/a"), String::from("x/a")],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    assert!(metadata.validate().is_err());
    Ok(())
}
|
||||||
|
|
||||||
|
#[test]
fn validate_resource_double_asterisk_cannot_mix_with_other_resources() -> Result<(), ()> {
    // `*/*` matches everything: any additional resource entry is
    // redundant and must be rejected.
    let rule = Rule {
        api_groups: vec![String::from("a")],
        api_versions: vec![String::from("a")],
        resources: vec![String::from("*/*"), String::from("a")],
        operations: vec![Operation::Create],
    };

    let annotations = HashMap::from([(
        String::from("io.kubewarden.policy.author"),
        String::from("Flavio Castelli"),
    )]);

    let metadata = Metadata {
        annotations: Some(annotations),
        protocol_version: Some(ProtocolVersion::V1),
        rules: vec![rule],
        ..Default::default()
    };

    assert!(metadata.validate().is_err());
    Ok(())
}
|
||||||
|
}
|
||||||
77
src/policy_tracing.rs
Normal file
77
src/policy_tracing.rs
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use tracing::{event, Level};
|
||||||
|
|
||||||
|
use crate::policy::Policy;
|
||||||
|
|
||||||
|
/// Log severities a policy may emit through the `policy_log` host callback.
/// Serialized with serde; deserialization is case-insensitive (see the
/// manual `Deserialize` impl below in this file).
#[derive(Debug, Serialize)]
enum PolicyLogEntryLevel {
    Trace,
    Debug,
    Info,
    Warning,
    Error,
}
|
||||||
|
|
||||||
|
impl<'de> Deserialize<'de> for PolicyLogEntryLevel {
    // Custom deserializer: accepts the level name case-insensitively
    // (e.g. "info", "INFO", "Info") and rejects any unknown value with a
    // serde custom error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        match s.to_uppercase().as_str() {
            "TRACE" => Ok(PolicyLogEntryLevel::Trace),
            "DEBUG" => Ok(PolicyLogEntryLevel::Debug),
            "INFO" => Ok(PolicyLogEntryLevel::Info),
            "WARNING" => Ok(PolicyLogEntryLevel::Warning),
            "ERROR" => Ok(PolicyLogEntryLevel::Error),
            _ => Err(anyhow!("unknown log level {}", s)).map_err(serde::de::Error::custom),
        }
    }
}
|
||||||
|
|
||||||
|
/// One structured log line emitted by a policy, decoded from JSON.
#[derive(Debug, Deserialize, Serialize)]
struct PolicyLogEntry {
    // Severity of the entry.
    level: PolicyLogEntryLevel,
    // Optional human-readable message.
    message: Option<String>,
    // Any extra JSON attributes not matched by the fields above are
    // collected here via serde's `flatten`.
    #[serde(flatten)]
    data: Option<serde_json::Map<String, serde_json::Value>>,
}
|
||||||
|
|
||||||
|
impl Policy {
|
||||||
|
#[tracing::instrument(name = "policy_log", skip(contents))]
|
||||||
|
pub(crate) fn log(&self, contents: &[u8]) -> Result<()> {
|
||||||
|
let log_entry: PolicyLogEntry = serde_json::from_slice(contents)?;
|
||||||
|
macro_rules! log {
|
||||||
|
($level:path) => {
|
||||||
|
event!(
|
||||||
|
target: "policy_log",
|
||||||
|
$level,
|
||||||
|
data = %&serde_json::to_string(&log_entry.data.clone().unwrap())?.as_str(),
|
||||||
|
"{}",
|
||||||
|
log_entry.message.clone().unwrap_or_default(),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
match log_entry.level {
|
||||||
|
PolicyLogEntryLevel::Trace => {
|
||||||
|
log!(Level::TRACE);
|
||||||
|
}
|
||||||
|
PolicyLogEntryLevel::Debug => {
|
||||||
|
log!(Level::DEBUG);
|
||||||
|
}
|
||||||
|
PolicyLogEntryLevel::Info => {
|
||||||
|
log!(Level::INFO);
|
||||||
|
}
|
||||||
|
PolicyLogEntryLevel::Warning => {
|
||||||
|
log!(Level::WARN);
|
||||||
|
}
|
||||||
|
PolicyLogEntryLevel::Error => {
|
||||||
|
log!(Level::ERROR);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
6
src/runtime.rs
Normal file
6
src/runtime.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// NOTE(review): this private enum mirrors `src/runtimes/mod.rs::Runtime`
// and references `BurregoEvaluator` without a visible import — confirm
// whether this module is still compiled or is dead code left behind.
enum Runtime {
    Wapc(wapc::WapcHost),
    // The `BurregoEvaluator` variant is boxed since it outsizes the
    // other variants of this enum.
    Burrego(Box<BurregoEvaluator>),
}
|
||||||
188
src/runtimes/burrego.rs
Normal file
188
src/runtimes/burrego.rs
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
use anyhow::anyhow;
|
||||||
|
use burrego::host_callbacks::HostCallbacks;
|
||||||
|
use kubewarden_policy_sdk::settings::SettingsValidationResponse;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use serde_json::json;
|
||||||
|
use tracing::error;
|
||||||
|
|
||||||
|
use crate::admission_response::{AdmissionResponse, AdmissionResponseStatus};
|
||||||
|
use crate::policy_evaluator::RegoPolicyExecutionMode;
|
||||||
|
use crate::policy_evaluator::{PolicySettings, ValidateRequest};
|
||||||
|
|
||||||
|
/// Everything needed to evaluate a Rego policy with burrego: the evaluator,
/// the entrypoint to invoke, and whether the policy expects OPA or
/// Gatekeeper calling conventions.
pub(crate) struct BurregoStack {
    pub evaluator: burrego::Evaluator,
    pub entrypoint_id: i32,
    pub policy_execution_mode: RegoPolicyExecutionMode,
}
|
||||||
|
|
||||||
|
/// Thin wrapper giving validation methods mutable access to a `BurregoStack`.
pub(crate) struct Runtime<'a>(pub(crate) &'a mut BurregoStack);

// `opa_abort` / `opa_println` have empty bodies on purpose: the
// `tracing::instrument` attribute records the `msg` argument as a span at
// the stated level, which is all the host needs to do.
#[tracing::instrument(level = "error")]
fn opa_abort(msg: &str) {}

#[tracing::instrument(level = "info")]
fn opa_println(msg: &str) {}

/// Builds the set of host callbacks handed to the burrego evaluator.
pub(crate) fn new_host_callbacks() -> HostCallbacks {
    HostCallbacks {
        opa_abort,
        opa_println,
    }
}
|
||||||
|
|
||||||
|
impl<'a> Runtime<'a> {
    /// Evaluates the admission `request` against the Rego policy, shaping
    /// input/data per the policy's execution mode (OPA vs Gatekeeper) and
    /// translating the evaluation outcome into an `AdmissionResponse`.
    /// Evaluation failures are mapped to an internal-server-error rejection.
    pub fn validate(
        &mut self,
        settings: &PolicySettings,
        request: &ValidateRequest,
    ) -> AdmissionResponse {
        let uid = request.uid();

        // OPA and Gatekeeper expect arguments in different ways. Provide the ones that each expect.
        let (document_to_evaluate, data) = match self.0.policy_execution_mode {
            RegoPolicyExecutionMode::Opa => {
                // Policies for OPA expect the whole `AdmissionReview`
                // object: produce a synthetic external one so
                // existing OPA policies are compatible.
                (
                    json!({
                        "apiVersion": "admission.k8s.io/v1",
                        "kind": "AdmissionReview",
                        "request": &request.0,
                    }),
                    json!(settings),
                )
            }
            RegoPolicyExecutionMode::Gatekeeper => {
                // Gatekeeper policies include a toplevel `review`
                // object that contains the AdmissionRequest to be
                // evaluated in an `object` attribute, and the
                // parameters -- defined in their `ConstraintTemplate`
                // and configured when the Policy is created.
                (
                    json!({
                        "parameters": settings,
                        "review": &request.0,
                    }),
                    json!({"kubernetes": ""}), // TODO (ereslibre): Kubernetes context goes here
                )
            }
        };

        let burrego_evaluation =
            self.0
                .evaluator
                .evaluate(self.0.entrypoint_id, &document_to_evaluate, &data);

        match burrego_evaluation {
            Ok(evaluation_result) => {
                match self.0.policy_execution_mode {
                    RegoPolicyExecutionMode::Opa => {
                        // Open Policy agent policies entrypoint
                        // return a Kubernetes `AdmissionReview`
                        // object.
                        let evaluation_result = evaluation_result
                            .get(0)
                            .and_then(|r| r.get("result"))
                            .and_then(|r| r.get("response"));

                        match evaluation_result {
                            Some(evaluation_result) => {
                                match serde_json::from_value(evaluation_result.clone()) {
                                    // Force the response uid to match the request,
                                    // whatever the policy returned.
                                    Ok(evaluation_result) => AdmissionResponse {
                                        uid: uid.to_string(),
                                        ..evaluation_result
                                    },
                                    Err(err) => AdmissionResponse::reject_internal_server_error(
                                        uid.to_string(),
                                        err.to_string(),
                                    ),
                                }
                            }
                            None => AdmissionResponse::reject_internal_server_error(
                                uid.to_string(),
                                "cannot interpret OPA policy result".to_string(),
                            ),
                        }
                    }
                    RegoPolicyExecutionMode::Gatekeeper => {
                        // Gatekeeper entrypoint is usually a
                        // `violations` rule that might evaluate to a
                        // list of violations, each violation with a
                        // `msg` string explaining the violation
                        // reason. If no violations are reported, the
                        // request is accepted. Otherwise it is
                        // rejected.
                        #[derive(Debug, Deserialize)]
                        struct Violation {
                            msg: Option<String>,
                        }
                        #[derive(Debug, Default, Deserialize)]
                        struct Violations {
                            result: Vec<Violation>,
                        }

                        // NOTE: a malformed response decodes to the default
                        // (empty) violation list, i.e. it is accepted.
                        let violations: Violations = evaluation_result
                            .get(0)
                            .ok_or_else(|| anyhow!("invalid response from policy"))
                            .and_then(|response| {
                                serde_json::from_value(response.clone())
                                    .map_err(|err| anyhow!("invalid response from policy: {}", err))
                            })
                            .unwrap_or_default();

                        if violations.result.is_empty() {
                            AdmissionResponse {
                                uid: uid.to_string(),
                                allowed: true,
                                ..Default::default()
                            }
                        } else {
                            AdmissionResponse {
                                uid: uid.to_string(),
                                allowed: false,
                                status: Some(AdmissionResponseStatus {
                                    // Join all violation messages into one status line.
                                    message: Some(
                                        violations
                                            .result
                                            .iter()
                                            .filter_map(|violation| violation.msg.clone())
                                            .collect::<Vec<String>>()
                                            .join(", "),
                                    ),
                                    ..Default::default()
                                }),
                                ..Default::default()
                            }
                        }
                    }
                }
            }
            Err(err) => {
                error!(
                    error = ?err,
                    "error evaluating policy with burrego"
                );
                // After an epoch-deadline interruption the evaluator is in an
                // undefined state: reset it before the next invocation.
                if matches!(
                    err,
                    burrego::errors::BurregoError::ExecutionDeadlineExceeded
                ) {
                    if let Err(reset_error) = self.0.evaluator.reset() {
                        error!(?reset_error, "cannot reset burrego evaluator, further invocations might fail or behave not properly");
                    }
                }
                AdmissionResponse::reject_internal_server_error(uid.to_string(), err.to_string())
            }
        }
    }

    /// Always reports the settings as valid.
    pub fn validate_settings(&mut self, _settings: String) -> SettingsValidationResponse {
        // The burrego backend is mainly for compatibility with
        // existing OPA policies. Those policies don't have a generic
        // way of validating settings. Return true
        SettingsValidationResponse {
            valid: true,
            message: None,
        }
    }
}
|
||||||
7
src/runtimes/mod.rs
Normal file
7
src/runtimes/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
pub mod burrego;
|
||||||
|
pub(crate) mod wapc;
|
||||||
|
|
||||||
|
/// Backend a policy runs on: a waPC (Kubewarden-native) host or a
/// burrego (Rego/OPA) evaluator.
pub(crate) enum Runtime {
    Wapc(wapc::WapcStack),
    Burrego(burrego::BurregoStack),
}
|
||||||
533
src/runtimes/wapc.rs
Normal file
533
src/runtimes/wapc.rs
Normal file
@@ -0,0 +1,533 @@
|
|||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use kubewarden_policy_sdk::host_capabilities::{
|
||||||
|
crypto_v1::{CertificateVerificationRequest, CertificateVerificationResponse},
|
||||||
|
SigstoreVerificationInputV1, SigstoreVerificationInputV2,
|
||||||
|
};
|
||||||
|
use kubewarden_policy_sdk::metadata::ProtocolVersion;
|
||||||
|
use kubewarden_policy_sdk::response::ValidationResponse as PolicyValidationResponse;
|
||||||
|
use kubewarden_policy_sdk::settings::SettingsValidationResponse;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use serde_json::json;
|
||||||
|
use std::{collections::HashMap, convert::TryFrom, sync::RwLock};
|
||||||
|
use tokio::sync::oneshot::Receiver;
|
||||||
|
use tokio::sync::{mpsc, oneshot};
|
||||||
|
use tracing::{debug, error, info};
|
||||||
|
use wasmtime_provider::wasmtime;
|
||||||
|
|
||||||
|
use crate::admission_response::AdmissionResponse;
|
||||||
|
use crate::callback_handler::verify_certificate;
|
||||||
|
use crate::callback_requests::{CallbackRequest, CallbackRequestType, CallbackResponse};
|
||||||
|
use crate::cluster_context::ClusterContext;
|
||||||
|
use crate::policy::Policy;
|
||||||
|
use crate::policy_evaluator::{PolicySettings, ValidateRequest};
|
||||||
|
|
||||||
|
/// Thin wrapper giving validation methods mutable access to a `WapcStack`.
pub(crate) struct Runtime<'a>(pub(crate) &'a mut WapcStack);

lazy_static! {
    // Maps a waPC host id to the Policy it runs. `host_callback` uses it to
    // find the policy that issued a host call; `WapcStack::reset` re-keys
    // an entry when a new host replaces an old one.
    pub(crate) static ref WAPC_POLICY_MAPPING: RwLock<HashMap<u64, Policy>> =
        RwLock::new(HashMap::with_capacity(64));
}

/// Error message returned by wasmtime_provider when the guest execution
/// is interrupted because of epoch deadline is exceeded.
///
/// Unfortunately, wasmtime_provider doesn't return a typed error, hence we have
/// to look for this text
const WAPC_EPOCH_INTERRUPTION_ERR_MSG: &str = "guest code interrupted, execution deadline exceeded";
|
||||||
|
|
||||||
|
/// Dispatcher for waPC host calls issued by a running policy.
///
/// Requests are routed by `binding` -> `namespace` -> `operation`;
/// `payload` carries the JSON-encoded argument and the JSON-encoded
/// response bytes are returned. Unknown routes produce an error.
pub(crate) fn host_callback(
    policy_id: u64,
    binding: &str,
    namespace: &str,
    operation: &str,
    payload: &[u8],
) -> Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>> {
    match binding {
        "kubewarden" => match namespace {
            "tracing" => match operation {
                "log" => {
                    // Look up the calling policy and forward the structured
                    // log entry to it; logging failures are reported but do
                    // not fail the host call.
                    let policy_mapping = WAPC_POLICY_MAPPING.read().unwrap();
                    let policy = policy_mapping.get(&policy_id).unwrap();
                    if let Err(e) = policy.log(payload) {
                        let p =
                            String::from_utf8(payload.to_vec()).unwrap_or_else(|e| e.to_string());
                        error!(
                            payload = p.as_str(),
                            error = e.to_string().as_str(),
                            "Cannot log event"
                        );
                    }
                    Ok(Vec::new())
                }
                _ => {
                    error!(namespace, operation, "unknown operation");
                    Err(format!("unknown operation: {}", operation).into())
                }
            },
            "oci" => match operation {
                // Sigstore verification, v1 request format.
                "v1/verify" => {
                    let req: SigstoreVerificationInputV1 =
                        serde_json::from_slice(payload.to_vec().as_ref())?;
                    let req_type: CallbackRequestType = req.into();
                    let (tx, rx) = oneshot::channel::<Result<CallbackResponse>>();
                    let req = CallbackRequest {
                        request: req_type,
                        response_channel: tx,
                    };

                    send_request_and_wait_for_response(policy_id, binding, operation, req, rx)
                }
                // Sigstore verification, v2 request format.
                "v2/verify" => {
                    let req: SigstoreVerificationInputV2 =
                        serde_json::from_slice(payload.to_vec().as_ref())?;
                    let req_type: CallbackRequestType = req.into();
                    let (tx, rx) = oneshot::channel::<Result<CallbackResponse>>();
                    let req = CallbackRequest {
                        request: req_type,
                        response_channel: tx,
                    };

                    send_request_and_wait_for_response(policy_id, binding, operation, req, rx)
                }
                // Resolve an OCI image reference to its manifest digest.
                "v1/manifest_digest" => {
                    let image: String = serde_json::from_slice(payload.to_vec().as_ref())?;
                    debug!(
                        policy_id,
                        binding,
                        operation,
                        image = image.as_str(),
                        "Sending request via callback channel"
                    );
                    let (tx, rx) = oneshot::channel::<Result<CallbackResponse>>();
                    let req = CallbackRequest {
                        request: CallbackRequestType::OciManifestDigest { image },
                        response_channel: tx,
                    };
                    send_request_and_wait_for_response(policy_id, binding, operation, req, rx)
                }
                _ => {
                    error!("unknown operation: {}", operation);
                    Err(format!("unknown operation: {}", operation).into())
                }
            },
            "net" => match operation {
                // DNS resolution on behalf of the policy.
                "v1/dns_lookup_host" => {
                    let host: String = serde_json::from_slice(payload.to_vec().as_ref())?;
                    debug!(
                        policy_id,
                        binding,
                        operation,
                        ?host,
                        "Sending request via callback channel"
                    );
                    let (tx, rx) = oneshot::channel::<Result<CallbackResponse>>();
                    let req = CallbackRequest {
                        request: CallbackRequestType::DNSLookupHost { host },
                        response_channel: tx,
                    };
                    send_request_and_wait_for_response(policy_id, binding, operation, req, rx)
                }
                _ => {
                    error!("unknown operation: {}", operation);
                    Err(format!("unknown operation: {}", operation).into())
                }
            },
            "crypto" => match operation {
                // Certificate chain verification; handled synchronously,
                // no callback channel round-trip needed.
                "v1/is_certificate_trusted" => {
                    let req: CertificateVerificationRequest =
                        serde_json::from_slice(payload.to_vec().as_ref())?;
                    let response: CertificateVerificationResponse = match verify_certificate(req) {
                        Ok(b) => b.into(),
                        Err(e) => {
                            return Err(format!("Error when verifying certificate: {}", e).into())
                        }
                    };
                    Ok(serde_json::to_vec(&response)?)
                }
                _ => {
                    error!(namespace, operation, "unknown operation");
                    Err(format!("unknown operation: {}", operation).into())
                }
            },
            _ => {
                error!("unknown namespace: {}", namespace);
                Err(format!("unknown namespace: {}", namespace).into())
            }
        },
        "kubernetes" => {
            // Serve cached cluster resources from the ClusterContext.
            let cluster_context = ClusterContext::get();
            match namespace {
                "ingresses" => Ok(cluster_context.ingresses().into()),
                "namespaces" => Ok(cluster_context.namespaces().into()),
                "services" => Ok(cluster_context.services().into()),
                _ => {
                    error!("unknown namespace: {}", namespace);
                    Err(format!("unknown namespace: {}", namespace).into())
                }
            }
        }
        _ => {
            error!("unknown binding: {}", binding);
            Err(format!("unknown binding: {}", binding).into())
        }
    }
}
|
||||||
|
|
||||||
|
fn send_request_and_wait_for_response(
|
||||||
|
policy_id: u64,
|
||||||
|
binding: &str,
|
||||||
|
operation: &str,
|
||||||
|
req: CallbackRequest,
|
||||||
|
mut rx: Receiver<Result<CallbackResponse>>,
|
||||||
|
) -> Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let policy_mapping = WAPC_POLICY_MAPPING.read().unwrap();
|
||||||
|
let policy = policy_mapping.get(&policy_id).unwrap();
|
||||||
|
|
||||||
|
let cb_channel: mpsc::Sender<CallbackRequest> = if let Some(c) = policy.callback_channel.clone()
|
||||||
|
{
|
||||||
|
Ok(c)
|
||||||
|
} else {
|
||||||
|
error!(
|
||||||
|
policy_id,
|
||||||
|
binding, operation, "Cannot process waPC request: callback channel not provided"
|
||||||
|
);
|
||||||
|
Err(anyhow!(
|
||||||
|
"Cannot process waPC request: callback channel not provided"
|
||||||
|
))
|
||||||
|
}?;
|
||||||
|
|
||||||
|
let send_result = cb_channel.try_send(req);
|
||||||
|
if let Err(e) = send_result {
|
||||||
|
return Err(format!("Error sending request over callback channel: {:?}", e).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
// wait for the response
|
||||||
|
loop {
|
||||||
|
match rx.try_recv() {
|
||||||
|
Ok(msg) => {
|
||||||
|
return match msg {
|
||||||
|
Ok(resp) => Ok(resp.payload),
|
||||||
|
Err(e) => {
|
||||||
|
error!(
|
||||||
|
policy_id,
|
||||||
|
binding,
|
||||||
|
operation,
|
||||||
|
error = e.to_string().as_str(),
|
||||||
|
"callback evaluation failed"
|
||||||
|
);
|
||||||
|
Err(format!("Callback evaluation failure: {:?}", e).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(oneshot::error::TryRecvError::Empty) => {
|
||||||
|
// do nothing, keep waiting for a reply
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
error!(
|
||||||
|
policy_id,
|
||||||
|
binding,
|
||||||
|
operation,
|
||||||
|
error = e.to_string().as_str(),
|
||||||
|
"Cannot process waPC request: error obtaining response over callback channel"
|
||||||
|
);
|
||||||
|
return Err("Error obtaining response over callback channel".into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Everything needed to run — and re-create after an epoch-deadline
/// interruption — the waPC host for one policy. Engine, module and
/// deadlines are kept so `reset` can rebuild an identical host.
pub(crate) struct WapcStack {
    engine: wasmtime::Engine,
    module: wasmtime::Module,
    epoch_deadlines: Option<crate::policy_evaluator_builder::EpochDeadlines>,
    wapc_host: wapc::WapcHost,
}
|
||||||
|
|
||||||
|
impl WapcStack {
    /// Builds a `WapcStack` by provisioning a waPC host from the given
    /// engine, module and optional epoch deadlines.
    pub(crate) fn new(
        engine: wasmtime::Engine,
        module: wasmtime::Module,
        epoch_deadlines: Option<crate::policy_evaluator_builder::EpochDeadlines>,
    ) -> Result<Self> {
        let wapc_host = Self::setup_wapc_host(engine.clone(), module.clone(), epoch_deadlines)?;

        Ok(Self {
            engine,
            module,
            epoch_deadlines,
            wapc_host,
        })
    }

    /// Provision a new wapc_host. Useful for starting from a clean slate
    /// after an epoch deadline interruption is raised.
    ///
    /// This method takes care of de-registering the old wapc_host and
    /// registering the new one inside of the global WAPC_POLICY_MAPPING
    /// variable.
    pub(crate) fn reset(&mut self) -> Result<()> {
        // Create a new wapc_host
        let new_wapc_host = Self::setup_wapc_host(
            self.engine.clone(),
            self.module.clone(),
            self.epoch_deadlines,
        )?;
        let old_wapc_host_id = self.wapc_host.id();

        // Remove the old policy from WAPC_POLICY_MAPPING and add the new one
        // We need a write lock to do that
        {
            let mut map = WAPC_POLICY_MAPPING
                .write()
                .expect("cannot get write access to WAPC_POLICY_MAPPING");
            let policy = map.remove(&old_wapc_host_id).ok_or_else(|| {
                anyhow!("cannot find old waPC policy with id {}", old_wapc_host_id)
            })?;
            map.insert(new_wapc_host.id(), policy);
        }

        self.wapc_host = new_wapc_host;

        Ok(())
    }

    /// Builds the wasmtime engine provider (enabling epoch interruptions
    /// when deadlines are set) and wraps it in a waPC host wired to
    /// `host_callback`.
    fn setup_wapc_host(
        engine: wasmtime::Engine,
        module: wasmtime::Module,
        epoch_deadlines: Option<crate::policy_evaluator_builder::EpochDeadlines>,
    ) -> Result<wapc::WapcHost> {
        let mut builder = wasmtime_provider::WasmtimeEngineProviderBuilder::new()
            .engine(engine)
            .module(module);
        if let Some(deadlines) = epoch_deadlines {
            builder = builder.enable_epoch_interruptions(deadlines.wapc_init, deadlines.wapc_func);
        }

        let engine_provider = builder.build()?;
        let wapc_host =
            wapc::WapcHost::new(Box::new(engine_provider), Some(Box::new(host_callback)))?;
        Ok(wapc_host)
    }

    /// Identifier of the currently active waPC host.
    pub fn wapc_host_id(&self) -> u64 {
        self.wapc_host.id()
    }
}
|
||||||
|
|
||||||
|
impl<'a> Runtime<'a> {
    /// Evaluates an admission request against the policy by invoking the
    /// guest's `validate` waPC function.
    ///
    /// The request and the policy settings are serialized together into a
    /// single JSON payload. Any failure (serialization, waPC call,
    /// response deserialization) is converted into an
    /// `AdmissionResponse` rejecting the request with an internal server
    /// error — this method never panics on bad guest output.
    pub fn validate(
        &mut self,
        settings: &PolicySettings,
        request: &ValidateRequest,
    ) -> AdmissionResponse {
        let uid = request.uid();

        //NOTE: object is null for DELETE operations
        let req_obj = request.0.get("object");

        let validate_params = json!({
            "request": request,
            "settings": settings,
        });

        let validate_str = match serde_json::to_string(&validate_params) {
            Ok(s) => s,
            Err(e) => {
                error!(
                    error = e.to_string().as_str(),
                    "cannot serialize validation params"
                );
                return AdmissionResponse::reject_internal_server_error(
                    uid.to_string(),
                    e.to_string(),
                );
            }
        };

        match self.0.wapc_host.call("validate", validate_str.as_bytes()) {
            Ok(res) => {
                // Guest replied: parse its PolicyValidationResponse and turn
                // it into a proper AdmissionResponse, falling back to an
                // internal-server-error rejection on any parse/build failure.
                let pol_val_resp: Result<PolicyValidationResponse> = serde_json::from_slice(&res)
                    .map_err(|e| anyhow!("cannot deserialize policy validation response: {:?}", e));
                pol_val_resp
                    .and_then(|pol_val_resp| {
                        AdmissionResponse::from_policy_validation_response(
                            uid.to_string(),
                            req_obj,
                            &pol_val_resp,
                        )
                    })
                    .unwrap_or_else(|e| {
                        error!(
                            error = e.to_string().as_str(),
                            "cannot build validation response from policy result"
                        );
                        AdmissionResponse::reject_internal_server_error(
                            uid.to_string(),
                            e.to_string(),
                        )
                    })
            }
            Err(e) => {
                error!(error = e.to_string().as_str(), "waPC communication error");
                // Detect epoch-deadline interruptions by inspecting the error
                // message; see the explanation below for why a full waPC
                // stack reset is required in that case.
                if e.to_string()
                    .as_str()
                    .contains(WAPC_EPOCH_INTERRUPTION_ERR_MSG)
                {
                    // TL;DR: after code execution is interrupted because of an
                    // epoch deadline being reached, we have to reset the waPC host
                    // to ensure further invocations of the policy work as expected.
                    //
                    // The waPC host is using the wasmtime_provider, which internally
                    // uses a wasmtime::Engine and a wasmtime::Store.
                    // The Store keeps track of the stateful data of the policy. When an
                    // epoch deadline is reached, wasmtime::Engine stops the execution of
                    // the wasm guest. There's NO CLEANUP code called inside of the guest.
                    // It's like unplugging the power cord from a turned on computer.
                    //
                    // When the guest function is invoked again, the previous state stored
                    // inside of wasmtime::Store is used.
                    // That can lead to unexpected issues. For example, if the guest makes
                    // uses of a Mutex, something like that can happen (I've witnessed that):
                    //
                    // * Guest code 1st run:
                    //   - Mutex.lock
                    // * Host: interrupt code execution because of epoch deadline
                    // * Guest code 2nd run:
                    //   - The Mutex is still locked, because that's what is stored inside
                    //     of the wasmtime::Store
                    //   - Guest attempts to `lock` the Mutex -> error is raised
                    //
                    // The guest code will stay in this broken state forever. The only
                    // solution to that is to reinitialize the wasmtime::Store.
                    // It's hard to provide a facility for that inside of WapcHost, because
                    // epoch deadline is a feature provided only by the wasmtime backend.
                    // Hence it's easier to just recreate the wapc_host associated with this
                    // policy evaluator
                    if let Err(reset_err) = self.0.reset() {
                        error!(error = reset_err.to_string().as_str(), "cannot reset waPC stack - further calls to this policy can result in errors");
                    } else {
                        info!("wapc_host reset performed after timeout protection was triggered");
                    }
                }
                AdmissionResponse::reject_internal_server_error(uid.to_string(), e.to_string())
            }
        }
    }

    /// Asks the policy to validate its own settings by invoking the
    /// guest's `validate_settings` waPC function.
    ///
    /// `settings` is the raw JSON string handed to the guest. waPC or
    /// deserialization failures are reported as a non-valid
    /// `SettingsValidationResponse` carrying the error message — this
    /// method never returns an Err.
    pub fn validate_settings(&mut self, settings: String) -> SettingsValidationResponse {
        match self
            .0
            .wapc_host
            .call("validate_settings", settings.as_bytes())
        {
            Ok(res) => {
                let vr: Result<SettingsValidationResponse> = serde_json::from_slice(&res)
                    .map_err(|e| anyhow!("cannot convert response: {:?}", e));
                vr.unwrap_or_else(|e| SettingsValidationResponse {
                    valid: false,
                    message: Some(format!("error: {:?}", e)),
                })
            }
            Err(err) => SettingsValidationResponse {
                valid: false,
                message: Some(format!(
                    "Error invoking settings validation callback: {:?}",
                    err
                )),
            },
        }
    }

    /// Queries the guest for the waPC protocol version it implements by
    /// invoking the `protocol_version` waPC function with an empty payload.
    ///
    /// Errors when the waPC call fails or when the guest's reply cannot
    /// be converted into a `ProtocolVersion`.
    pub fn protocol_version(&self) -> Result<ProtocolVersion> {
        // `&[0; 0]` is an empty byte slice: this waPC function takes no input.
        match self.0.wapc_host.call("protocol_version", &[0; 0]) {
            Ok(res) => ProtocolVersion::try_from(res.clone()).map_err(|e| {
                anyhow!(
                    "Cannot create ProtocolVersion object from '{:?}': {:?}",
                    res,
                    e
                )
            }),
            Err(err) => Err(anyhow!(
                "Cannot invoke 'protocol_version' waPC function: {:?}",
                err
            )),
        }
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::{sync, thread, time};

    // Fixed: the test function name was misspelled
    // ("wapc_epoch_interrutpion_error_msg").
    #[test]
    fn wapc_epoch_interruption_error_msg() {
        // This unit test makes sure that the waPC host error raised when a
        // wasmtime epoch_interruption happens contains the
        // WAPC_EPOCH_INTERRUPTION_ERR_MSG string.
        //
        // The unit test is a bit "low-level", meaning the target are the
        // wapc libraries we consume, not the "high" level code we expose
        // as part of policy-evaluator.
        // This is done to make the whole testing process simple:
        // * No need to download a wasm module from a registry/commit a ~3Mb
        //   binary blob to this git repository
        // * Reduce the code being tested to the bare minimum

        let mut engine_conf = wasmtime::Config::default();
        engine_conf.epoch_interruption(true);
        let engine = wasmtime::Engine::new(&engine_conf).expect("cannot create wasmtime engine");

        let wat = include_bytes!("../../test_data/endless_wasm/wapc_endless_loop.wat");
        let module = wasmtime::Module::new(&engine, wat).expect("cannot compile WAT to wasm");

        // Create the wapc engine; the code will be interrupted after 10
        // ticks happen. We produce 1 tick every 10 milliseconds, see below
        let wapc_engine_builder = wasmtime_provider::WasmtimeEngineProviderBuilder::new()
            .engine(engine.clone())
            .module(module)
            .enable_epoch_interruptions(10, 10);

        let wapc_engine = wapc_engine_builder
            .build()
            .expect("error creating wasmtime engine provider");
        let host = wapc::WapcHost::new(Box::new(wapc_engine), Some(Box::new(host_callback)))
            .expect("cannot create waPC host");

        // Create a lock to break the endless loop of the ticker thread
        let timer_lock = sync::Arc::new(sync::RwLock::new(false));
        let quit_lock = timer_lock.clone();

        // Start a thread that ticks the epoch timer of the wasmtime
        // engine. 1 tick equals 10 milliseconds
        thread::spawn(move || {
            let interval = time::Duration::from_millis(10);
            loop {
                thread::sleep(interval);
                engine.increment_epoch();
                if *quit_lock.read().unwrap() {
                    break;
                }
            }
        });

        // This triggers an endless loop inside of wasm.
        // If the epoch_interruption doesn't work, this unit test
        // will never complete
        let res = host.call("run", "".as_bytes());

        // Tell the ticker thread to quit
        {
            let mut w = timer_lock.write().unwrap();
            *w = true;
        }

        // Ensure we got back an error from waPC; the error must
        // contain the WAPC_EPOCH_INTERRUPTION_ERR_MSG string
        let err = res.unwrap_err();
        assert!(err
            .to_string()
            .as_str()
            .contains(WAPC_EPOCH_INTERRUPTION_ERR_MSG));
    }
}
|
||||||
1
test_data/endless_wasm/.gitignore
vendored
Normal file
1
test_data/endless_wasm/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
*.wasm
|
||||||
12
test_data/endless_wasm/Makefile
Normal file
12
test_data/endless_wasm/Makefile
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Build the WebAssembly test fixtures from their WAT sources.
# Requires the `wat2wasm` tool (part of the WABT toolkit) on the PATH.

# Endless-loop module meant for a vanilla wasmtime engine.
wasm_endless_loop.wasm: wasm_endless_loop.wat
	wat2wasm wasm_endless_loop.wat -o wasm_endless_loop.wasm

# Endless-loop module exposing the waPC guest interface.
wapc_endless_loop.wasm: wapc_endless_loop.wat
	wat2wasm wapc_endless_loop.wat -o wapc_endless_loop.wasm

# Build both modules.
.PHONY: build
build: wasm_endless_loop.wasm wapc_endless_loop.wasm

# Remove all generated wasm binaries.
.PHONY: clean
clean:
	rm -rf *.wasm
|
||||||
28
test_data/endless_wasm/README.md
Normal file
28
test_data/endless_wasm/README.md
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
This directory contains the source code of two WebAssembly modules, both of them
|
||||||
|
perform an endless loop.
|
||||||
|
|
||||||
|
The code is written using the WebAssembly text format (aka `WAT`).
|
||||||
|
|
||||||
|
## `wasm_endless_loop.wat`
|
||||||
|
|
||||||
|
This is a module meant to be used with vanilla wasmtime engine.
|
||||||
|
|
||||||
|
The code exports a function called `endless_loop` that just performs
|
||||||
|
an endless loop.
|
||||||
|
This function takes zero parameters and doesn't return anything.
|
||||||
|
|
||||||
|
The `start` function of the WebAssembly module invokes the `endless_loop`, that
|
||||||
|
means that running the final `.wasm` file via something like `wasmtime run` will
|
||||||
|
cause the endless function to be executed.
|
||||||
|
|
||||||
|
## `wapc_endless_loop.wat`
|
||||||
|
|
||||||
|
This is a module meant to be used by a waPC host.
|
||||||
|
|
||||||
|
This code cheats a little, from the outside it looks like any regular waPC module
|
||||||
|
because it exposes the two functions required by a waPC host. However, these
|
||||||
|
two functions are reduced to the bare minimum.
|
||||||
|
|
||||||
|
The most important difference is that no waPC function is registered by the
|
||||||
|
module. Calling any kind of waPC function from the host will result in an
|
||||||
|
endless loop being executed.
|
||||||
47
test_data/endless_wasm/wapc_endless_loop.wat
Normal file
47
test_data/endless_wasm/wapc_endless_loop.wat
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
;; This is a module meant to be used by a waPC host.
;;
;; This code cheats a little, from the outside it looks like any regular waPC module
;; because it exposes the two functions required by a waPC host. However, these
;; two functions are reduced to the bare minimum.
;;
;; The most important difference is that no waPC function is registered by the
;; module. Calling any kind of waPC function from the host will result in an
;; endless loop being executed.

(module
  (memory (export "memory") 1)

  ;; waPC host expects a function called wapc_init to be exported
  (func $wapc_init (export "wapc_init")
    ;; we don't do anything in there
    nop
  )

  ;; non exported function that performs an endless loop
  (func $endless_loop
    ;; create a variable and initialize it to 0
    (local $am_i_done i32)

    (loop $endless
      ;; if $am_i_done is not equal to 1 -> go back to the beginning of the loop
      ;; $am_i_done is never changed, hence the loop never terminates
      local.get $am_i_done
      i32.const 1
      i32.ne
      br_if $endless
    )
  )

  ;; waPC host expects a function called __guest_call to be exported.
  ;; A real implementation would look for the name of the waPC function
  ;; to be invoked, read its payload, invoke the function and
  ;; provide a success/failure boolean as result.
  ;; In this case we just start an endless loop. We don't care about the
  ;; waPC function to be invoked, nor the payload.
  (func $guest_call (export "__guest_call")
    (param $operation_size i32)
    (param $payload_size i32)
    (result i32)
    (call $endless_loop)
    i32.const 0
  )
)
|
||||||
15
test_data/endless_wasm/wasm_endless_loop.wat
Normal file
15
test_data/endless_wasm/wasm_endless_loop.wat
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
(module
  ;; Exported function that loops forever: $am_i_done starts at 0 and is
  ;; never modified, so the inequality check below always branches back
  ;; to the top of the loop.
  (func $endless_loop (export "endless_loop")
    ;; create a variable and initialize it to 0
    (local $am_i_done i32)

    (loop $endless
      ;; if $am_i_done is not equal to 1 -> go back to the beginning of the loop
      local.get $am_i_done
      i32.const 1
      i32.ne
      br_if $endless
    )
  )
  ;; The start function runs automatically on instantiation, so simply
  ;; loading this module (e.g. via `wasmtime run`) triggers the endless loop.
  (start $endless_loop)
)
|
||||||
Reference in New Issue
Block a user