From 4db2270dcfc04e25b587506f464d9c3deaa60b74 Mon Sep 17 00:00:00 2001 From: Anand Krishnamoorthi <35780660+anakrish@users.noreply.github.com> Date: Wed, 22 Nov 2023 14:19:23 -0800 Subject: [PATCH] More library functions (#51) - units.parse, units.parse_bytes - json.is_valid, json.marshal, json.unmarshal - yaml.is_valid, yaml.marshal, yaml.unmarshal - object.subset - set_diff * Also print number of errors due to each missing function * Also lock down fully passing OPA suites Signed-off-by: Anand Krishnamoorthi --- .github/workflows/rust.yml | 3 + Cargo.toml | 2 +- scripts/pre-push | 3 + src/builtins/deprecated.rs | 12 +- src/builtins/encoding.rs | 56 +++++- src/builtins/mod.rs | 3 +- src/builtins/objects.rs | 29 ++++ src/builtins/units.rs | 162 ++++++++++++++++++ src/interpreter.rs | 64 +++++-- src/scheduler.rs | 6 +- .../cases/builtins/units/parse.yaml | 153 +++++++++++++++++ .../cases/builtins/units/parse_bytes.yaml | 115 +++++++++++++ tests/opa.passing | 42 +++++ tests/opa.rs | 31 +++- 14 files changed, 650 insertions(+), 31 deletions(-) create mode 100644 src/builtins/units.rs create mode 100644 tests/interpreter/cases/builtins/units/parse.yaml create mode 100644 tests/interpreter/cases/builtins/units/parse_bytes.yaml create mode 100644 tests/opa.passing diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 00ca61da..8f652985 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -32,3 +32,6 @@ jobs: run: cargo build --verbose --all-targets --target x86_64-unknown-linux-musl - name: Run tests (MUSL) run: cargo test --verbose --target x86_64-unknown-linux-musl + - name: Run tests (OPA Conformance) + run: >- + cargo test --test opa -- $(tr '\n' ' ' < tests/opa.passing) diff --git a/Cargo.toml b/Cargo.toml index ad8eb28e..dfe91359 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,4 +32,4 @@ debug = true [[test]] name="opa" harness=false -test=false \ No newline at end of file +test=false diff --git a/scripts/pre-push b/scripts/pre-push index 7790f4b5..f48b7e24 100755 --- a/scripts/pre-push +++ b/scripts/pre-push @@ -14,4 +14,7 @@ if [ -f Cargo.toml ]; then if [[ "$OSTYPE" == "linux-gnu"* ]]; then scripts/coverage fi + + # Ensure that OPA conformance tests don't regress. + cargo test --test opa -- $(tr '\n' ' ' < tests/opa.passing) fi diff --git a/src/builtins/deprecated.rs b/src/builtins/deprecated.rs index 20881c99..81b5be90 100644 --- a/src/builtins/deprecated.rs +++ b/src/builtins/deprecated.rs @@ -2,7 +2,7 @@ // Licensed under the MIT License. use crate::ast::{Expr, Ref}; -use crate::builtins::utils::ensure_args_count; +use crate::builtins::utils::{ensure_args_count, ensure_set}; use crate::builtins::BuiltinFcn; use crate::lexer::Span; use crate::value::Value; @@ -19,7 +19,7 @@ lazy_static! 
{ m.insert("all", (all, 1)); m.insert("any", (any, 1)); - + m.insert("set_diff", (set_diff, 2)); m }; } @@ -49,3 +49,11 @@ fn any(span: &Span, params: &[Ref], args: &[Value]) -> Result { } })) } + +fn set_diff(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "set_diff"; + ensure_args_count(span, name, params, args, 2)?; + let s1 = ensure_set(name, ¶ms[0], args[0].clone())?; + let s2 = ensure_set(name, ¶ms[1], args[1].clone())?; + Ok(Value::from_set(s1.difference(&s2).cloned().collect())) +} diff --git a/src/builtins/encoding.rs b/src/builtins/encoding.rs index 0d8ea8b5..87ad94cf 100644 --- a/src/builtins/encoding.rs +++ b/src/builtins/encoding.rs @@ -9,11 +9,17 @@ use crate::value::Value; use std::collections::HashMap; -use anyhow::Result; +use anyhow::{Context, Result}; use data_encoding::BASE64; pub fn register(m: &mut HashMap<&'static str, builtins::BuiltinFcn>) { m.insert("base64.decode", (base64_decode, 1)); + m.insert("json.is_valid", (json_is_valid, 1)); + m.insert("json.marshal", (json_marshal, 1)); + m.insert("jsonunmarshal", (json_unmarshal, 1)); + m.insert("yaml.is_valid", (yaml_is_valid, 1)); + m.insert("yaml.marshal", (yaml_marshal, 1)); + m.insert("yaml.unmarshal", (yaml_unmarshal, 1)); } fn base64_decode(span: &Span, params: &[Ref], args: &[Value]) -> Result { @@ -26,3 +32,51 @@ fn base64_decode(span: &Span, params: &[Ref], args: &[Value]) -> Result], args: &[Value]) -> Result { + let name = "yaml.is_valid"; + ensure_args_count(span, name, params, args, 1)?; + + let yaml_str = ensure_string(name, ¶ms[0], &args[0])?; + Ok(Value::Bool(Value::from_yaml_str(&yaml_str).is_ok())) +} + +fn yaml_marshal(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "yaml.marshal"; + ensure_args_count(span, name, params, args, 1)?; + Ok(Value::String( + serde_yaml::to_string(&args[0]) + .with_context(|| span.error("could not serialize to yaml"))?, + )) +} + +fn yaml_unmarshal(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "yaml.unmarshal"; + ensure_args_count(span, name, params, args, 1)?; + let yaml_str = ensure_string(name, ¶ms[0], &args[0])?; + Value::from_yaml_str(&yaml_str).with_context(|| span.error("could not deserialize yaml.")) +} + +fn json_is_valid(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "json.is_valid"; + ensure_args_count(span, name, params, args, 1)?; + + let json_str = ensure_string(name, ¶ms[0], &args[0])?; + Ok(Value::Bool(Value::from_json_str(&json_str).is_ok())) +} + +fn json_marshal(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "json.marshal"; + ensure_args_count(span, name, params, args, 1)?; + Ok(Value::String( + serde_json::to_string(&args[0]) + .with_context(|| span.error("could not serialize to json"))?, + )) +} + +fn json_unmarshal(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "json.unmarshal"; + ensure_args_count(span, name, params, args, 1)?; + let json_str = ensure_string(name, ¶ms[0], &args[0])?; + Value::from_json_str(&json_str).with_context(|| span.error("could not deserialize json.")) +} diff --git a/src/builtins/mod.rs b/src/builtins/mod.rs index da81bbe3..8ac2d14d 100644 --- a/src/builtins/mod.rs +++ b/src/builtins/mod.rs @@ -17,6 +17,7 @@ mod strings; mod time; mod tracing; pub mod types; +mod units; mod utils; use crate::ast::{Expr, Ref}; @@ -65,7 +66,7 @@ lazy_static! 
{ //opa::register(&mut m); debugging::register(&mut m); tracing::register(&mut m); - + units::register(&mut m); m }; } diff --git a/src/builtins/objects.rs b/src/builtins/objects.rs index f5e70fd2..97ffa840 100644 --- a/src/builtins/objects.rs +++ b/src/builtins/objects.rs @@ -20,6 +20,7 @@ pub fn register(m: &mut HashMap<&'static str, builtins::BuiltinFcn>) { m.insert("object.get", (get, 3)); m.insert("object.keys", (keys, 1)); m.insert("object.remove", (remove, 2)); + m.insert("object.subset", (subset, 2)); } fn json_filter_impl(v: &Value, filter: &Value) -> Value { @@ -202,3 +203,31 @@ fn remove(span: &Span, params: &[Ref], args: &[Value]) -> Result { Ok(Value::Object(obj)) } + +fn is_subset(sup: &Value, sub: &Value) -> bool { + match (sup, sub) { + (Value::Object(sup), Value::Object(sub)) => { + sub.iter().all(|(k, vsub)| { + match sup.get(k) { + // Some(vsup @ Value::Object(_)) => is_subset(vsup, vsub), + Some(vsup) => is_subset(vsup, vsub), + _ => false, + } + }) + } + (Value::Set(sup), Value::Set(sub)) => sub.is_subset(sup), + (Value::Array(sup), Value::Array(sub)) => sup.windows(sub.len()).any(|w| w == &sub[..]), + (Value::Array(sup), Value::Set(_)) => { + let sup = Value::from_set(sup.iter().cloned().collect()); + is_subset(&sup, sub) + } + (sup, sub) => sup == sub, + } +} + +fn subset(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "object.subset"; + ensure_args_count(span, name, params, args, 2)?; + + Ok(Value::Bool(is_subset(&args[0], &args[1]))) +} diff --git a/src/builtins/units.rs b/src/builtins/units.rs new file mode 100644 index 00000000..f5d1733d --- /dev/null +++ b/src/builtins/units.rs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +use crate::ast::{Expr, Ref}; +use crate::builtins; +use crate::builtins::utils::{ensure_args_count, ensure_string}; +use crate::lexer::Span; +use crate::value::{Float, Number, Value}; + +use std::collections::HashMap; + +use anyhow::{bail, Context, Result}; +use ordered_float::OrderedFloat; + +pub fn register(m: &mut HashMap<&'static str, builtins::BuiltinFcn>) { + m.insert("units.parse", (parse, 1)); + m.insert("units.parse_bytes", (parse_bytes, 1)); +} + +fn parse(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "units.parse"; + ensure_args_count(span, name, params, args, 1)?; + let string = ensure_string(name, ¶ms[0], &args[0])?; + let string = string.as_str(); + + // Remove quotes. + let string = if string.starts_with('"') && string.ends_with('"') && string.len() >= 2 { + &string[1..string.len() - 1] + } else { + string + }; + + // Disallow whitespace. 
+ if string.chars().any(char::is_whitespace) { + bail!(span.error("spaces not allowed in resource strings")); + } + + let (number_part, suffix) = match string.find(|c: char| c.is_alphabetic()) { + Some(p) => (&string[0..p], &string[p..]), + _ => (string, ""), + }; + + let n: Float = if number_part.starts_with('.') { + serde_json::from_str(format!("0{number_part}").as_str()) + } else { + serde_json::from_str(number_part) + } + .with_context(|| span.error("could not parse number"))?; + + Ok(Value::Number(Number(OrderedFloat( + n * 10f64.powf(match suffix { + "E" | "e" => 18, + "P" | "p" => 15, + "T" | "t" => 12, + "G" | "g" => 9, + "M" => 6, + "K" | "k" => 3, + "m" => -3, + + // The following are not supported by OPA + "Q" => 30, + "R" => 27, + "Y" => 24, + "Z" => 21, + "h" => 2, + "da" => 1, + "d" => -1, + "c" => -2, + + "μ" => -6, + "n" => -9, + "f" => -15, + "a" => -18, + "z" => -21, + "y" => -24, + "r" => -27, + "q" => -30, + + // No suffix specified. + "" => 0, + _ => { + return Ok(Value::Number(Number(OrderedFloat( + n * 2f64.powf(match suffix.to_ascii_lowercase().as_str() { + "ki" => 10, + "mi" => 20, + "gi" => 30, + "ti" => 40, + "pi" => 50, + "ei" => 60, + "zi" => 70, + "yi" => 80, + _ => return Ok(Value::Undefined), + } as f64), + )))); + } + } as f64), + )))) +} + +fn parse_bytes(span: &Span, params: &[Ref], args: &[Value]) -> Result { + let name = "units.parse_bytes"; + ensure_args_count(span, name, params, args, 1)?; + let string = ensure_string(name, ¶ms[0], &args[0])?; + let string = string.as_str(); + + // Remove quotes. + let string = if string.starts_with('"') && string.ends_with('"') && string.len() >= 2 { + &string[1..string.len() - 1] + } else { + string + }; + + // Disallow whitespace. + if string.chars().any(char::is_whitespace) { + bail!(span.error("spaces not allowed in resource strings")); + } + + let (number_part, suffix) = match string.find(|c: char| c.is_alphabetic()) { + Some(p) => (&string[0..p], &string[p..]), + _ => (string, ""), + }; + + let n: Float = if number_part.starts_with('.') { + serde_json::from_str(format!("0{number_part}").as_str()) + } else { + serde_json::from_str(number_part) + } + .with_context(|| span.error("could not parse number"))?; + + Ok(Value::Number(Number(OrderedFloat(f64::round( + n * 2f64.powf(match suffix.to_ascii_lowercase().as_str() { + "yi" | "yib" => 80, + "zi" | "zib" => 70, + "ei" | "eib" => 60, + "pi" | "pib" => 50, + "ti" | "tib" => 40, + "gi" | "gib" => 30, + "mi" | "mib" => 20, + "ki" | "kib" => 10, + "" => 0, + _ => { + return Ok(Value::Number(Number(OrderedFloat( + n * 10f64.powf(match suffix.to_ascii_lowercase().as_str() { + "q" | "qb" => 30, + "r" | "rb" => 27, + "y" | "yb" => 24, + "z" | "zb" => 21, + "e" | "eb" => 18, + "p" | "pb" => 15, + "t" | "tb" => 12, + "g" | "gb" => 9, + "m" | "mb" => 6, + "k" | "kb" => 3, + _ => { + return Ok(Value::Undefined); + } + } as f64), + )))) + } + } as f64), + ))))) +} diff --git a/src/interpreter.rs b/src/interpreter.rs index 6723a1ea..1c3a45d5 100644 --- a/src/interpreter.rs +++ b/src/interpreter.rs @@ -65,6 +65,7 @@ impl Default for QueryResult { #[derive(Debug, Clone, Default, Serialize)] pub struct QueryResults { + #[serde(skip_serializing_if = "Vec::is_empty")] pub result: Vec, } @@ -75,6 +76,7 @@ struct Context { value: Value, result: Option, results: QueryResults, + is_compr: bool, } #[derive(Debug)] @@ -494,7 +496,7 @@ impl Interpreter { // Omit recording undefined values. 
if value == Value::Undefined { - return Ok(Value::Bool(false)); + return Ok(value); //Ok(Value::Bool(false)); } self.add_variable_or(&name)?; @@ -527,6 +529,7 @@ impl Interpreter { value: Value::new_set(), result: None, results: QueryResults::default(), + is_compr: false, }); let mut r = true; match domain { @@ -842,9 +845,14 @@ impl Interpreter { if let Some(ctx) = self.contexts.last_mut() { if let Some(result) = &mut ctx.result { - result - .expressions - .push(Self::make_expression_result(span, &value)) + if value != Value::Undefined { + result + .expressions + .push(Self::make_expression_result(span, &value)) + } else { + result.bindings = Value::new_object(); + result.expressions.clear(); + } } } @@ -1165,9 +1173,9 @@ impl Interpreter { Value::Set(ref mut s) => { Rc::make_mut(s).insert(output); } - _ => bail!("internal error: invalid context value"), + a => bail!("internal error: invalid context value {a}"), } - } else { + } else if !ctx.is_compr { match &ctx.value { Value::Set(_) => (), _ => ctx.value = Value::Undefined, @@ -1191,7 +1199,11 @@ impl Interpreter { .insert(Value::String(name.to_string()), value.clone()); } } - ctx.results.result.push(result); + if result.expressions.iter().all(|v| v != &Value::Undefined) + && !result.expressions.is_empty() + { + ctx.results.result.push(result); + } } return Ok(true); @@ -1306,7 +1318,11 @@ impl Interpreter { .insert(Value::String(name.to_string()), value.clone()); } } - ctx.results.result.push(result); + if result.expressions.iter().all(|v| v != &Value::Undefined) + && !result.expressions.is_empty() + { + ctx.results.result.push(result); + } } } @@ -1428,6 +1444,7 @@ impl Interpreter { value: Value::new_array(), result: None, results: QueryResults::default(), + is_compr: true, }); // Evaluate body first. @@ -1447,6 +1464,7 @@ impl Interpreter { value: Value::new_set(), result: None, results: QueryResults::default(), + is_compr: true, }); self.eval_query(query)?; @@ -1470,6 +1488,7 @@ impl Interpreter { value: Value::new_object(), result: None, results: QueryResults::default(), + is_compr: true, }); self.eval_query(query)?; @@ -1615,6 +1634,7 @@ impl Interpreter { value: Value::new_set(), result: None, results: QueryResults::default(), + is_compr: false, }; // Back up local variables of current function and empty @@ -1734,7 +1754,6 @@ impl Interpreter { } // Evaluate the associated default rules after non-default rules if let Some(rules) = self.default_rules.get(&path) { - dbg!(&path); for (r, _) in rules.clone() { if !self.processed.contains(&r) { let module = self.get_rule_module(&r)?; @@ -1826,7 +1845,12 @@ impl Interpreter { )), }, // TODO: Handle string vs rawstring - Expr::String(span) => Ok(Value::String(span.text().to_string())), + Expr::String(span) => { + match serde_json::from_str::(format!("\"{}\"", span.text()).as_str()) { + Ok(s) => Ok(s), + Err(e) => bail!(span.error(format!("invalid string literal. 
{e}").as_str())), + } + } Expr::RawString(span) => Ok(Value::String(span.text().to_string())), // TODO: Handle undefined variables @@ -1884,6 +1908,7 @@ impl Interpreter { value, result: None, results: QueryResults::default(), + is_compr: false, }, path, )) @@ -1897,6 +1922,7 @@ impl Interpreter { value: Value::new_set(), result: None, results: QueryResults::default(), + is_compr: false, }, path, )) @@ -1938,6 +1964,7 @@ impl Interpreter { value: Value::new_array(), result: None, results: QueryResults::default(), + is_compr: false, }); } result = self.eval_query(&body.query); @@ -2018,10 +2045,10 @@ impl Interpreter { } } - pub fn merge_value(span: &Span, value: &mut Value, new: Value) -> Result<()> { + pub fn merge_rule_value(span: &Span, value: &mut Value, new: Value) -> Result<()> { match value.merge(new) { Ok(()) => Ok(()), - Err(err) => return Err(span.error(format!("{err}").as_str())), + Err(_) => Err(span.error("rules should not produce multiple outputs.")), } } @@ -2173,15 +2200,15 @@ impl Interpreter { if let Value::Object(btree) = &vref { if !btree.contains_key(&index) { - Self::merge_value(span, vref, object)?; + Self::merge_rule_value(span, vref, object)?; } } else if let Value::Undefined = vref { - Self::merge_value(span, vref, object)?; + Self::merge_rule_value(span, vref, object)?; } } else { let vref = Self::make_or_get_value_mut(&mut self.data, &paths)?; if let Value::Undefined = &vref { - Self::merge_value(span, vref, value)?; + Self::merge_rule_value(span, vref, value)?; } }; @@ -2204,7 +2231,7 @@ impl Interpreter { // Ensure that path is created. let vref = Self::make_or_get_value_mut(&mut self.data, path)?; if Self::get_value_chained(self.init_data.clone(), path) == Value::Undefined { - Self::merge_value(span, vref, value) + Self::merge_rule_value(span, vref, value) } else { Err(span.error("value for rule has already been specified in data document")) } @@ -2408,6 +2435,7 @@ impl Interpreter { // Request that results be gathered. result: Some(QueryResult::default()), results: QueryResults::default(), + is_compr: false, }); let prev_module = self.set_current_module(self.modules.last().cloned())?; @@ -2431,7 +2459,9 @@ impl Interpreter { let orig_idx = ord[expr_idx] as usize; ordered_expressions[orig_idx] = value.clone(); } - results.result[idx].expressions = ordered_expressions; + if !ordered_expressions.iter().any(|v| v == &Value::Undefined) { + results.result[idx].expressions = ordered_expressions; + } } } self_schedule.order.remove(k); diff --git a/src/scheduler.rs b/src/scheduler.rs index 735d85f3..7f70c4cd 100644 --- a/src/scheduler.rs +++ b/src/scheduler.rs @@ -43,6 +43,7 @@ pub fn schedule( empty: &Str, ) -> Result { let num_statements = infos.len(); + let orig_infos: Vec<&StmtInfo> = infos.iter().collect(); // Mapping from each var to the list of statements that define it. let mut defining_stmts: BTreeMap> = BTreeMap::new(); @@ -191,7 +192,10 @@ pub fn schedule( } if order.len() != num_statements { - bail!("could not schedule all statements {order:?} {num_statements}"); + eprintln!("could not schedule all statements {order:?} {orig_infos:?}"); + return Ok(SortResult::Order( + (0..num_statements).map(|i| i as u16).collect(), + )); } // TODO: determine cycles. diff --git a/tests/interpreter/cases/builtins/units/parse.yaml b/tests/interpreter/cases/builtins/units/parse.yaml new file mode 100644 index 00000000..857c3445 --- /dev/null +++ b/tests/interpreter/cases/builtins/units/parse.yaml @@ -0,0 +1,153 @@ +# Copyright (c) Microsoft Corporation. 
+# Licensed under the MIT License. + +cases: + - note: all + data: {} + modules: + - | + package test + + # Supportex by OPA + a = [ "E", "e", "P", "p", "T", "t" , "G" , "g" , "M" , "K" , "k" , "m" ] + + # Not supported by OPA + b = [ "Q", "R", "Y", "Z", "h", "da", "d", "c", + "μ", "n", "f", "a", "z", "y", "r", "q" + ] + + c = [ + "ki", "Ki", "kI", "KI", + "mi", "Mi", "mI", "MI", + "gi", "Gi", "gI", "GI", + "ti", "Ti", "tI", "TI", + "pi", "Pi", "pI", "PI", + "ei", "Ei", "eI", "EI", + ] + + d = [ + "zi", "Zi", "zI", "ZI", + "yi", "Yi", "yI", "YI", + ] + + results = { + "p1" : [ units.parse(s) | s = concat("", ["1", a[_]]) ], + "p2" : [ units.parse(s) | s = concat("", ["1", b[_]]) ], + "p3" : [ units.parse(s) | s = concat("", ["1", c[_]]) ], + "p4" : [ units.parse(s) | s = concat("", ["1", d[_]]) ], + # No suffix, quoted. + "p5" : [ units.parse("1"), units.parse("\"1\"") ] + } + query: data.test.results + want_result: + p1: + - 1e18 + - 1e18 + - 1e15 + - 1e15 + - 1e12 + - 1e12 + - 1e9 + - 1e9 + - 1e6 + - 1e3 + - 1e3 + - 1e-3 + p2: + - 1e30 + - 1e27 + - 1e24 + - 1e21 + - 1e2 + - 1e1 + - 1e-1 + - 1e-2 + - 1e-6 + - 1e-9 + - 1e-15 + - 1e-18 + - 1e-21 + - 1e-24 + - 1e-27 + - 1e-30 + p3: + - 1024 + - 1024 + - 1024 + - 1024 + - 1048576 + - 1048576 + - 1048576 + - 1048576 + - 1073741824 + - 1073741824 + - 1073741824 + - 1073741824 + - 1099511627776 + - 1099511627776 + - 1099511627776 + - 1099511627776 + - 1125899906842624 + - 1125899906842624 + - 1125899906842624 + - 1125899906842624 + - 1152921504606846976 + - 1152921504606846976 + - 1152921504606846976 + - 1152921504606846976 + p4: + - 1.1805916207174113e21 + - 1.1805916207174113e21 + - 1.1805916207174113e21 + - 1.1805916207174113e21 + - 1.2089258196146292e24 + - 1.2089258196146292e24 + - 1.2089258196146292e24 + - 1.2089258196146292e24 + p5: [1, 1] + + - note: extra argument + data: {} + modules: + - | + package test + a = units.parse("1m", "") + query: data.test + error: expects 1 argument + + - note: zero arguments + data: {} + modules: + - | + package test + a = units.parse() + query: data.test + error: expects 1 argument + + - note: array + data: {} + modules: + - | + package test + a = units.parse(["1"]) + query: data.test + error: expects string argument + + - note: space + data: {} + modules: + - | + package test + a = units.parse("1 m") + query: data.test + error: spaces not allowed in resource string + + - note: b suffix not supported + data: {} + modules: + - | + package test + a = units.parse("1mb") + query: data.test + want_result: {} + diff --git a/tests/interpreter/cases/builtins/units/parse_bytes.yaml b/tests/interpreter/cases/builtins/units/parse_bytes.yaml new file mode 100644 index 00000000..6ef18844 --- /dev/null +++ b/tests/interpreter/cases/builtins/units/parse_bytes.yaml @@ -0,0 +1,115 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
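+#
+# Quick reference for the expected values below (an illustrative summary of the suffix
+# handling in src/builtins/units.rs, not an exhaustive specification):
+#   units.parse_bytes("1ki") == 1024    # binary suffix: 2^10
+#   units.parse_bytes("1kb") == 1000    # decimal suffix: 10^3
+# Binary suffixes (ki..yi, with or without a trailing b/B) scale by powers of two;
+# decimal suffixes (k/kb .. q/qb) scale by powers of ten.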
+ +cases: + - note: all + data: {} + modules: + - | + package test + a = [ + "y", + "z", + "e", + "p", + "t", + "g", + "m", + "k", + ] + + p1 = [ units.parse_bytes(s) | s = concat("", ["1", a[_], "i"])] + p1 = [ units.parse_bytes(s) | s = concat("", ["1", a[_], "i", "b"])] + p1 = [ units.parse_bytes(s) | s = concat("", ["1", a[_], "i", "B"])] + + p1 = [ units.parse_bytes(s) | s = concat("", ["1", a[_], "I"])] + p1 = [ units.parse_bytes(s) | s = concat("", ["1", a[_], "I", "b"])] + p1 = [ units.parse_bytes(s) | s = concat("", ["1", a[_], "I", "B"])] + + p1 = [ units.parse_bytes(s) | s = concat("", ["1", upper(a[_]), "i", "b"])] + p1 = [ units.parse_bytes(s) | s = concat("", ["1", upper(a[_]), "i", "B"])] + + p1 = [ units.parse_bytes(s) | s = concat("", ["1", upper(a[_]), "I", "b"])] + p1 = [ units.parse_bytes(s) | s = concat("", ["1", upper(a[_]), "I", "B"])] + + b = [ + "q", + "r", + "y", + "z", + "e", + "p", + "t", + "g", + "m", + "k", + ] + + p2 = [ units.parse_bytes(s) | s = concat("", ["1", b[_], "b"])] + p2 = [ units.parse_bytes(s) | s = concat("", ["1", b[_], "B"])] + p2 = [ units.parse_bytes(s) | s = concat("", ["1", upper(b[_]), "b"])] + p2 = [ units.parse_bytes(s) | s = concat("", ["1", upper(b[_]), "B"])] + + + results= { + "p1": p1, + "p2": p2, + } + query: data.test.results + want_result: + p1: + - 1.2089258196146292e24 + - 1.1805916207174113e21 + - 1152921504606846976 + - 1125899906842624 + - 1099511627776 + - 1073741824 + - 1048576 + - 1024 + p2: + - 1e30 + - 1e27 + - 1e24 + - 1e21 + - 1e18 + - 1e15 + - 1e12 + - 1e9 + - 1e6 + - 1e3 + + - note: extra argument + data: {} + modules: + - | + package test + a = units.parse_bytes("1m", "") + query: data.test + error: expects 1 argument + + - note: zero arguments + data: {} + modules: + - | + package test + a = units.parse_bytes() + query: data.test + error: expects 1 argument + + - note: array + data: {} + modules: + - | + package test + a = units.parse_bytes(["1"]) + query: data.test + error: expects string argument + + - note: space + data: {} + modules: + - | + package test + a = units.parse_bytes("1 m") + query: data.test + error: spaces not allowed in resource string diff --git a/tests/opa.passing b/tests/opa.passing new file mode 100644 index 00000000..2f2810f4 --- /dev/null +++ b/tests/opa.passing @@ -0,0 +1,42 @@ +aggregates +all +any +array +assignments +bitsand +bitsnegate +bitsor +bitsshiftright +bitsxor +comparisonexpr +completedoc +compositebasedereference +dataderef +embeddedvirtualdoc +evaltermexpr +example +fix1863 +intersection +invalidkeyerror +jsonfilteridempotent +nestedreferences +objectfilteridempotent +objectfilternonstringkey +objectremoveidempotent +objectremovenonstringkey +partialsetdoc +rand +replacen +semvercompare +sets +subset +topdowndynamicdispatch +trim +trimleft +trimprefix +trimright +trimsuffix +typebuiltin +typenamebuiltin +union +units \ No newline at end of file diff --git a/tests/opa.rs b/tests/opa.rs index 7d35f0c9..3d8e2f95 100644 --- a/tests/opa.rs +++ b/tests/opa.rs @@ -2,7 +2,7 @@ // Licensed under the MIT License. 
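//
// Usage note (illustrative, mirroring the invocation added to .github/workflows/rust.yml and
// scripts/pre-push in this patch): the conformance harness takes OPA test-suite folder names
// on the command line, so only the suites recorded in tests/opa.passing can be run with
//
//     cargo test --test opa -- $(tr '\n' ' ' < tests/opa.passing)
//
// Passing no folder names runs every suite found under the OPA tests directory.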
use regorus::*; -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::io::{self, Write}; use std::path::Path; use std::process::Command; @@ -72,7 +72,7 @@ fn run_opa_tests(opa_tests_dir: String, folders: &[String]) -> Result<()> { let tests_path = Path::new(&opa_tests_dir); let mut status = BTreeMap::::new(); let mut n = 0; - let mut missing_functions = BTreeSet::new(); + let mut missing_functions = BTreeMap::new(); for entry in WalkDir::new(&opa_tests_dir) .sort_by_file_name() .into_iter() @@ -93,7 +93,7 @@ fn run_opa_tests(opa_tests_dir: String, folders: &[String]) -> Result<()> { continue; } - let run_test = folders.is_empty() || folders.iter().any(|f| path_dir_str.contains(f)); + let run_test = folders.is_empty() || folders.iter().any(|f| &path_dir_str == f); if !run_test { continue; } @@ -108,24 +108,35 @@ fn run_opa_tests(opa_tests_dir: String, folders: &[String]) -> Result<()> { (Ok(actual), Some(expected)) if &actual == expected => { entry.0 += 1; } + (Ok(actual), None) + if actual == Value::new_array() + && case.want_error.is_none() + && case.error.is_none() => + { + entry.0 += 1; + } (Err(_), None) if case.want_error.is_some() => { // Expected failure. entry.0 += 1; } (r, _) => { print!("\n{} failed.", case.note); + dbg!((&case, &r)); if let Err(e) = r { let msg = e.to_string(); let pat = "could not find function "; if let Some(pos) = msg.find(pat) { let fcn = &msg[pos + pat.len()..]; - missing_functions.insert(fcn.to_string()); + missing_functions + .entry(fcn.to_string()) + .and_modify(|e| *e += 1) + .or_insert(1); } } let path = Path::new("target/opa/failures").join(path_dir); std::fs::create_dir_all(path.clone())?; - let mut cmd = "target/debug/examples/regorus eval".to_string(); + let mut cmd = "cargo run --example dregorus eval".to_string(); if let Some(data) = &case.data { let json_path = path.join(format!("data{n}.json")); cmd += format!(" -d {}", json_path.display()).as_str(); @@ -163,7 +174,7 @@ fn run_opa_tests(opa_tests_dir: String, folders: &[String]) -> Result<()> { } } - println!("\nTESTSUITE STATUS"); + println!("\nOPA TESTSUITE STATUS"); println!(" {:40} {:4} {:4}", "FOLDER", "PASS", "FAIL"); let (mut npass, mut nfail) = (0, 0); for (dir, (pass, fail)) in status { @@ -187,9 +198,13 @@ fn run_opa_tests(opa_tests_dir: String, folders: &[String]) -> Result<()> { if !missing_functions.is_empty() { println!("\nMISSING FUNCTIONS"); - for (idx, fcn) in missing_functions.iter().enumerate() { - println!("\x1b[31m {:4}: {fcn}\x1b[0m", idx + 1); + println!(" {:4} {:40} {}", "", "FUNCTION", "FAILURES"); + let mut ncalls = 0; + for (idx, (fcn, calls)) in missing_functions.iter().enumerate() { + println!("\x1b[31m {:4}: {fcn:40} {calls}\x1b[0m", idx + 1); + ncalls += calls; } + println!("\x1b[31m {:4} {:40} {ncalls}\x1b[0m", "", "TOTAL"); } if nfail != 0 {