diff --git a/.gitignore b/.gitignore index 1e9a94b200..ed20880f9f 100644 --- a/.gitignore +++ b/.gitignore @@ -35,3 +35,6 @@ project/plugins/project/ # ergo's checkout location for integration testing ergo-tests/ + +# spam test's checkout location +spam-tests/ diff --git a/build.sbt b/build.sbt index e6236001b0..fedb83ec0a 100644 --- a/build.sbt +++ b/build.sbt @@ -133,7 +133,7 @@ scalacOptions in(Compile, compile) ++= Seq("-release", "8") //scalacOptions in Compile ++= Seq("-Xprompt", "-Ydebug", "-verbose" ) parallelExecution in Test := false -publishArtifact in Test := false +publishArtifact in Test := true pomIncludeRepository := { _ => false } @@ -201,7 +201,7 @@ lazy val sigma = (project in file(".")) .settings(commonSettings: _*) def runErgoTask(task: String, sigmastateVersion: String, log: Logger): Unit = { - val ergoBranch = "master" + val ergoBranch = "ergobox-opt" val sbtEnvVars = Seq("BUILD_ENV" -> "test", "SIGMASTATE_VERSION" -> sigmastateVersion) log.info(s"Testing current build in Ergo (branch $ergoBranch):") @@ -253,3 +253,41 @@ commands += Command.command("ergoItTest") { state => "ergoItTestTask" :: state } + +def runSpamTestTask(task: String, sigmastateVersion: String, log: Logger): Unit = { + val spamBranch = "master" + val envVars = Seq("SIGMASTATE_VERSION" -> sigmastateVersion, + "SPECIAL_VERSION" -> specialVersion, + // SSH_SPAM_REPO_KEY should be set (see Jenkins Credentials Binding Plugin) + "GIT_SSH_COMMAND" -> "ssh -i $SSH_SPAM_REPO_KEY") + + log.info(s"Testing current build with spam tests (branch $spamBranch):") + val cwd = new File("") + val spamPath = new File(cwd.absolutePath + "/spam-tests/") + log.info(s"Cleaning $spamPath") + s"rm -rf ${spamPath.absolutePath}" ! + + log.info(s"Cloning spam tests branch $spamBranch into ${spamPath.absolutePath}") + Process(Seq("git", "clone", "-b", spamBranch, "--single-branch", "git@github.com:greenhat/sigma-spam.git", spamPath.absolutePath), + cwd.getAbsoluteFile, + envVars: _*) ! 
+ + log.info(s"Running spam tests in $spamPath with Sigmastate version $sigmastateVersion") + val res = Process(Seq("sbt", task), spamPath, envVars: _*) ! + + if (res != 0) sys.error(s"Spam $task failed!") +} + +lazy val spamTestTask = TaskKey[Unit]("spamTestTask", "run spam tests with current version") +spamTestTask := { + val log = streams.value.log + val sigmastateVersion = version.value + runSpamTestTask("test", sigmastateVersion, log) +} + +commands += Command.command("spamTest") { state => + "clean" :: + "publishLocal" :: + "spamTestTask" :: + state +} diff --git a/docs/PR-review-checklist.md b/docs/PR-review-checklist.md deleted file mode 100644 index 3f1827a041..0000000000 --- a/docs/PR-review-checklist.md +++ /dev/null @@ -1,13 +0,0 @@ -## What should be checked during PR review - -### For each $TypeName.$methodName there should be - -1. test case in SigmaDslTests (checks SigmaDsl <-> ErgoScript equality) -2. test case in CostingSpecification -3. costing rule method in ${TypeName}Coster -4. for each SMethod registration - - .withInfo($description, $argsInfo) - - .withIRInfo($irBuilder, $opDescriptor) - -### For each PredefinedFunc registration there should be - - PredefFuncInfo($irBuilder, $opDescriptor) diff --git a/docs/TypeSerialization.md b/docs/TypeSerialization.md deleted file mode 100644 index 4549ecfa44..0000000000 --- a/docs/TypeSerialization.md +++ /dev/null @@ -1,173 +0,0 @@ -# SigmaIR Serialization format - -This document defines a binary format, which is used to store Sigma scripts in persistent stores, -to transfer them over wire and to enable cross-platform interoperation. -It organized as the following: first we describe how the types (like `Int`, `Coll[Byte]`, etc.) are serialized, -then we define serialization of typed data. This will give us a basis to describe serialization of -Constant nodes of SigmaIR. From that we proceed to serialization of arbitrary SigmaIR trees. 
- -## Type Serialization - -### Goal -Currently types are serialized as part of `ExtractRegisterAs`, `ConcreteCollection`, -`CollectionConstant`, `TaggedVariable`, nodes. The following encoding is designed minimize a number of bytes -required to represent type in serialization format. -In most cases SType value serialises into a single byte. -In the intermediate representation, IR, each type is represented by a tree of nodes where leaves are primitive types -and other nodes are type constructors. -Simple (sub-optimal) way to serialize type would be to give each primitive type and each type constructor -a unique type code. Then, to serialize a node, we need to emit its code and then perform recursive descent -to serialize all children. -However, we use special encoding schema to save bytes for the types that are used more often. - -We assume the most frequently used types are: -- primitive types - Int, Byte, Boolean, BigInt, GroupElement, Box, AvlTree -- Collections of primitive types - `Coll[Byte]` etc -- Options of primitive types - `Option[Int]` etc. -- Nested arrays of primitive types - `Coll[Coll[Int]]` etc. -- Functions of primitive types - `Box => Boolean` etc. -- First biased pair of types - `(_, Int)` when we know the first component is a primitive type. -- Second biased pair of types - `(Int, _)` when we know the second component is a primitive type. -- Symmetric pair of types - `(Int, Int)` when we know both types are the same - -All the types above should be represented in an optimized way (preferable by a single byte). -For other types, we do recursive descent down the type tree as it is defined below. 
- -### Distribution of type codes -The whole space of 256 codes is divided as the following: - -Interval | Description --------|------------ -0x00 | special value to represent undefined type (NoType in IR) -0x01 - 0x6F(111) | data types including primitive types, arrays, options aka nullable types, classes (in future), 111 = 255 - 144 different codes -0x70(112) - 0xFF(255) | function types (T1 => T2), 144 = 12 x 12 different codes - -### Encoding Data Types - -There are 9 different values for primitive types and 2 more are reserved for future extensions. -Each primitive type has id in a range {1,...,11} as the following. - -Id | Type -------|------- -1 | Boolean -2 | Byte -3 | Short (16 bit) -4 | Int (32 bit) -5 | Long (64 bit) -6 | BigInt (java.math.BigInteger) -7 | GroupElement (org.bouncycastle.math.ec.ECPoint) -8 | SigmaProp -9 | reserved for Char -10 | reserved for Double -11 | reserved - -For each type constructor like Coll or Option we use the encoding schema defined below. -Type constructor has associated _base code_ (e.g. 12 for `Coll[_]`, 24 for `Coll[Coll[_]]` etc. ), which is multiple of 12. -Base code can be added to primitive type id to produce code of constructed type, for example 12 + 1 = 13 is a code of `Coll[Byte]`. -The code of type constructor (12 in this example) is used when type parameter is non-primitive type -(e.g. `Coll[(Byte, Int)]`), is which case recursive descent is performed. -This encoding allows very simple and quick decoding by using div and mod operations. - -The interval of codes for data types is divided as the following: - -Interval | Type constructor | Description ---------------------|------------------ |------------ -0x01 - 0x0B(11) | | primitive types (including 2 reserved) -0x0C(12) | `Coll[_]` | Collection of non-primivite types (`Coll[(Int,Boolean)]`) -0x0D(13) - 0x17(23) | `Coll[_]` | Collection of primitive types (`Coll[Byte]`, `Coll[Int]`, etc.) 
-0x18(24) | `Coll[Coll[_]]` | Nested collection of non-primitive types (`Coll[Coll[(Int,Boolean)]]`) -0x19(25) - 0x23(35) | `Coll[Coll[_]]` | Nested collection of primitive types (`Coll[Coll[Byte]]`, `Coll[Coll[Int]]`) -0x24(36) | `Option[_]` | Option of non-primitive type (`Option[(Int, Byte)]`) -0x25(37) - 0x2F(47) | `Option[_]` | Option of primitive type (`Option[Int]`) -0x30(48) | `Option[Coll[_]]` | Option of Coll of non-primitive type (`Option[Coll[(Int, Boolean)]]`) -0x31(49) - 0x3B(59) | `Option[Coll[_]]` | Option of Coll of primitive type (`Option[Coll[Int]]`) -0x3C(60) | `(_,_)` | Pair of non-primitive types (`((Int, Byte), (Boolean,Box))`, etc.) -0x3D(61) - 0x47(71) | `(_, Int)` | Pair of types where first is primitive (`(_, Int)`) -0x48(72) | `(_,_,_)` | Triple of types -0x49(73) - 0x53(83) | `(Int, _)` | Pair of types where second is primitive (`(Int, _)`) -0x54(84) | `(_,_,_,_)` | Quadruple of types -0x55(85) - 0x5F(95) | `(_, _)` | Symmetric pair of primitive types (`(Int, Int)`, `(Byte,Byte)`, etc.) -0x60(96) | `(_,...,_)` | `Tuple` type with more than 4 items `(Int, Byte, Box, Boolean, Int)` -0x61(97) | `Any` | Any type -0x62(98) | `Unit` | Unit type -0x63(99) | `Box` | Box type -0x64(100) | `AvlTree` | AvlTree type -0x65(101) | `Context` | Context type -0x65(102) | `String` | String -0x66(103) | `IV` | TypeIdent -0x67(104)- 0x6E(110)| | reserved for future use -0x6F(111) | | Reserved for future `Class` type (e.g. user-defined types) - - -### Encoding Function Types - -We use 12 different values for both domain and range types of functions. -This gives us 12 * 12 = 144 function types in total and allows to represent 11 * 11 = 121 functions over primitive types using just single byte. - -Each code F in a range of function types can be represented as - -F = D * 12 + R + 112, where D, R in {0,...,11} - indices of domain and range types correspondingly, -112 - is the first code in an interval of function types. 
- -If D = 0 then domain type is not primitive and recursive descent is necessary to write/read domain type. - -If R = 0 then range type is not primitive and recursive descent is necessary to write/read range type. - -### Recursive Descent - -When argument of the type constructor is not primitive type we fallback to simple encoding schema. -In such a case we emit special code for the type constructor according to the table above and descend recursively -to every child node of the type tree. -We do this descend only for those children whose code cannot be embedded in parent code. -For example, serialization of `Coll[(Int,Boolean)]` proceeds as the following: -1) emit 0x0C because element of collection is not primitive -2) recursively serialize `(Int, Boolean)` -3) emit 0x3D because first item in the pair is primitive -4) recursivley serialize `Boolean` -5) emit 0x02 - the code for primitive type `Boolean` - -### Examples - -Type | D | R | Bytes | #Bytes | Comments ----------------------|-----|-----|-------------------|--------|--------- -`Byte` | | | 1 | 1 | -`Coll[Byte]` | | | 12 + 1 = 13 | 1 | -`Coll[Coll[Byte]]` | | | 24 + 1 = 25 | 1 | -`Option[Byte]` | | | 36 + 1 = 37 | 1 | register -`Option[Coll[Byte]]`| | | 48 + 1 = 49 | 1 | register -`(Int,Int)` | | | 84 + 3 = 87 | 1 | fold -`Box=>Boolean` | 7 | 2 | 198 = 7*12+2+112 | 1 | exist, forall -`(Int,Int)=>Int` | 0 | 3 | 115=0*12+3+112, 87 | 2 | fold -`(Int,Boolean)` | | | 60 + 3, 2 | 2 | -`(Int,Box)=>Boolean` | 0 | 2 | 0*12+2+112, 60+3, 7 | 3 | - -## Data and Constant serialization - -The contents of a typed data structure can be fully described by a type tree. -For example having a typed data object `d: (Int, Coll[Byte], Boolean)` we can tell that `d` has 3 items, -the first item contain 64-bit integer, the second - collection of bytes, and the third - logical true/false value. - -To serialize/deserialize typed data we need to know its type descriptor (type tree). 
-Serialization procedure is recursive over type tree and the corresponding subcomponents of an object. -For primitive types (the leaves of the type tree) the format is fixed. The values of primitive types are serialized -using predefined function shown in the following table - -Value: Type | Function | Format --------------- |------------------------------ |------- -`x: Byte` | `byte: Byte => Coll[Byte]` | `[x & 0xFF]` - one byte storing value x -`x: Short` | `short: Short => Coll[Byte]` | `[x & 0xFFFF]` - two bytes in big-endian order storing value x -`x: Int` | `int: Int => Coll[Byte]` | `[x & 0xFFFFFFFF]` - four bytes in big-endian order storing value x -`x: Long` | `long: Int => Coll[Byte]` | `[x & 0xFFFFFFFFFFFFFFFF]` - eight bytes in big-endian order storing value x - -Thus, serialization format is defined recursively as shown in the following table - -Object | Type | Format ---------------|------------------|------- -x = 0xXX | `Byte` | `byte(x)` - one byte storing value x -b = false/true| `Boolean` | `if (b) byte(0x01) else byte(0x00)]` - one byte storing 0 or 1 -n = 0xXXXXXXXXXXXXXXXX | `Int` | `[XX,XX,XX,XX,XX,XX,XX,XX]` - big endian 8 bytes -N = new BigInteger() | `BigInt` | xs = N.toByteArray, `[serialize(xs)]` - serialize as `Coll[Byte]`, see also BigInteger.toByteArray -e = new EcPoint() | `GroupElement` | `[e.getEncoded]` see also use GroupElementSerializer -box = new ErgoBox() | `Box` | `[putLong(box.value), putValue(box.proposition), putArray[Any](box.registers), 32, putBytes(box.transactionId), putShort(box.boxId)]` -t = new AvlTree() | `AvlTree` | `[serialize(t.startingDigest), putInt(t.keyLength), putOpt(t.valueLengthOpt), putOpt(t.maxNumOperations), putOpt(t.maxDeletes)]` -xs = Coll(x1, .., xN) | `Coll[T]` | `[xs.length & 0xXXXX, serialize(x1), ..., serialize(xN)]` - 2 bytes of length and recursive bytes of all the elements diff --git a/docs/pr-review-policy.md b/docs/pr-review-policy.md new file mode 100644 index 0000000000..256ac756b4 --- 
/dev/null +++ b/docs/pr-review-policy.md @@ -0,0 +1,115 @@ +## Pull Request Review Policy + +### Motivation + +ErgoScript (working name Sigma) is used in Ergo to validate box spending conditions. +This is part of network consensus protocol and thus critical for long-term survivability +of Ergo blockchain. + +From Sigma development point of view, consensus critical part of Ergo +is BlockCandidate validation code. More concretely `ErgoTransaction.validateStateful` method. + +To achieve network consensus, block validation should be the same on all the nodes. +If some nodes have different validation results, this may lead to a network fork +(which we call hard-fork here, or HF for short). + +To prevent accidental hard forking changes in the code the development process should have +barriers to prevent them from creeping into the mainnet releases. + +It is not possible to prove NO HF with testing alone. +In addition, other software development practices should be employed to decrease +the unfortunate probability of hard forking changes. In particular more +formalized development process can reduce risks. + +The developer of Sigma needs to understand and know the internal workings +of the code in `ErgoTransaction.validateStateful` +method and analyse the impact of all his/her changes on this method behavior. + +This part of Ergo should be well known to all sigma developers. +Everything which is used in `ErgoTransaction.validateStateful` is part of consensus +and hence `red` zone. + +### PR Requirements + +Before the first mainnet has been released we tried to keep sigma release versions in sync with ergo versions. +Now it doesn’t look possible, since ergo will have more frequent changes. +For example (at the time of writing) `v3.0.1` is next release branch in sigma and `v3.0.5` is next in ergo. +Thus between ergo and sigma we should keep only first two numbers of the version in sync. 
+So when ergo will switch to `v3.1.x`, sigma will also switch to `3.1.x`, but the last digit may differ. + +When creating new PRs, `master` branch should not be used directly in PRs as a base (or target branch). + +Use the following branches: +- [v3.0.1](https://github.com/ScorexFoundation/sigmastate-interpreter/tree/v3.0.1) - if you + can PROVE that you don’t have changes leading to hard-fork +- [v4.0](https://github.com/ScorexFoundation/sigmastate-interpreter/tree/v4.0) - if there are + hard-fork changes + +Because of the danger of hard forks, the requirements for new developments are +much stronger than in typical software development projects. + +The only way new code can go to mainnet is after the following has been done: +1) the new PR is based on `v3.x` branch +2) the new PR passes all tests in `v3.x` branch (including ergo-unit, ergo-it and spam tests) +3) the PR is reviewed +4) at least 2 approving reviews are presented (@aslesarenko and @kushti are strictly required) +5) the PR is merged into `v3.x` branch +6) the commits are merged into master as part of `v3.x` branch and released after that + +These strict requirements are only for `v3.x` branches, because we need to guarantee +that we don’t accidentally introduce hard-forks. + +### PR Review Requirements + +During review all the changes should be examined for hard fork. In case of any suspicious change +the conversation should be opened during PR review and addressed by PR authors. + +*NOTE: PR authors shouldn't resolve opened conversations +(by pressing `Resolve conversation` button), this can be done by reviewers, after the +questions are resolved.* + +It is the responsibility of the author to identify suspicious changes and whether to avoid them +altogether or provide NO-HF-PROOF comment (see below) in advance, without request. +The responsibility of the reviewer is to catch suspicious changes, not proved by the author, and +raise a conversation. 
+The responsibility of the author is to dispel those suspicions by providing a NO-HF-PROOF comment +in the source code (not in a GitHub PR comment). + +Reviewers should request a NO-HF-PROOF comment for ALL suspicious changes +in the code. + +The example (and a template) for a comment is as follows: +```scala +/* NO HF PROOF: + Changed: the `if (spendingTransaction == null)` condition was removed + Motivation: to improve performance of hotspot code + Safety: + It is safe to remove this spendingTransaction == null check because v3.0.5 version of ergo + never create ErgoLikeContext instance with spendingTransaction = null. + Examined ergo code: ErgoTransaction.validateStatefull and all code paths started from it. +*/ +``` +The proof should be based on the analysis of the code. + +Upon receiving a NO-HF-PROOF comment, reviewers should verify the NO-HF-PROOF themselves and never assume the author did it correctly. + +Because of this added complexity of the v3.x review process the developer +is strongly advised to keep changes to the minimum which is absolutely required. + +If the change is semantic the author needs to have strong reasons, +which should be motivated in the NO-HF-PROOF comment. + +Even if a suspicious change is approved, there is still a chance that some +negative impact has been overlooked. +Thus, by default, we allow only one such change per release, to simplify root cause analysis +in case of any errors. + +### PR Review Checklist + +1. PR is based on `v3.x` branch +2. ergoBranch is correct and PR passes all tests (including ergo-unit, ergo-it and spam tests) +3. Every change is motivated +4. Every change preserves consensus +5. Every suspicious change comes with NO-HF-PROOF +6. 
Every NO-HF-PROOF is verified + diff --git a/docs/LANG.md b/docs/sigma-front-end.md similarity index 100% rename from docs/LANG.md rename to docs/sigma-front-end.md diff --git a/sigma-impl/src/main/scala/sigma/util/Extensions.scala b/sigma-impl/src/main/scala/sigma/util/Extensions.scala index 1acbd25b37..d418f1a265 100644 --- a/sigma-impl/src/main/scala/sigma/util/Extensions.scala +++ b/sigma-impl/src/main/scala/sigma/util/Extensions.scala @@ -5,7 +5,6 @@ import java.nio.ByteBuffer import special.collection.{Coll, Builder} import com.google.common.primitives.Ints -import scalan.{Nullable, RType} import scala.language.higherKinds @@ -17,8 +16,12 @@ object Extensions { def toByte: Byte = if (b) 1 else 0 } + /** @hotspot it is used in deserialization so we avoid allocation by any means. */ + @inline final def toUByte(b: Byte) = b & 0xFF + implicit class ByteOps(val b: Byte) extends AnyVal { - @inline def toUByte: Int = b & 0xFF + @inline def toUByte: Int = Extensions.toUByte(b) + def addExact(b2: Byte): Byte = { val r = b + b2 if (r < Byte.MinValue || r > Byte.MaxValue) diff --git a/src/main/scala/org/ergoplatform/ErgoBox.scala b/src/main/scala/org/ergoplatform/ErgoBox.scala index da326f64b6..d41a01c092 100644 --- a/src/main/scala/org/ergoplatform/ErgoBox.scala +++ b/src/main/scala/org/ergoplatform/ErgoBox.scala @@ -46,12 +46,13 @@ import scala.runtime.ScalaRunTime * @param creationHeight - height when a transaction containing the box was created. * This height is declared by user and should not exceed height of the block, * containing the transaction with this box. 
+ * @hotspot don't beautify the code of this class */ class ErgoBox( override val value: Long, override val ergoTree: ErgoTree, override val additionalTokens: Coll[(TokenId, Long)] = Colls.emptyColl[(TokenId, Long)], - override val additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map(), + override val additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map.empty, val transactionId: ModifierId, val index: Short, override val creationHeight: Int @@ -136,27 +137,40 @@ object ErgoBox { val MaxTokens: Int = ErgoConstants.MaxTokens.value val maxRegisters: Int = ErgoConstants.MaxRegisters.value - val mandatoryRegisters: Vector[MandatoryRegisterId] = Vector(R0, R1, R2, R3) - val nonMandatoryRegisters: Vector[NonMandatoryRegisterId] = Vector(R4, R5, R6, R7, R8, R9) + + /** @hotspot don't beautify the code in this companion */ + private val _mandatoryRegisters: Array[MandatoryRegisterId] = Array(R0, R1, R2, R3) + val mandatoryRegisters: Seq[MandatoryRegisterId] = _mandatoryRegisters + + private val _nonMandatoryRegisters: Array[NonMandatoryRegisterId] = Array(R4, R5, R6, R7, R8, R9) + val nonMandatoryRegisters: Seq[NonMandatoryRegisterId] = _nonMandatoryRegisters + val startingNonMandatoryIndex: Byte = nonMandatoryRegisters.head.number .ensuring(_ == mandatoryRegisters.last.number + 1) - val allRegisters: Vector[RegisterId] = (mandatoryRegisters ++ nonMandatoryRegisters).ensuring(_.size == maxRegisters) + val allRegisters: Seq[RegisterId] = + Helpers.concatArrays[RegisterId]( + Helpers.castArray(_mandatoryRegisters): Array[RegisterId], + Helpers.castArray(_nonMandatoryRegisters): Array[RegisterId]).ensuring(_.length == maxRegisters) + val mandatoryRegistersCount: Byte = mandatoryRegisters.size.toByte val nonMandatoryRegistersCount: Byte = nonMandatoryRegisters.size.toByte val registerByName: Map[String, RegisterId] = allRegisters.map(r => s"R${r.number}" -> r).toMap - val registerByIndex: Map[Byte, 
RegisterId] = allRegisters.map(r => r.number -> r).toMap - def findRegisterByIndex(i: Byte): Option[RegisterId] = registerByIndex.get(i) + /** @hotspot called from ErgoBox serializer */ + @inline final def registerByIndex(index: Int): RegisterId = allRegisters(index) + + def findRegisterByIndex(i: Int): Option[RegisterId] = + if (0 <= i && i < maxRegisters) Some(registerByIndex(i)) else None val allZerosModifierId: ModifierId = Array.fill[Byte](32)(0.toByte).toModifierId def apply(value: Long, ergoTree: ErgoTree, creationHeight: Int, - additionalTokens: Seq[(TokenId, Long)] = Seq(), - additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map(), + additionalTokens: Seq[(TokenId, Long)] = Nil, + additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map.empty, transactionId: ModifierId = allZerosModifierId, boxIndex: Short = 0): ErgoBox = new ErgoBox(value, ergoTree, diff --git a/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala b/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala index f048c61a27..9d83860f47 100644 --- a/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala +++ b/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala @@ -10,12 +10,15 @@ import sigmastate.Values._ import sigmastate._ import sigmastate.SType.AnyOps import sigmastate.lang.Terms._ -import sigmastate.serialization.{ErgoTreeSerializer, SigmaSerializer} +import sigmastate.serialization.SigmaSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import special.collection.Coll import sigmastate.eval._ import sigmastate.eval.Extensions._ +import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer +import spire.syntax.all.cfor +import scala.collection.immutable import scala.runtime.ScalaRunTime /** @@ -40,7 +43,7 @@ class ErgoBoxCandidate(val value: Long, def proposition: BoolValue = ergoTree.toProposition(ergoTree.isConstantSegregation).asBoolValue - lazy val propositionBytes: Array[Byte] = 
ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(ergoTree) + lazy val propositionBytes: Array[Byte] = ergoTree.bytes lazy val bytesWithNoRef: Array[Byte] = ErgoBoxCandidate.serializer.toBytes(this) @@ -77,14 +80,14 @@ class ErgoBoxCandidate(val value: Long, } object ErgoBoxCandidate { - val UndefinedBoxRef = Array.fill(34)(0: Byte).toColl + val UndefinedBoxRef: Coll[Byte] = Array.fill(34)(0: Byte).toColl object serializer extends SigmaSerializer[ErgoBoxCandidate, ErgoBoxCandidate] { def serializeBodyWithIndexedDigests(obj: ErgoBoxCandidate, tokensInTx: Option[Coll[TokenId]], w: SigmaByteWriter): Unit = { w.putULong(obj.value) - w.putBytes(ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(obj.ergoTree)) + w.putBytes(DefaultSerializer.serializeErgoTree(obj.ergoTree)) w.putUInt(obj.creationHeight) w.putUByte(obj.additionalTokens.size) obj.additionalTokens.foreach { case (id, amount) => @@ -121,34 +124,43 @@ object ErgoBoxCandidate { serializeBodyWithIndexedDigests(obj, None, w) } + /** @hotspot don't beautify the code */ def parseBodyWithIndexedDigests(digestsInTx: Option[Coll[TokenId]], r: SigmaByteReader): ErgoBoxCandidate = { val previousPositionLimit = r.positionLimit r.positionLimit = r.position + ErgoBox.MaxBoxSize - val value = r.getULong() - val tree = ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(r, SigmaSerializer.MaxPropositionSize) - val creationHeight = r.getUInt().toInt - val addTokensCount = r.getUByte() - val addTokens = (0 until addTokensCount).map { _ => + val value = r.getULong() // READ + val tree = DefaultSerializer.deserializeErgoTree(r, SigmaSerializer.MaxPropositionSize) // READ + val creationHeight = r.getUInt().toInt // READ + val nTokens = r.getUByte() // READ + val tokenIds = new Array[Digest32](nTokens) + val tokenAmounts = new Array[Long](nTokens) + val tokenIdSize = TokenId.size + cfor(0)(_ < nTokens, _ + 1) { i => val tokenId = if (digestsInTx.isDefined) { - val digestIndex = r.getUInt().toInt + val digestIndex = 
r.getUInt().toInt // READ val digests = digestsInTx.get if (!digests.isDefinedAt(digestIndex)) sys.error(s"failed to find token id with index $digestIndex") digests(digestIndex) } else { - r.getBytes(TokenId.size) + r.getBytes(tokenIdSize) // READ } - val amount = r.getULong() - Digest32 @@ tokenId -> amount + val amount = r.getULong() // READ + tokenIds(i) = tokenId.asInstanceOf[Digest32] + tokenAmounts(i) = amount + } + val tokens = Colls.pairCollFromArrays(tokenIds, tokenAmounts) + + // TODO optimize: hotspot: replace Map with much faster Coll + val nRegs = r.getUByte() // READ + val b = immutable.Map.newBuilder[NonMandatoryRegisterId, EvaluatedValue[_ <: SType]] + b.sizeHint(nRegs) + cfor(0)(_ < nRegs, _ + 1) { iReg => + val reg = ErgoBox.nonMandatoryRegisters(iReg) + val v = r.getValue().asInstanceOf[EvaluatedValue[SType]] // READ + b += ((reg, v)) // don't use `->` since it incur additional wrapper overhead } - val regsCount = r.getUByte() - val regs = (0 until regsCount).map { iReg => - val regId = ErgoBox.startingNonMandatoryIndex + iReg - val reg = ErgoBox.findRegisterByIndex(regId.toByte).get.asInstanceOf[NonMandatoryRegisterId] - val v = r.getValue().asInstanceOf[EvaluatedValue[SType]] - (reg, v) - }.toMap r.positionLimit = previousPositionLimit - new ErgoBoxCandidate(value, tree, creationHeight, addTokens.toColl, regs) + new ErgoBoxCandidate(value, tree, creationHeight, tokens, b.result()) } override def parse(r: SigmaByteReader): ErgoBoxCandidate = { diff --git a/src/main/scala/org/ergoplatform/ErgoLikeContext.scala b/src/main/scala/org/ergoplatform/ErgoLikeContext.scala index 45fbdae276..84a0e9b53e 100644 --- a/src/main/scala/org/ergoplatform/ErgoLikeContext.scala +++ b/src/main/scala/org/ergoplatform/ErgoLikeContext.scala @@ -16,7 +16,7 @@ import SType._ import RType._ import org.ergoplatform.ErgoConstants.ScriptCostLimit import org.ergoplatform.validation.{ValidationRules, SigmaValidationSettings} - +import spire.syntax.all.cfor import 
scala.util.Try case class BlockchainState(currentHeight: Height, lastBlockUtxoRoot: AvlTreeData) @@ -59,7 +59,7 @@ class ErgoLikeContext(val currentHeight: Height, assert(preHeader == null || preHeader.height == currentHeight, "Incorrect preHeader height") assert(preHeader == null || java.util.Arrays.equals(minerPubkey, preHeader.minerPk.getEncoded.toArray), "Incorrect preHeader minerPubkey") assert(headers.toArray.headOption.forall(h => java.util.Arrays.equals(h.stateRoot.digest.toArray, lastBlockUtxoRoot.digest)), "Incorrect lastBlockUtxoRoot") - headers.toArray.indices.foreach { i => + cfor(0)(_ < headers.length, _ + 1) { i => if (i > 0) assert(headers(i - 1).parentId == headers(i).id, s"Incorrect chain: ${headers(i - 1).parentId},${headers(i).id}") } assert(preHeader == null || headers.toArray.headOption.forall(_.id == preHeader.parentId), s"preHeader.parentId should be id of the best header") diff --git a/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala b/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala index dfd8b0b88c..2c1bb71edf 100644 --- a/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala +++ b/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala @@ -11,7 +11,7 @@ import sigmastate.serialization.SigmaSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import special.collection.ExtensionMethods._ import sigmastate.eval.Extensions._ - +import spire.syntax.all.cfor import scala.collection.mutable import scala.util.Try import sigmastate.SType._ @@ -119,12 +119,15 @@ object ErgoLikeTransactionSerializer extends SigmaSerializer[ErgoLikeTransaction val distinctTokenIds = tokenIds.map(_.toColl).distinct.map(_.toArray.asInstanceOf[TokenId]) w.putUInt(distinctTokenIds.length) - distinctTokenIds.foreach { tokenId => - w.putBytes(tokenId.toArray) + cfor(0)(_ < distinctTokenIds.length, _ + 1) { i => + val tokenId = distinctTokenIds(i) + w.putBytes(tokenId) } // serialize outputs - w.putUShort(tx.outputCandidates.length) 
- for (out <- tx.outputCandidates) { + val outs = tx.outputCandidates + w.putUShort(outs.length) + cfor(0)(_ < outs.length, _ + 1) { i => + val out = outs(i) ErgoBoxCandidate.serializer.serializeBodyWithIndexedDigests(out, Some(distinctTokenIds), w) } } diff --git a/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala b/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala index b062cf501d..bb9a3508cd 100644 --- a/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala +++ b/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala @@ -58,7 +58,13 @@ abstract class SigmaValidationSettings extends Iterable[(Short, (ValidationRule, sealed class MapSigmaValidationSettings(private val map: Map[Short, (ValidationRule, RuleStatus)]) extends SigmaValidationSettings { override def iterator: Iterator[(Short, (ValidationRule, RuleStatus))] = map.iterator override def get(id: Short): Option[(ValidationRule, RuleStatus)] = map.get(id) - override def getStatus(id: Short): Option[RuleStatus] = map.get(id).map(_._2) + + /** @hotspot don't beautify this code */ + override def getStatus(id: Short): Option[RuleStatus] = { + val statusOpt = map.get(id) + val res = if (statusOpt.isDefined) Some(statusOpt.get._2) else None + res + } override def updated(id: Short, newStatus: RuleStatus): MapSigmaValidationSettings = { val (rule,_) = map(id) new MapSigmaValidationSettings(map.updated(id, (rule, newStatus))) diff --git a/src/main/scala/org/ergoplatform/validation/ValidationRules.scala b/src/main/scala/org/ergoplatform/validation/ValidationRules.scala index 14ba5f016f..3378d87194 100644 --- a/src/main/scala/org/ergoplatform/validation/ValidationRules.scala +++ b/src/main/scala/org/ergoplatform/validation/ValidationRules.scala @@ -6,7 +6,7 @@ import java.util import org.ergoplatform.ErgoConstants.MaxLoopLevelInCostFunction import scorex.util.ByteArrayBuilder import scorex.util.serialization.{VLQByteBufferReader, 
VLQByteBufferWriter} -import sigma.util.Extensions.ByteOps +import sigma.util.Extensions.toUByte import sigmastate.eval.IRContext import sigmastate.serialization.OpCodes.{OpCodeExtra, OpCode} import sigmastate.Values.{Value, ErgoTree, SValue, IntValue} @@ -27,41 +27,36 @@ case class ValidationRule( description: String ) extends SoftForkChecker { - /** Generic helper method to implement validation rules. - * It executes the given `block` only when this rule is disabled of `condition` is satisfied. - * Should be used in derived classes to implemented validation logic. - * - * @tparam T type of the result produced by `block` - * @param condition executes condition to be checked and returns its result - * @param cause executed only when condition returns false, attached as `cause` parameter when Validation exception - * @param args parameters which should be attached to ValidationException - * @param block executed only when condition returns true, its result become a result of `validate` call. - * @return result produced by the `block` if condition is true - * @throws SigmaException if this rule is not found in ValidationRules.currentSettings - * @throws ValidationException if the `condition` is not true. - * - * @see ValidationRules + /** Whether the status of this rule was checked on first invocation. */ + private var _checked: Boolean = false + + /** Check the rule is registered and enabled. + * Since it is easy to forget to register new rule, we need to do this check. + * But because it is hotspot, we do this check only once for each rule. 
+ * @hotspot executed on every typeCode and opCode during script deserialization */ - protected def validate[T]( - condition: => Boolean, - cause: => Throwable, args: Seq[Any], block: => T): T = { - ValidationRules.currentSettings.getStatus(this.id) match { - case None => + @inline protected final def checkRule(): Unit = { + if (!_checked) { + if (ValidationRules.currentSettings.getStatus(this.id).isEmpty) throw new SigmaException(s"ValidationRule $this not found in validation settings") - case Some(DisabledRule) => - block // if the rule is disabled we still need to execute the block of code - case Some(_) => - if (condition) { - block - } - else if (cause.isInstanceOf[ValidationException]) { - throw cause - } - else { - throw ValidationException(s"Validation failed on $this with args $args", this, args, Option(cause)) - } + _checked = true // prevent this check on every call (only first call is checked) + } + // upon successful return we know the rule is registered with EnabledRule status + } + + /** Throws ValidationException with the given cause and args. + * Should be used in all validation rules to unify ValidationException instances + * which can be thrown (to simplify handling). + */ + protected def throwValidationException(cause: Throwable, args: Seq[Any]) = { + if (cause.isInstanceOf[ValidationException]) { + throw cause + } + else { + throw ValidationException(s"Validation failed on $this with args $args", this, args, Option(cause)) } } + } /** Base class for all exceptions which may be thrown by validation rules. 
@@ -86,125 +81,158 @@ object ValidationRules { object CheckDeserializedScriptType extends ValidationRule(FirstRuleId, "Deserialized script should have expected type") { - def apply[T](d: DeserializeContext[_], script: SValue)(block: => T): T = - validate(d.tpe == script.tpe, - new InterpreterException(s"Failed context deserialization of $d: \n" + - s"expected deserialized script to have type ${d.tpe}; got ${script.tpe}"), - Seq[Any](d, script), block - ) + final def apply[T](d: DeserializeContext[_], script: SValue): Unit = { + checkRule() + if (d.tpe != script.tpe) { + throwValidationException( + new InterpreterException(s"Failed context deserialization of $d: \n" + + s"expected deserialized script to have type ${d.tpe}; got ${script.tpe}"), + Array[Any](d, script)) + } + } } object CheckDeserializedScriptIsSigmaProp extends ValidationRule(1001, "Deserialized script should have SigmaProp type") { - def apply[T](root: SValue)(block: => T): T = - validate(root.tpe.isSigmaProp, - new SerializerException(s"Failed deserialization, expected deserialized script to have type SigmaProp; got ${root.tpe}"), - Seq(root), block - ) + final def apply[T](root: SValue): Unit = { + checkRule() + if (!root.tpe.isSigmaProp) { + throwValidationException( + new SerializerException(s"Failed deserialization, expected deserialized script to have type SigmaProp; got ${root.tpe}"), + Array(root)) + } + } } object CheckValidOpCode extends ValidationRule(1002, "Check the opcode is supported by registered serializer or is added via soft-fork") with SoftForkWhenCodeAdded { - def apply[T](ser: ValueSerializer[_], opCode: OpCode)(block: => T): T = { - def msg = s"Cannot find serializer for Value with opCode = LastConstantCode + ${opCode.toUByte - OpCodes.LastConstantCode}" - def args = Seq(opCode) - validate(ser != null && ser.opCode == opCode, new InvalidOpCode(msg), args, block) + final def apply[T](ser: ValueSerializer[_], opCode: OpCode): Unit = { + checkRule() + if (ser == null || 
ser.opCode != opCode) { + throwValidationException( + new InvalidOpCode(s"Cannot find serializer for Value with opCode = LastConstantCode + ${toUByte(opCode) - OpCodes.LastConstantCode}"), + Array(opCode)) + } } } object CheckIsSupportedIndexExpression extends ValidationRule(1003, "Check the index expression for accessing collection element is supported.") { - def apply[Ctx <: IRContext, T](ctx: Ctx)(coll: Value[SCollection[_]], i: IntValue, iSym: ctx.Rep[Int])(block: => T): T = { - def msg = s"Unsupported index expression $i when accessing collection $coll" - def args = Seq(coll, i) - validate(ctx.isSupportedIndexExpression(iSym), - new SigmaException(msg, i.sourceContext.toOption), - args, block) + final def apply[Ctx <: IRContext, T](ctx: Ctx)(coll: Value[SCollection[_]], i: IntValue, iSym: ctx.Rep[Int]): Unit = { + checkRule() + if (!ctx.isSupportedIndexExpression(iSym)) + throwValidationException( + new SigmaException(s"Unsupported index expression $i when accessing collection $coll", i.sourceContext.toOption), + Array(coll, i)) } } object CheckCostFunc extends ValidationRule(1004, "Cost function should contain only operations from specified list.") { - def apply[Ctx <: IRContext, T](ctx: Ctx)(costF: ctx.Rep[Any => Int])(block: => T): T = { - def args = Seq(costF) - lazy val verification = ctx.verifyCostFunc(ctx.asRep[Any => Int](costF)) - validate(verification.isSuccess, - verification.toEither.left.get, - args, block) + final def apply[Ctx <: IRContext, T](ctx: Ctx)(costF: ctx.Rep[Any => Int]): Unit = { + checkRule() + val verification = ctx.verifyCostFunc(ctx.asRep[Any => Int](costF)) + if (!verification.isSuccess) { + throwValidationException(verification.toEither.left.get, Array(costF)) + } } } object CheckCalcFunc extends ValidationRule(1005, "If SigmaProp.isProven method calls exists in the given function,\n then it is the last operation") { - def apply[Ctx <: IRContext, T](ctx: Ctx)(calcF: ctx.Rep[ctx.Context => Any])(block: => T): T = { - def args = 
Seq(calcF) - lazy val verification = ctx.verifyIsProven(calcF) - validate(verification.isSuccess, - verification.toEither.left.get, - args, block) + final def apply[Ctx <: IRContext, T](ctx: Ctx)(calcF: ctx.Rep[ctx.Context => Any]): Unit = { + checkRule() + val verification = ctx.verifyIsProven(calcF) + if (!verification.isSuccess) { + throwValidationException(verification.toEither.left.get, Array(calcF)) + } } } object CheckTupleType extends ValidationRule(1006, "Supported tuple type.") with SoftForkWhenReplaced { - def apply[Ctx <: IRContext, T](ctx: Ctx)(e: ctx.Elem[_])(block: => T): T = { - def msg = s"Invalid tuple type $e" - lazy val condition = e match { + final def apply[Ctx <: IRContext, T](ctx: Ctx)(e: ctx.Elem[_]): Unit = { + checkRule() + val condition = e match { case _: ctx.PairElem[_,_] => true case _ => false } - validate(condition, new SigmaException(msg), Seq[ctx.Elem[_]](e), block) + if (!condition) { + throwValidationException(new SigmaException(s"Invalid tuple type $e"), Array[ctx.Elem[_]](e)) + } } } object CheckPrimitiveTypeCode extends ValidationRule(1007, "Check the primitive type code is supported or is added via soft-fork") with SoftForkWhenCodeAdded { - def apply[T](code: Byte)(block: => T): T = { - val ucode = code.toUByte - def msg = s"Cannot deserialize primitive type with code $ucode" - validate(ucode > 0 && ucode < embeddableIdToType.length, new SerializerException(msg), Seq(code), block) + final def apply[T](code: Byte): Unit = { + checkRule() + val ucode = toUByte(code) + if (ucode <= 0 || ucode >= embeddableIdToType.length) { + throwValidationException( + new SerializerException(s"Cannot deserialize primitive type with code $ucode"), + Array(code)) + } } } object CheckTypeCode extends ValidationRule(1008, "Check the non-primitive type code is supported or is added via soft-fork") with SoftForkWhenCodeAdded { - def apply[T](typeCode: Byte)(block: => T): T = { - val ucode = typeCode.toUByte - def msg = s"Cannot deserialize the 
non-primitive type with code $ucode" - validate(ucode <= SGlobal.typeCode.toUByte, new SerializerException(msg), Seq(typeCode), block) + final def apply[T](typeCode: Byte): Unit = { + checkRule() + val ucode = toUByte(typeCode) + if (ucode > toUByte(SGlobal.typeCode)) { + throwValidationException( + new SerializerException(s"Cannot deserialize the non-primitive type with code $ucode"), + Array(typeCode)) + } } } object CheckSerializableTypeCode extends ValidationRule(1009, "Check the data values of the type (given by type code) can be serialized") with SoftForkWhenReplaced { - def apply[T](typeCode: Byte)(block: => T): T = { - val ucode = typeCode.toUByte - def msg = s"Data value of the type with the code $ucode cannot be deserialized." - validate(ucode <= OpCodes.LastDataType.toUByte, new SerializerException(msg), Seq(typeCode), block) + final def apply[T](typeCode: Byte): Unit = { + checkRule() + val ucode = toUByte(typeCode) + if (ucode > toUByte(OpCodes.LastDataType)) { + throwValidationException( + new SerializerException(s"Data value of the type with the code $ucode cannot be deserialized."), + Array(typeCode)) + } } } object CheckTypeWithMethods extends ValidationRule(1010, "Check the type (given by type code) supports methods") with SoftForkWhenCodeAdded { - def apply[T](typeCode: Byte, cond: => Boolean)(block: => T): T = { - val ucode = typeCode.toUByte - def msg = s"Type with code $ucode doesn't support methods." 
- validate(cond, new SerializerException(msg), Seq(typeCode), block) + final def apply[T](typeCode: Byte, cond: Boolean): Unit = { + checkRule() + val ucode = toUByte(typeCode) + if (!cond) { + throwValidationException( + new SerializerException(s"Type with code $ucode doesn't support methods."), + Array(typeCode)) + } } } object CheckAndGetMethod extends ValidationRule(1011, "Check the type has the declared method.") { - def apply[T](objType: STypeCompanion, methodId: Byte)(block: SMethod => T): T = { - def msg = s"The method with code $methodId doesn't declared in the type $objType." - lazy val methodOpt = objType.getMethodById(methodId) - validate(methodOpt.isDefined, new SerializerException(msg), Seq(objType, methodId), block(methodOpt.get)) + final def apply[T](objType: STypeCompanion, methodId: Byte): SMethod = { + checkRule() + val methodOpt = objType.getMethodById(methodId) + if (methodOpt.isDefined) methodOpt.get + else { + throwValidationException( + new SerializerException(s"The method with code $methodId doesn't declared in the type $objType."), + Array(objType, methodId)) + } } + override def isSoftFork(vs: SigmaValidationSettings, ruleId: Short, status: RuleStatus, @@ -218,10 +246,14 @@ object ValidationRules { object CheckHeaderSizeBit extends ValidationRule(1012, "For version greater then 0, size bit should be set.") with SoftForkWhenReplaced { - def apply(header: Byte): Unit = { - validate( - ErgoTree.getVersion(header) == 0 || ErgoTree.hasSize(header), - new SigmaException(s"Invalid ErgoTreeHeader $header, size bit is expected"), Seq(header), {}) + final def apply(header: Byte): Unit = { + checkRule() + val version = ErgoTree.getVersion(header) + if (version != 0 && !ErgoTree.hasSize(header)) { + throwValidationException( + new SigmaException(s"Invalid ErgoTreeHeader $header, size bit is expected for version $version"), + Array(header)) + } } } @@ -235,10 +267,13 @@ object ValidationRules { * checking consistency). 
*/ object CheckCostFuncOperation extends ValidationRule(1013, "Check the opcode is allowed in cost function") with SoftForkWhenCodeAdded { - def apply[Ctx <: IRContext, T](ctx: Ctx)(opCode: OpCodeExtra)(block: => T): T = { - def msg = s"Not allowed opCode $opCode in cost function" - def args = Seq(opCode) - validate(ctx.isAllowedOpCodeInCosting(opCode), new CosterException(msg, None), args, block) + final def apply[Ctx <: IRContext, T](ctx: Ctx)(opCode: OpCodeExtra): Unit = { + checkRule() + if (!ctx.isAllowedOpCodeInCosting(opCode)) { + throwValidationException( + new CosterException(s"Not allowed opCode $opCode in cost function", None), + Array(opCode)) + } } override def isSoftFork(vs: SigmaValidationSettings, @@ -273,10 +308,14 @@ object ValidationRules { object CheckLoopLevelInCostFunction extends ValidationRule(1015, "Check that loop level is not exceeded.") with SoftForkWhenReplaced { - def apply(level: Int): Unit = { + final def apply(level: Int): Unit = { + checkRule() val max = MaxLoopLevelInCostFunction.value - validate(level <= max, - new CosterException(s"The loop level $level exceeds maximum $max", None), Seq(level), {}) + if (level > max) { + throwValidationException( + new CosterException(s"The loop level $level exceeds maximum $max", None), + Array(level)) + } } } diff --git a/src/main/scala/sigmastate/Values.scala b/src/main/scala/sigmastate/Values.scala index de458c3c23..ace5fb8e31 100644 --- a/src/main/scala/sigmastate/Values.scala +++ b/src/main/scala/sigmastate/Values.scala @@ -28,6 +28,7 @@ import special.sigma.Extensions._ import sigmastate.eval._ import sigmastate.eval.Extensions._ import sigma.util.Extensions.ByteOps +import spire.syntax.all.cfor import scala.language.implicitConversions import scala.reflect.ClassTag @@ -660,6 +661,7 @@ object Values { } } + /** @hotspot don't beautify this code */ override def parse(r: SigmaByteReader): SigmaBoolean = { val depth = r.level r.level = depth + 1 @@ -671,16 +673,25 @@ object Values { case 
ProveDHTupleCode => dhtSerializer.parse(r) case AndCode => val n = r.getUShort() - val children = (0 until n).map(_ => serializer.parse(r)) + val children = new Array[SigmaBoolean](n) + cfor(0)(_ < n, _ + 1) { i => + children(i) = serializer.parse(r) + } CAND(children) case OrCode => val n = r.getUShort() - val children = (0 until n).map(_ => serializer.parse(r)) + val children = new Array[SigmaBoolean](n) + cfor(0)(_ < n, _ + 1) { i => + children(i) = serializer.parse(r) + } COR(children) case AtLeastCode => val k = r.getUShort() val n = r.getUShort() - val children = (0 until n).map(_ => serializer.parse(r)) + val children = new Array[SigmaBoolean](n) + cfor(0)(_ < n, _ + 1) { i => + children(i) = serializer.parse(r) + } CTHRESHOLD(k, children) } r.level = r.level - 1 @@ -947,18 +958,33 @@ object Values { * instead of some Constant nodes. Otherwise, it may not contain placeholders. * It is possible to have both constants and placeholders in the tree, but for every placeholder * there should be a constant in `constants` array. + * @param givenComplexity structural complexity of the tree or 0 if is not specified at construction time. + * Access to this private value is provided via `complexity` property. + * In case of 0, the complexity is computed using ErgoTree deserializer, which can do this. + * When specified it should be computed as the sum of complexity values taken + * from ComplexityTable for all tree nodes. It approximates the time needed to process + * the tree by sigma compiler to obtain cost formula. Overly complex trees can thus + * be rejected even before compiler starts working. + * @param propositionBytes original bytes of this tree from which it has been deserialized. + * If null then the bytes are not provided, and will be lazily generated when `bytes` + * method is called. 
+ * These bytes are obtained in two ways: + * 1) in the ErgoTreeSerializer from Reader + * 2) in the alternative constructor using ErgoTreeSerializer.serializeErgoTree + * */ case class ErgoTree private[sigmastate]( header: Byte, constants: IndexedSeq[Constant[SType]], root: Either[UnparsedErgoTree, SigmaPropValue], - givenComplexity: Int + private val givenComplexity: Int, + private val propositionBytes: Array[Byte] ) { def this(header: Byte, constants: IndexedSeq[Constant[SType]], root: Either[UnparsedErgoTree, SigmaPropValue]) = - this(header, constants, root, 0) + this(header, constants, root, 0, DefaultSerializer.serializeErgoTree(ErgoTree(header, constants, root, 0, null))) require(isConstantSegregation || constants.isEmpty) require(version == 0 || hasSize, s"For newer version the size bit is required: $this") @@ -969,13 +995,26 @@ object Values { @deprecated("Use toProposition instead", "v2.1") lazy val proposition: SigmaPropValue = toProposition(isConstantSegregation) - @inline def version: Byte = ErgoTree.getVersion(header) - @inline def isRightParsed: Boolean = root.isRight - @inline def isConstantSegregation: Boolean = ErgoTree.isConstantSegregation(header) - @inline def hasSize: Boolean = ErgoTree.hasSize(header) - @inline def bytes: Array[Byte] = DefaultSerializer.serializeErgoTree(this) + @inline final def version: Byte = ErgoTree.getVersion(header) + @inline final def isRightParsed: Boolean = root.isRight + @inline final def isConstantSegregation: Boolean = ErgoTree.isConstantSegregation(header) + @inline final def hasSize: Boolean = ErgoTree.hasSize(header) + + private var _bytes: Array[Byte] = propositionBytes + + /** Serialized bytes of this tree. */ + final def bytes: Array[Byte] = { + if (_bytes == null) { + _bytes = DefaultSerializer.serializeErgoTree(this) + } + _bytes + } private var _complexity: Int = givenComplexity + + /** Structural complexity estimation of this tree. 
+ * @see ComplexityTable + */ lazy val complexity: Int = { if (_complexity == 0) { _complexity = DefaultSerializer.deserializeErgoTree(bytes).complexity @@ -1034,9 +1073,9 @@ object Values { /** Default header with constant segregation enabled. */ val ConstantSegregationHeader: Byte = (DefaultHeader | ConstantSegregationFlag).toByte - @inline def isConstantSegregation(header: Byte): Boolean = (header & ConstantSegregationFlag) != 0 - @inline def hasSize(header: Byte): Boolean = (header & SizeFlag) != 0 - @inline def getVersion(header: Byte): Byte = (header & VersionMask).toByte + @inline final def isConstantSegregation(header: Byte): Boolean = (header & ConstantSegregationFlag) != 0 + @inline final def hasSize(header: Byte): Boolean = (header & SizeFlag) != 0 + @inline final def getVersion(header: Byte): Byte = (header & VersionMask).toByte def substConstants(root: SValue, constants: IndexedSeq[Constant[SType]]): SValue = { val store = new ConstantStore(constants) diff --git a/src/main/scala/sigmastate/basics/BcDlogGroup.scala b/src/main/scala/sigmastate/basics/BcDlogGroup.scala index 8c3fc7d50a..c248788491 100644 --- a/src/main/scala/sigmastate/basics/BcDlogGroup.scala +++ b/src/main/scala/sigmastate/basics/BcDlogGroup.scala @@ -8,7 +8,7 @@ import org.bouncycastle.math.ec.custom.djb.Curve25519Point import org.bouncycastle.math.ec.custom.sec.{SecP256K1Point, SecP384R1Point, SecP521R1Point} import org.bouncycastle.math.ec.ECPoint import org.bouncycastle.util.BigIntegers - +import spire.syntax.all.cfor import scala.collection.mutable import scala.util.Try @@ -47,7 +47,7 @@ abstract class BcDlogGroup[ElemType <: ECPoint](val x9params: X9ECParameters) ex exponentiations += this.base // add the base - base^1 val two = new BigInteger("2") - (1 until 4).foreach { i => + cfor(1)(_ < 4, _ + 1) { i => exponentiations += exponentiate(exponentiations(i - 1), two) } @@ -60,7 +60,7 @@ abstract class BcDlogGroup[ElemType <: ECPoint](val x9params: X9ECParameters) ex private def 
prepareExponentiations(size: BigInteger): Unit = { //find log of the number - this is the index of the size-exponent in the exponentiation array val index = size.bitLength - 1 /* calculates the necessary exponentiations and put them in the exponentiations vector */ - (exponentiations.size to index).foreach { i => + cfor(exponentiations.size)(_ <= index, _ + 1) { i => exponentiations += exponentiate(exponentiations(i - 1), two) } } @@ -358,9 +358,9 @@ abstract class BcDlogGroup[ElemType <: ECPoint](val x9params: X9ECParameters) ex */ private def computeLoop(exponentiations: Array[BigInteger], w: Int, h: Int, preComp: Seq[Seq[ElemType]], result: ElemType, bitIndex: Int) = { var res = result - (0 until h).foreach { k => + cfor(0)(_ < h, _ + 1) { k => var e = 0 - (k * w until (k * w + w)).foreach { i => + cfor(k * w)(_ < (k * w + w), _ + 1) { i => if (i < exponentiations.length) { //if the bit is set, change the e value if (exponentiations(i).testBit(bitIndex)) { val twoPow = Math.pow(2, i - k * w).toInt @@ -381,9 +381,9 @@ abstract class BcDlogGroup[ElemType <: ECPoint](val x9params: X9ECParameters) ex //create the pre-computation table of size h*(2^(w)) val preComp: Seq[mutable.Seq[ElemType]] = Seq.fill(h)(mutable.Seq.fill(twoPowW)(identity)) - (0 until h).foreach { k => - (0 until twoPowW).foreach { e => - (0 until w).foreach { i => + cfor(0)(_ < h, _ + 1) { k => + cfor(0)(_ < twoPowW, _ + 1) { e => + cfor(0)(_ < w, _ + 1) { i => val baseIndex = k * w + i if (baseIndex < groupElements.length) { val base = groupElements(baseIndex) diff --git a/src/main/scala/sigmastate/eval/CostingDataContext.scala b/src/main/scala/sigmastate/eval/CostingDataContext.scala index 8149f68371..4e5641af19 100644 --- a/src/main/scala/sigmastate/eval/CostingDataContext.scala +++ b/src/main/scala/sigmastate/eval/CostingDataContext.scala @@ -16,7 +16,7 @@ import sigmastate.interpreter.{CryptoConstants, Interpreter} import special.collection.{Size, CSizeOption, SizeColl, CCostedBuilder, 
CollType, SizeOption, CostedBuilder, Coll} import special.sigma.{Box, _} import sigmastate.eval.Extensions._ - +import spire.syntax.all.cfor import scala.util.{Success, Failure} import scalan.RType import scorex.crypto.hash.{Digest32, Sha256, Blake2b256} @@ -198,9 +198,11 @@ case class CAvlTree(treeData: AvlTreeData) extends AvlTree with WrapperOf[AvlTre if (!isRemoveAllowed) { None } else { - val keysToRemove = operations.toArray.map(_.toArray) val bv = createVerifier(proof) - keysToRemove.foreach(key => bv.performOneOperation(Remove(ADKey @@ key))) + cfor(0)(_ < operations.length, _ + 1) { i => + val key = operations(i).toArray + bv.performOneOperation(Remove(ADKey @@ key)) + } bv.digest match { case Some(v) => Some(updateDigest(Colls.fromArray(v))) case _ => None @@ -461,8 +463,7 @@ class CCostModel extends CostModel { def PubKeySize: Long = CryptoConstants.EncodedGroupElementLength } -class CostingSigmaDslBuilder extends TestSigmaDslBuilder { - dsl => +class CostingSigmaDslBuilder extends TestSigmaDslBuilder { dsl => implicit val validationSettings = ValidationRules.currentSettings override val Costing: CostedBuilder = new CCostedBuilder { diff --git a/src/main/scala/sigmastate/eval/CostingRules.scala b/src/main/scala/sigmastate/eval/CostingRules.scala index c17cba7c42..a38d8176b8 100644 --- a/src/main/scala/sigmastate/eval/CostingRules.scala +++ b/src/main/scala/sigmastate/eval/CostingRules.scala @@ -455,7 +455,7 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => val info = obj.value.creationInfo val l = RCCostedPrim(info._1, IntZero, SizeInt) val r = RCCostedColl(info._2, HashInfo.costZeros, HashInfo.sizesColl, IntZero) - val cost = opCost(Pair(l, r), Seq(obj.cost), getRegisterCost) + val cost = opCost(Pair(l, r), Array(obj.cost), getRegisterCost) RCCostedPair(l, r, cost) } @@ -469,7 +469,7 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => implicit val elem = tT.eA val valueOpt = obj.value.getReg(i.value)(elem) val sReg = 
asSizeOption(sBox.getReg(downcast[Byte](i.value))(elem)) - RCCostedOption(valueOpt, SomeIntZero, sReg.sizeOpt, opCost(valueOpt, Seq(obj.cost), getRegisterCost)) + RCCostedOption(valueOpt, SomeIntZero, sReg.sizeOpt, opCost(valueOpt, Array(obj.cost), getRegisterCost)) } } diff --git a/src/main/scala/sigmastate/eval/Evaluation.scala b/src/main/scala/sigmastate/eval/Evaluation.scala index b84768f3f3..84c2895046 100644 --- a/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/src/main/scala/sigmastate/eval/Evaluation.scala @@ -332,14 +332,14 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => scope.schedule.foreach { te => te.rhs match { case op @ LoopOperation(bodyLam) => - CheckCostFuncOperation(this)(getOpCodeEx(op)) { true } + CheckCostFuncOperation(this)(getOpCodeEx(op)) val nextLevel = level + 1 CheckLoopLevelInCostFunction(nextLevel) traverseScope(bodyLam, nextLevel) case CollM.flatMap(_, Def(lam: Lambda[_,_])) => traverseScope(lam, level) // special case because the body is limited (so don't increase level) case op => - CheckCostFuncOperation(this)(getOpCodeEx(op)) { true } + CheckCostFuncOperation(this)(getOpCodeEx(op)) } } } diff --git a/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/src/main/scala/sigmastate/eval/RuntimeCosting.scala index 3952c4dbbd..88f064bf1f 100644 --- a/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -596,26 +596,26 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val zeros = colBuilder.replicate(len, IntZero) // in order to fail fast this line is computed first before the fold loop below - val preFoldCost = opCost(resV, Seq(xs.cost, zero.cost), len * CostTable.lambdaInvoke + CostTable.lambdaCost) + val preFoldCost = opCost(resV, Array(xs.cost, zero.cost), len * CostTable.lambdaInvoke + CostTable.lambdaCost) val Pair(resS, resC) = sizes.foldLeft(Pair(zero.size, preFoldCost), fun { in: Rep[((Size[b], Int), Size[a])] => val 
Pair(Pair(accSizeB, accCost), xSize) = in val sBA = RCSizePair(accSizeB, xSize) val size = sizeF(sBA) // unfold sizeF - val cost: Rep[Int] = opCost(size, Seq(accCost), asRep[Int](Apply(costF, Pair(IntZero, sBA), false)) + CostTable.lambdaInvoke) + val cost: Rep[Int] = opCost(size, Array(accCost), asRep[Int](Apply(costF, Pair(IntZero, sBA), false)) + CostTable.lambdaInvoke) val res = Pair(size, cost) res } ) - val cost = opCost(resV, Seq(preFoldCost), resC) + val cost = opCost(resV, Array(preFoldCost), resC) RCCostedPrim(resV, cost, resS) case CostedM.cost(Def(CCostedCollCtor(values, costs, _, accCost))) => accCost.rhs match { case _: OpCost => - opCost(values, Seq(accCost), costs.sum(intPlusMonoid)) // OpCost should be in args position + opCost(values, Array(accCost), costs.sum(intPlusMonoid)) // OpCost should be in args position case _ => opCost(values, Nil, costs.sum(intPlusMonoid) + accCost) } @@ -623,13 +623,13 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case CostedM.cost(Def(CCostedOptionCtor(v, costOpt, _, accCost))) => accCost.rhs match { case _: OpCost => - opCost(v, Seq(accCost), costOpt.getOrElse(Thunk(IntZero))) // OpCost should be in args position + opCost(v, Array(accCost), costOpt.getOrElse(Thunk(IntZero))) // OpCost should be in args position case _ => opCost(v, Nil, costOpt.getOrElse(Thunk(IntZero)) + accCost) } case CostedM.cost(Def(CCostedPairCtor(l, r, accCost))) => - val costs = Seq(l.cost, r.cost, accCost) + val costs = Array(l.cost, r.cost, accCost) val v = Pair(l.value, r.value) mkNormalizedOpCost(v, costs) @@ -725,7 +725,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val zero = IntZero val l = RCCostedPrim(p._1, zero, sPair.l) val r = RCCostedPrim(p._2, zero, sPair.r) - val newCost = if (c == zero) zero else opCost(Pair(l, r), Seq(c), zero) + val newCost = if (c == zero) zero else opCost(Pair(l, r), Array(c), zero) RCCostedPair(l, r, newCost) case _ => !!!(s"Expected RCSizePair node but was $s -> 
${s.rhs}") @@ -792,7 +792,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => protected override def onReset(): Unit = { super.onReset() // WARNING: every lazy value should be listed here, otherwise bevavior after resetContext is undefined and may throw. - Seq(_sigmaDslBuilder, _colBuilder, _sizeBuilder, _costedBuilder, + Array(_sigmaDslBuilder, _colBuilder, _sizeBuilder, _costedBuilder, _monoidBuilder, _intPlusMonoid, _longPlusMonoid, _costedGlobal, _costOfProveDlog, _costOfDHTuple) .foreach(_.reset()) @@ -1067,7 +1067,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => * This is required to correctly handle tuple field accesses like `v._1` * and not to lose the cost of `v` in the cost of resulting value. */ def attachCost[T](source: RCosted[T], accCost: Rep[Int], cost: Rep[Int]): RCosted[T] = asRep[Costed[T]] { - def newCost(v: Sym, c: Rep[Int]) = opCost(v, Seq(accCost, c), cost) // put cost in dependency list + def newCost(v: Sym, c: Rep[Int]) = opCost(v, Array(accCost, c), cost) // put cost in dependency list source.elem.eVal match { case e: CollElem[a, _] => @@ -1139,7 +1139,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => xs: RCostedColl[T], condition: RCosted[T => SType#WrappedType], calcF: Rep[T => Any], accCost: Rep[Int]) = { - val args = Seq(xs.cost, condition.cost) + val args: Seq[Rep[Int]] = Array(xs.cost, condition.cost) val res = calcF.elem.eRange.asInstanceOf[Elem[_]] match { case BooleanElement => node match { @@ -1292,7 +1292,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case CreateProveDlog(In(_v)) => val vC = asRep[Costed[GroupElement]](_v) val resV: Rep[SigmaProp] = sigmaDslBuilder.proveDlog(vC.value) - val cost = opCost(resV, Seq(vC.cost), CostOfProveDlog) + val cost = opCost(resV, Array(vC.cost), CostOfProveDlog) RCCostedPrim(resV, cost, SizeSigmaProposition) case CreateProveDHTuple(In(_gv), In(_hv), In(_uv), In(_vv)) => @@ -1301,21 +1301,21 @@ trait RuntimeCosting extends 
CostingRules { IR: IRContext => val uvC = asRep[Costed[GroupElement]](_uv) val vvC = asRep[Costed[GroupElement]](_vv) val resV: Rep[SigmaProp] = sigmaDslBuilder.proveDHTuple(gvC.value, hvC.value, uvC.value, vvC.value) - val cost = opCost(resV, Seq(gvC.cost, hvC.cost, uvC.cost, vvC.cost), CostOfDHTuple) + val cost = opCost(resV, Array(gvC.cost, hvC.cost, uvC.cost, vvC.cost), CostOfDHTuple) RCCostedPrim(resV, cost, SizeSigmaProposition) case sigmastate.Exponentiate(In(_l), In(_r)) => val l = asRep[Costed[GroupElement]](_l) val r = asRep[Costed[BigInt]](_r) val value = l.value.exp(r.value) - val cost = opCost(value, Seq(l.cost, r.cost), costOf(node)) + val cost = opCost(value, Array(l.cost, r.cost), costOf(node)) RCCostedPrim(value, cost, SizeGroupElement) case sigmastate.MultiplyGroup(In(_l), In(_r)) => val l = asRep[Costed[GroupElement]](_l) val r = asRep[Costed[GroupElement]](_r) val value = l.value.multiply(r.value) - val cost = opCost(value, Seq(l.cost, r.cost), costOf(node)) + val cost = opCost(value, Array(l.cost, r.cost), costOf(node)) RCCostedPrim(value, cost, SizeGroupElement) case Values.GroupGenerator => @@ -1326,13 +1326,13 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val arr = arrC.value val value = sigmaDslBuilder.byteArrayToBigInt(arr) val size = arrC.size.dataSize - val cost = opCost(value, Seq(arrC.cost), costOf(node) + costOf("new_BigInteger_per_item", node.opType) * size.toInt) + val cost = opCost(value, Array(arrC.cost), costOf(node) + costOf("new_BigInteger_per_item", node.opType) * size.toInt) RCCostedPrim(value, cost, SizeBigInt) case sigmastate.LongToByteArray(In(_x)) => val xC = asRep[Costed[Long]](_x) val col = sigmaDslBuilder.longToByteArray(xC.value) // below we assume col.length == typeSize[Long] - val cost = opCost(col, Seq(xC.cost), costOf(node)) + val cost = opCost(col, Array(xC.cost), costOf(node)) LongBytesInfo.mkCostedColl(col, cost) // opt.get => @@ -1345,11 +1345,11 @@ trait RuntimeCosting extends CostingRules { 
IR: IRContext => // opt.getOrElse => case utxo.OptionGetOrElse(In(_opt), In(_default)) => - OptionCoster(_opt, SOption.GetOrElseMethod, Seq(_default)) + OptionCoster(_opt, SOption.GetOrElseMethod, Array(_default)) case SelectField(In(_tup), fieldIndex) => val eTuple = _tup.elem.eVal.asInstanceOf[Elem[_]] - CheckTupleType(IR)(eTuple) {} + CheckTupleType(IR)(eTuple) eTuple match { case pe: PairElem[a,b] => assert(fieldIndex == 1 || fieldIndex == 2, s"Invalid field index $fieldIndex of the pair ${_tup}: $pe") @@ -1373,7 +1373,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case Values.Tuple(InSeq(Seq(x, y))) => val v = Pair(x, y) - val costs = Seq(x.cost, y.cost, CostTable.newPairValueCost: Rep[Int]) + val costs = Array(x.cost, y.cost, CostTable.newPairValueCost: Rep[Int]) val c = mkNormalizedOpCost(v, costs) RCCostedPair(x, y, c) @@ -1407,7 +1407,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case MapCollection(input, sfunc) => val inputC = evalNode(ctx, env, input) val mapper = evalNode(ctx, env, sfunc) - val res = CollCoster(inputC, SCollection.MapMethod, Seq(mapper)) + val res = CollCoster(inputC, SCollection.MapMethod, Array(mapper)) res case Fold(input, zero, sfunc) => @@ -1445,7 +1445,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val vals = inputC.values.slice(f, u) val costs = inputC.costs val sizes = inputC.sizes - RCCostedColl(vals, costs, sizes, opCost(vals, Seq(inputC.valuesCost), costOf(op))) + RCCostedColl(vals, costs, sizes, opCost(vals, Array(inputC.valuesCost), costOf(op))) case Append(In(_col1), In(_col2)) => val col1 = asRep[CostedColl[Any]](_col1) @@ -1453,12 +1453,12 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val values = col1.values.append(col2.values) val costs = col1.costs.append(col2.costs) val sizes = col1.sizes.append(col2.sizes) - RCCostedColl(values, costs, sizes, opCost(values, Seq(col1.cost, col2.cost), costOf(node))) + RCCostedColl(values, costs, sizes, 
opCost(values, Array(col1.cost, col2.cost), costOf(node))) case Filter(input, p) => val inputC = evalNode(ctx, env, input) val pC = evalNode(ctx, env, p) - val res = CollCoster(inputC, SCollection.FilterMethod, Seq(pC)) + val res = CollCoster(inputC, SCollection.FilterMethod, Array(pC)) res case Terms.Apply(f, Seq(x)) if f.tpe.isFunc => @@ -1485,7 +1485,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val sizeF = fC.sliceSize val value = xC.value val y: Rep[Any] = Apply(calcF, value, false) - val c: Rep[Int] = opCost(y, Seq(fC.cost, xC.cost), asRep[Int](Apply(costF, Pair(IntZero, xC.size), false)) + CostTable.lambdaInvoke) + val c: Rep[Int] = opCost(y, Array(fC.cost, xC.cost), asRep[Int](Apply(costF, Pair(IntZero, xC.size), false)) + CostTable.lambdaInvoke) val s: Rep[Size[Any]]= Apply(sizeF, xC.size, false) RCCostedPrim(y, c, s) } @@ -1496,12 +1496,12 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case CalcBlake2b256(In(input)) => val bytesC = asRep[Costed[Coll[Byte]]](input) val res = sigmaDslBuilder.blake2b256(bytesC.value) - val cost = opCost(res, Seq(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) + val cost = opCost(res, Array(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) HashInfo.mkCostedColl(res, cost) case CalcSha256(In(input)) => val bytesC = asRep[Costed[Coll[Byte]]](input) val res = sigmaDslBuilder.sha256(bytesC.value) - val cost = opCost(res, Seq(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) + val cost = opCost(res, Array(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) HashInfo.mkCostedColl(res, cost) case utxo.SizeOf(In(xs)) => @@ -1509,15 +1509,15 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case ce: CollElem[a,_] => val xsC = asRep[Costed[Coll[a]]](xs) val v = xsC.value.length - RCCostedPrim(v, opCost(v, Seq(xsC.cost), costOf(node)), SizeInt) + RCCostedPrim(v, opCost(v, Array(xsC.cost), costOf(node)), SizeInt) case se: StructElem[_] => val xsC = 
asRep[Costed[Struct]](xs) val v = se.fields.length - RCCostedPrim(v, opCost(v, Seq(xsC.cost), costOf(node)), SizeInt) + RCCostedPrim(v, opCost(v, Array(xsC.cost), costOf(node)), SizeInt) case pe: PairElem[a,b] => val xsC = asRep[Costed[(a,b)]](xs) val v: Rep[Int] = 2 - RCCostedPrim(v, opCost(v, Seq(xsC.cost), costOf(node)), SizeInt) + RCCostedPrim(v, opCost(v, Array(xsC.cost), costOf(node)), SizeInt) } case ByIndex(xs, i, defaultOpt) => @@ -1526,64 +1526,65 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val iV = iC.value val size = if (xs.tpe.elemType.isConstantSize) constantTypeSize(xsC.elem.eItem) - else - CheckIsSupportedIndexExpression(IR)(xs, i, iV) { - xsC.sizes(iV) - } + else { + CheckIsSupportedIndexExpression(IR)(xs, i, iV) + xsC.sizes(iV) + } + defaultOpt match { case Some(defaultValue) => val defaultC = asRep[Costed[Any]](eval(defaultValue)) val default = defaultC.value val value = xsC.value.getOrElse(iV, default) - val cost = opCost(value, Seq(xsC.cost, iC.cost, defaultC.cost), costOf(node)) + val cost = opCost(value, Array(xsC.cost, iC.cost, defaultC.cost), costOf(node)) RCCostedPrim(value, cost, size) case None => val value = xsC.value(iV) - RCCostedPrim(value, opCost(value, Seq(xsC.cost, iC.cost), costOf(node)), size) + RCCostedPrim(value, opCost(value, Array(xsC.cost, iC.cost), costOf(node)), size) } case SigmaPropIsProven(p) => val pC = asRep[Costed[SigmaProp]](eval(p)) val v = pC.value.isValid - val c = opCost(v, Seq(pC.cost), costOf(node)) + val c = opCost(v, Array(pC.cost), costOf(node)) RCCostedPrim(v, c, SizeBoolean) case SigmaPropBytes(p) => val pC = asRep[Costed[SigmaProp]](eval(p)) val v = pC.value.propBytes - SigmaPropBytesInfo.mkCostedColl(v, opCost(v, Seq(pC.cost), costOf(node))) + SigmaPropBytesInfo.mkCostedColl(v, opCost(v, Array(pC.cost), costOf(node))) case utxo.ExtractId(In(box)) => val boxC = asRep[Costed[Box]](box) val id = boxC.value.id - HashInfo.mkCostedColl(id, opCost(id, Seq(boxC.cost), costOf(node))) + 
HashInfo.mkCostedColl(id, opCost(id, Array(boxC.cost), costOf(node))) case utxo.ExtractBytesWithNoRef(In(box)) => val boxC = asRep[Costed[Box]](box) val v = boxC.value.bytesWithoutRef - BoxBytesWithoutRefsInfo.mkCostedColl(v, opCost(v, Seq(boxC.cost), costOf(node))) + BoxBytesWithoutRefsInfo.mkCostedColl(v, opCost(v, Array(boxC.cost), costOf(node))) case utxo.ExtractAmount(In(box)) => val boxC = asRep[Costed[Box]](box) val v = boxC.value.value - val c = opCost(v, Seq(boxC.cost), costOf(node)) + val c = opCost(v, Array(boxC.cost), costOf(node)) RCCostedPrim(v, c, SizeLong) case utxo.ExtractScriptBytes(In(box)) => val boxC = asRep[Costed[Box]](box) val bytes = boxC.value.propositionBytes - BoxPropositionBytesInfo.mkCostedColl(bytes, opCost(bytes, Seq(boxC.cost), costOf(node))) + BoxPropositionBytesInfo.mkCostedColl(bytes, opCost(bytes, Array(boxC.cost), costOf(node))) case utxo.ExtractBytes(In(box)) => val boxC = asRep[Costed[Box]](box) val bytes = boxC.value.bytes - BoxBytesInfo.mkCostedColl(bytes, opCost(bytes, Seq(boxC.cost), costOf(node))) + BoxBytesInfo.mkCostedColl(bytes, opCost(bytes, Array(boxC.cost), costOf(node))) case utxo.ExtractCreationInfo(In(box)) => BoxCoster(box, SBox.creationInfoMethod, Nil) case utxo.ExtractRegisterAs(In(box), regId, optTpe) => implicit val elem = stypeToElem(optTpe.elemType).asElem[Any] val i: RCosted[Int] = RCCostedPrim(regId.number.toInt, IntZero, SizeInt) - BoxCoster(box, SBox.getRegMethod, Seq(i), Seq(liftElem(elem))) + BoxCoster(box, SBox.getRegMethod, Array(i), Array(liftElem(elem))) case BoolToSigmaProp(bool) => val boolC = eval(bool) val value = sigmaDslBuilder.sigmaProp(boolC.value) - val cost = opCost(value, Seq(boolC.cost), costOf(node)) + val cost = opCost(value, Array(boolC.cost), costOf(node)) RCCostedPrim(value, cost, SizeSigmaProposition) case AtLeast(bound, input) => @@ -1595,7 +1596,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } val boundC = eval(bound) val res = 
sigmaDslBuilder.atLeast(boundC.value, inputC.values) - val cost = opCost(res, Seq(boundC.cost, inputC.cost), costOf(node)) + val cost = opCost(res, Array(boundC.cost, inputC.cost), costOf(node)) RCCostedPrim(res, cost, SizeSigmaProposition) case op: ArithOp[t] if op.tpe == SBigInt => @@ -1621,7 +1622,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => v = xC.value.max(yC.value) case code => error(s"Cannot perform Costing.evalNode($op): unknown opCode ${code}", op.sourceContext.toOption) } - val c = opCost(v, Seq(xC.cost, yC.cost), costOf(op)) + val c = opCost(v, Array(xC.cost, yC.cost), costOf(op)) RCCostedPrim(v, c, SizeBigInt) case op: ArithOp[t] => @@ -1632,18 +1633,18 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val y = evalNode(ctx, env, op.right) (x, y) match { case (x: RCosted[a], y: RCosted[b]) => val v = ApplyBinOp(binop, x.value, y.value) - withConstantSize(v, opCost(v, Seq(x.cost, y.cost), costOf(op))) + withConstantSize(v, opCost(v, Array(x.cost, y.cost), costOf(op))) } case LogicalNot(input) => val inputC = evalNode(ctx, env, input) val v = ApplyUnOp(Not, inputC.value) - withConstantSize(v, opCost(v, Seq(inputC.cost), costOf(node))) + withConstantSize(v, opCost(v, Array(inputC.cost), costOf(node))) case ModQ(input) => val inputC = asRep[Costed[BigInt]](eval(input)) val v = inputC.value.modQ - RCCostedPrim(v, opCost(v, Seq(inputC.cost), costOf(node)), SizeBigInt) + RCCostedPrim(v, opCost(v, Array(inputC.cost), costOf(node)), SizeBigInt) case ModQArithOp(l, r, code) => val lC = asRep[Costed[BigInt]](eval(l)) @@ -1653,7 +1654,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case OpCodes.MinusModQCode => lC.value.minusModQ(rC.value) case code => error(s"unknown code for modular arithmetic op: $code") } - RCCostedPrim(v, opCost(v, Seq(lC.cost, rC.cost), costOf(node)), SizeBigInt) + RCCostedPrim(v, opCost(v, Array(lC.cost, rC.cost), costOf(node)), SizeBigInt) case OR(input) => input match { case 
ConcreteCollection(items, tpe) => @@ -1667,7 +1668,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val inputC = asRep[CostedColl[Boolean]](eval(input)) val res = sigmaDslBuilder.anyOf(inputC.value) val nOps = inputC.sizes.length - 1 - val cost = opCost(res, Seq(inputC.cost), perItemCostOf(node, nOps)) + val cost = opCost(res, Array(inputC.cost), perItemCostOf(node, nOps)) withConstantSize(res, cost) } @@ -1683,7 +1684,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val inputC = tryCast[CostedColl[Boolean]](eval(input)) val res = sigmaDslBuilder.allOf(inputC.value) val nOps = inputC.sizes.length - 1 - val cost = opCost(res, Seq(inputC.cost), perItemCostOf(node, nOps)) + val cost = opCost(res, Array(inputC.cost), perItemCostOf(node, nOps)) withConstantSize(res, cost) } @@ -1699,7 +1700,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val inputC = tryCast[CostedColl[Boolean]](eval(input)) val res = sigmaDslBuilder.xorOf(inputC.value) val nOps = inputC.sizes.length - 1 - val cost = opCost(res, Seq(inputC.cost), perItemCostOf(node, nOps)) + val cost = opCost(res, Array(inputC.cost), perItemCostOf(node, nOps)) withConstantSize(res, cost) } @@ -1707,21 +1708,21 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val lC = evalNode(ctx, env, l) val rC = RCostedThunk(Thunk(evalNode(ctx, env, r)), IntZero) val v = Or.applyLazy(lC.value, rC.value) - val c = opCost(v, Seq(lC.cost, rC.cost), costOf(node)) + val c = opCost(v, Array(lC.cost, rC.cost), costOf(node)) withConstantSize(v, c) case BinAnd(l, r) => val lC = evalNode(ctx, env, l) val rC = RCostedThunk(Thunk(evalNode(ctx, env, r)), IntZero) val v = And.applyLazy(lC.value, rC.value) - val c = opCost(v, Seq(lC.cost, rC.cost), costOf(node)) + val c = opCost(v, Array(lC.cost, rC.cost), costOf(node)) withConstantSize(v, c) case BinXor(l, r) => val lC = evalNode(ctx, env, l) val rC = evalNode(ctx, env, r) val v = BinaryXorOp.apply(lC.value, rC.value) - val c = 
opCost(v, Seq(lC.cost, rC.cost), costOf(node)) + val c = opCost(v, Array(lC.cost, rC.cost), costOf(node)) withConstantSize(v, c) case neg: Negation[SNumericType]@unchecked => @@ -1731,7 +1732,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val inputC = evalNode(ctx, env, neg.input) inputC match { case x: RCosted[a] => val v = ApplyUnOp(op, x.value) - withConstantSize(v, opCost(v, Seq(x.cost), costOf(neg))) + withConstantSize(v, opCost(v, Array(x.cost), costOf(neg))) } case SigmaAnd(items) => @@ -1753,7 +1754,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => def tC = evalNode(ctx, env, t) def eC = evalNode(ctx, env, e) val resV = IF (cC.value) THEN tC.value ELSE eC.value - val resCost = opCost(resV, Seq(cC.cost, tC.cost, eC.cost), costOf("If", SFunc(Vector(SBoolean, If.tT, If.tT), If.tT))) + val resCost = opCost(resV, Array(cC.cost, tC.cost, eC.cost), costOf("If", SFunc(Vector(SBoolean, If.tT, If.tT), If.tT))) RCCostedPrim(resV, resCost, tC.size) // TODO costing: implement tC.size max eC.size case rel: Relation[t, _] => @@ -1770,9 +1771,9 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => costOf(rel.opName, SBigInt.RelationOpType) } else costOf(rel) - opCost(value, Seq(x.cost, y.cost), opcost) + opCost(value, Array(x.cost, y.cost), opcost) } - else opCost(value, Seq(x.cost, y.cost), perKbCostOf(node, x.size.dataSize + y.size.dataSize)) + else opCost(value, Array(x.cost, y.cost), perKbCostOf(node, x.size.dataSize + y.size.dataSize)) val res = withConstantSize(value, cost) res } @@ -1823,17 +1824,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case sigmastate.Upcast(In(inputC), tpe) => val elem = stypeToElem(tpe.asNumType) val res = upcast(inputC.value)(elem) - withConstantSize(res, opCost(res, Seq(inputC.cost), costOf(node))) + withConstantSize(res, opCost(res, Array(inputC.cost), costOf(node))) case sigmastate.Downcast(In(inputC), tpe) => val elem = stypeToElem(tpe.asNumType) val res = 
downcast(inputC.value)(elem) - withConstantSize(res, opCost(res, Seq(inputC.cost), costOf(node))) + withConstantSize(res, opCost(res, Array(inputC.cost), costOf(node))) case ByteArrayToLong(In(arr)) => val arrC = asRep[Costed[Coll[Byte]]](arr) val value = sigmaDslBuilder.byteArrayToLong(arrC.value) - val cost = opCost(value, Seq(arrC.cost), costOf(node)) + val cost = opCost(value, Array(arrC.cost), costOf(node)) RCCostedPrim(value, cost, SizeLong) case Xor(InCollByte(l), InCollByte(r)) => @@ -1841,18 +1842,18 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => val sizes = r.sizes val len = sizes.length val costs = colBuilder.replicate(len, IntZero) - val cost = opCost(values, Seq(l.cost, r.cost), perKbCostOf(node, len.toLong)) + val cost = opCost(values, Array(l.cost, r.cost), perKbCostOf(node, len.toLong)) RCCostedColl(values, costs, sizes, cost) case SubstConstants(InCollByte(bytes), InCollInt(positions), InCollAny(newValues)) => val values = sigmaDslBuilder.substConstants(bytes.values, positions.values, newValues.values)(AnyElement) val len = bytes.size.dataSize + newValues.size.dataSize - val cost = opCost(values, Seq(bytes.cost, positions.cost, newValues.cost), perKbCostOf(node, len)) + val cost = opCost(values, Array(bytes.cost, positions.cost, newValues.cost), perKbCostOf(node, len)) mkCostedColl(values, len.toInt, cost) case DecodePoint(InCollByte(bytes)) => val res = sigmaDslBuilder.decodePoint(bytes.values) - RCCostedPrim(res, opCost(res, Seq(bytes.cost), costOf(node)), SizeGroupElement) + RCCostedPrim(res, opCost(res, Array(bytes.cost), costOf(node)), SizeGroupElement) // fallback rule for MethodCall, should be the last case in the list case Terms.MethodCall(obj, method, args, typeSubst) if method.objType.coster.isDefined => @@ -1901,7 +1902,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => try { assert(ruleStack.isEmpty) fun { ctxC: RCosted[Context] => - val env = envVals.mapValues(v => evalNode(ctxC, Map(), v)) + val env = 
envVals.mapValues(v => evalNode(ctxC, Map.empty, v)) val res = asCosted[T](evalNode(ctxC, env, tree)) res } diff --git a/src/main/scala/sigmastate/eval/TreeBuilding.scala b/src/main/scala/sigmastate/eval/TreeBuilding.scala index 9f0924437f..70a733d4a6 100644 --- a/src/main/scala/sigmastate/eval/TreeBuilding.scala +++ b/src/main/scala/sigmastate/eval/TreeBuilding.scala @@ -439,7 +439,7 @@ trait TreeBuilding extends RuntimeCosting { IR: IRContext => { val rhs = buildValue(ctx, mainG, curEnv, s, curId, constantsProcessing) curId += 1 - val vd = ValDef(curId, Seq(), rhs) + val vd = ValDef(curId, Nil, rhs) curEnv = curEnv + (s -> (curId, vd.tpe)) // assign valId to s, so it can be use in ValUse valdefs += vd } @@ -454,7 +454,7 @@ trait TreeBuilding extends RuntimeCosting { IR: IRContext => constantsProcessing: Option[ConstantStore] = None): Value[T] = { val Def(Lambda(lam,_,_,_)) = f val mainG = new PGraph(lam.y) - val block = processAstGraph(asRep[Context](lam.x), mainG, Map(), mainG, 0, constantsProcessing) + val block = processAstGraph(asRep[Context](lam.x), mainG, Map.empty, mainG, 0, constantsProcessing) block.asValue[T] } } diff --git a/src/main/scala/sigmastate/interpreter/Interpreter.scala b/src/main/scala/sigmastate/interpreter/Interpreter.scala index 21696f8306..91690f502f 100644 --- a/src/main/scala/sigmastate/interpreter/Interpreter.scala +++ b/src/main/scala/sigmastate/interpreter/Interpreter.scala @@ -58,9 +58,8 @@ trait Interpreter extends ScorexLogging { val (ctx1, script) = deserializeMeasured(context, scriptBytes) updateContext(ctx1) - CheckDeserializedScriptType(d, script) { - Some(script) - } + CheckDeserializedScriptType(d, script) + Some(script) case _ => None } else @@ -140,7 +139,7 @@ trait Interpreter extends ScorexLogging { val costF = costingRes.costF IR.onCostingResult(env, exp, costingRes) - CheckCostFunc(IR)(asRep[Any => Int](costF)) { } + CheckCostFunc(IR)(asRep[Any => Int](costF)) val costingCtx = context.toSigmaContext(IR, isCost = true) 
val estimatedCost = IR.checkCostWithContext(costingCtx, exp, costF, maxCost, initCost) @@ -150,7 +149,7 @@ trait Interpreter extends ScorexLogging { // check calc val calcF = costingRes.calcF - CheckCalcFunc(IR)(calcF) { } + CheckCalcFunc(IR)(calcF) val calcCtx = context.toSigmaContext(IR, isCost = false) val res = calcResult(calcCtx, calcF) SigmaDsl.toSigmaBoolean(res) -> estimatedCost @@ -241,7 +240,7 @@ trait Interpreter extends ScorexLogging { checkingResult -> cost }) if (outputComputedResults) { - res.foreach { case (ok, cost) => + res.foreach { case (_, cost) => val scaledCost = cost * 1 // this is the scale factor of CostModel with respect to the concrete hardware val timeMicro = t * 1000 // time in microseconds val error = if (scaledCost > timeMicro) { diff --git a/src/main/scala/sigmastate/lang/SigmaBuilder.scala b/src/main/scala/sigmastate/lang/SigmaBuilder.scala index eeb030796e..8817828247 100644 --- a/src/main/scala/sigmastate/lang/SigmaBuilder.scala +++ b/src/main/scala/sigmastate/lang/SigmaBuilder.scala @@ -28,7 +28,7 @@ import special.sigma.{AvlTree, GroupElement, SigmaProp} import sigmastate.lang.SigmaTyper.STypeSubst import sigmastate.serialization.OpCodes.OpCode import special.sigma.{GroupElement, SigmaProp} - +import spire.syntax.all.cfor import scala.util.DynamicVariable trait SigmaBuilder { @@ -651,13 +651,19 @@ class StdSigmaBuilder extends SigmaBuilder { trait TypeConstraintCheck { + /** @hotspot called during script deserialization (don't beautify this code) + * @consensus + */ def check2[T <: SType](left: Value[T], right: Value[T], - constraints: Seq[TypeConstraint2]): Unit = - constraints.foreach { c => + constraints: Seq[TypeConstraint2]): Unit = { + val n = constraints.length + cfor(0)(_ < n, _ + 1) { i => + val c = constraints(i) // to be efficient constraints should be WrappedArray (not List) if (!c(left.tpe, right.tpe)) throw new ConstraintFailed(s"Failed constraint $c for binary operation parameters ($left(tpe: ${left.tpe}), 
$right(tpe: ${right.tpe}))") } + } } trait TransformingSigmaBuilder extends StdSigmaBuilder with TypeConstraintCheck { @@ -677,16 +683,16 @@ trait TransformingSigmaBuilder extends StdSigmaBuilder with TypeConstraintCheck right: Value[T], cons: (Value[T], Value[T]) => R): R = { val (l, r) = applyUpcast(left, right) - check2(l, r, Seq(sameType2)) + check2(l, r, Array(sameType2)) cons(l, r) } override protected def comparisonOp[T <: SType, R](left: Value[T], right: Value[T], cons: (Value[T], Value[T]) => R): R = { - check2(left, right, Seq(onlyNumeric2)) + check2(left, right, Array(onlyNumeric2)) val (l, r) = applyUpcast(left, right) - check2(l, r, Seq(sameType2)) + check2(l, r, Array(sameType2)) cons(l, r) } @@ -703,21 +709,21 @@ trait CheckingSigmaBuilder extends StdSigmaBuilder with TypeConstraintCheck { override protected def equalityOp[T <: SType, R](left: Value[T], right: Value[T], cons: (Value[T], Value[T]) => R): R = { - check2(left, right, Seq(sameType2)) + check2(left, right, Array(sameType2)) cons(left, right) } override protected def comparisonOp[T <: SType, R](left: Value[T], right: Value[T], cons: (Value[T], Value[T]) => R): R = { - check2(left, right, Seq(onlyNumeric2, sameType2)) + check2(left, right, Array(onlyNumeric2, sameType2)) cons(left, right) } override protected def arithOp[T <: SNumericType, R](left: Value[T], right: Value[T], cons: (Value[T], Value[T]) => R): R = { - check2(left, right, Seq(sameType2)) + check2(left, right, Array(sameType2)) cons(left, right) } } diff --git a/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala b/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala index 7da4caa470..34fe32bfaa 100644 --- a/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala +++ b/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala @@ -33,6 +33,6 @@ class CosterException(message: String, source: Option[SourceContext], cause: Opt class InterpreterException(message: String, source: Option[SourceContext] = None, cause: 
Option[Throwable] = None) extends SigmaException(message, source, cause) -class CostLimitException(estimatedCost: Long, message: String, cause: Option[Throwable] = None) +class CostLimitException(val estimatedCost: Long, message: String, cause: Option[Throwable] = None) extends SigmaException(message, None, cause) diff --git a/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala b/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala index 3b5a092094..03b967d664 100644 --- a/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala @@ -4,6 +4,7 @@ import sigmastate.{SCollection, SBoolean, ArgInfo} import sigmastate.Values._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import SigmaByteWriter._ +import spire.syntax.all.cfor case class ConcreteCollectionBooleanConstantSerializer(cons: (IndexedSeq[Value[SBoolean.type]], SBoolean.type) => Value[SCollection[SBoolean.type]]) extends ValueSerializer[ConcreteCollection[SBoolean.type]] { @@ -19,9 +20,14 @@ case class ConcreteCollectionBooleanConstantSerializer(cons: (IndexedSeq[Value[S maxBitsInfo("bits", 0x1FFF, "Boolean values encoded as as bits (right most byte is zero-padded on the right)")) } + /** @hotspot don't beautify this code */ override def parse(r: SigmaByteReader): Value[SCollection[SBoolean.type]] = { - val size = r.getUShort() - val booleanConstants = r.getBits(size).map(v => BooleanConstant.fromBoolean(v)) - cons(booleanConstants, SBoolean) + val size = r.getUShort() // READ + val bits = r.getBits(size) // READ + val items = new Array[BoolValue](size) + cfor(0)(_ < size, _ + 1) { i => + items(i) = BooleanConstant.fromBoolean(bits(i)) + } + cons(items, SBoolean) } } diff --git a/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala 
b/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala index cc5d86adba..5449e717f4 100644 --- a/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala @@ -4,6 +4,7 @@ import sigmastate.{SCollection, SType, ArgInfo} import sigmastate.Values._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ +import spire.syntax.all.cfor case class ConcreteCollectionSerializer(cons: (IndexedSeq[Value[SType]], SType) => Value[SCollection[SType]]) extends ValueSerializer[ConcreteCollection[_ <: SType]] { @@ -15,10 +16,14 @@ case class ConcreteCollectionSerializer(cons: (IndexedSeq[Value[SType]], SType) foreach("numItems", cc.items)(w.putValue(_, ArgInfo("item_i", "expression in i-th position"))) } + /** @hotspot don't beautify this code */ override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { - val size = r.getUShort() - val tItem = r.getType() - val values = (1 to size).map(_ => r.getValue()) + val size = r.getUShort() // READ + val tItem = r.getType() // READ + val values = new Array[SValue](size) + cfor(0)(_ < size, _ + 1) { i => + values(i) = r.getValue() // READ + } assert(values.forall(_.tpe == tItem), s"Invalid type of collection value in $values") cons(values, tItem) } diff --git a/src/main/scala/sigmastate/serialization/ConstantStore.scala b/src/main/scala/sigmastate/serialization/ConstantStore.scala index 7ad509c94e..70e9bbf8a6 100644 --- a/src/main/scala/sigmastate/serialization/ConstantStore.scala +++ b/src/main/scala/sigmastate/serialization/ConstantStore.scala @@ -3,22 +3,21 @@ package sigmastate.serialization import sigmastate.SType import sigmastate.Values.{Constant, ConstantNode, ConstantPlaceholder} import sigmastate.lang.SigmaBuilder +import debox.Buffer -import scala.collection.mutable.ArrayBuffer +/** @hotspot used in deserialization (don't beautify this code) */ +class ConstantStore(private 
val constants: IndexedSeq[Constant[SType]] = Array[Constant[SType]]()) { -class ConstantStore(private val constants: IndexedSeq[Constant[SType]] = IndexedSeq()) { - - private val store: ArrayBuffer[Constant[SType]] = new ArrayBuffer[Constant[SType]]() - store ++= constants + private val store: Buffer[Constant[SType]] = Buffer.fromIterable(constants) def put[T <: SType](c: Constant[T])(implicit builder: SigmaBuilder): ConstantPlaceholder[T] = { store += c.asInstanceOf[Constant[SType]] val tpe = c.asInstanceOf[ConstantNode[T]].tpe - builder.mkConstantPlaceholder[tpe.type](store.size - 1, tpe) + builder.mkConstantPlaceholder[tpe.type](store.length - 1, tpe) .asInstanceOf[sigmastate.Values.ConstantPlaceholder[T]] } - def get(index: Int): Constant[SType] = store(index) + @inline final def get(index: Int): Constant[SType] = store(index) - def getAll: IndexedSeq[Constant[SType]] = store.toIndexedSeq + @inline final def getAll: IndexedSeq[Constant[SType]] = store.toArray() } diff --git a/src/main/scala/sigmastate/serialization/DataSerializer.scala b/src/main/scala/sigmastate/serialization/DataSerializer.scala index 479dd92a46..d409193fea 100644 --- a/src/main/scala/sigmastate/serialization/DataSerializer.scala +++ b/src/main/scala/sigmastate/serialization/DataSerializer.scala @@ -13,7 +13,7 @@ import sigmastate.eval.{Evaluation, _} import sigmastate.lang.exceptions.SerializerException import special.collection._ import special.sigma._ - +import spire.syntax.all.cfor import scala.collection.mutable /** This works in tandem with ConstantSerializer, if you change one make sure to check the other.*/ @@ -49,15 +49,19 @@ object DataSerializer { case SAvlTree => AvlTreeData.serializer.serialize(avlTreeToAvlTreeData(v.asInstanceOf[AvlTree]), w) case tColl: SCollectionType[a] => - val arr = v.asInstanceOf[tColl.WrappedType] - w.putUShort(arr.length) + val coll = v.asInstanceOf[tColl.WrappedType] + w.putUShort(coll.length) tColl.elemType match { case SBoolean => - 
w.putBits(arr.asInstanceOf[Coll[Boolean]].toArray) + w.putBits(coll.asInstanceOf[Coll[Boolean]].toArray) case SByte => - w.putBytes(arr.asInstanceOf[Coll[Byte]].toArray) + w.putBytes(coll.asInstanceOf[Coll[Byte]].toArray) case _ => - arr.toArray.foreach(x => serialize(x, tColl.elemType, w)) + val arr = coll.toArray + cfor(0)(_ < arr.length, _ + 1) { i => + val x = arr(i) + serialize(x, tColl.elemType, w) + } } case t: STuple => @@ -118,9 +122,8 @@ object DataSerializer { val coll = Colls.fromArray(arr)(RType.AnyType) Evaluation.toDslTuple(coll, tuple) case t => - CheckSerializableTypeCode(t.typeCode) { - throw new SerializerException(s"Not defined DataSerializer for type $t") - } + CheckSerializableTypeCode(t.typeCode) + throw new SerializerException(s"Not defined DataSerializer for type $t") }).asInstanceOf[T#WrappedType] r.level = r.level - 1 res diff --git a/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala b/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala index 8bf824e1dd..f49f7dd61c 100644 --- a/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala @@ -11,8 +11,7 @@ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigma.util.Extensions._ import sigmastate.Values.ErgoTree.EmptyConstants import sigmastate.utxo.ComplexityTable - -import scala.collection.mutable +import spire.syntax.all.cfor /** * Rationale for soft-forkable ErgoTree serialization. 
@@ -149,12 +148,19 @@ class ErgoTreeSerializer { r.constantStore = new ConstantStore(cs) val root = ValueSerializer.deserialize(r) - if (checkType) - CheckDeserializedScriptIsSigmaProp(root) {} + if (checkType) { + CheckDeserializedScriptIsSigmaProp(root) + } r.constantStore = previousConstantStore val complexity = r.complexity - new ErgoTree(h, cs, Right(root.asSigmaProp), complexity) + + // now we know the end position of propositionBytes, read them all at once into array + val treeSize = r.position - startPos + r.position = startPos + val propositionBytes = r.getBytes(treeSize) + + new ErgoTree(h, cs, Right(root.asSigmaProp), complexity, propositionBytes) } catch { case e: InputSizeLimitExceeded => @@ -169,7 +175,7 @@ class ErgoTreeSerializer { r.position = startPos val bytes = r.getBytes(numBytes) val complexity = ComplexityTable.OpCodeComplexity(Constant.opCode) - new ErgoTree(ErgoTree.DefaultHeader, EmptyConstants, Left(UnparsedErgoTree(bytes, ve)), complexity) + new ErgoTree(ErgoTree.DefaultHeader, EmptyConstants, Left(UnparsedErgoTree(bytes, ve)), complexity, bytes) case None => throw new SerializerException( s"Cannot handle ValidationException, ErgoTree serialized without size bit.", None, Some(ve)) @@ -194,16 +200,19 @@ class ErgoTreeSerializer { (header, sizeOpt) } - /** Deserialize constants section only. */ + private val constantSerializer = ConstantSerializer(DeserializationSigmaBuilder) + + /** Deserialize constants section only. 
+ * @hotspot don't beautify this code + */ private def deserializeConstants(header: Byte, r: SigmaByteReader): Array[Constant[SType]] = { val constants = if (ErgoTree.isConstantSegregation(header)) { - val constantSerializer = ConstantSerializer(DeserializationSigmaBuilder) val nConsts = r.getUInt().toInt - val builder = mutable.ArrayBuilder.make[Constant[SType]]() - for (_ <- 0 until nConsts) { - builder += constantSerializer.deserialize(r) + val res = new Array[Constant[SType]](nConsts) + cfor(0)(_ < nConsts, _ + 1) { i => + res(i) = constantSerializer.deserialize(r) } - builder.result + res } else Array.empty[Constant[SType]] diff --git a/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala b/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala index ac7e5bc196..acf13b5078 100644 --- a/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala +++ b/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala @@ -6,6 +6,7 @@ import sigmastate.lang.SigmaTyper.STypeSubst import sigmastate.lang.Terms.MethodCall import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.ComplexityTable +import spire.syntax.all.cfor case class MethodCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[SType]], STypeSubst) => Value[SType]) extends ValueSerializer[MethodCall] { @@ -31,6 +32,7 @@ case class MethodCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[S * This is limitation of MethodCall, because we cannot use it to represent for example * def Box.getReg[T](id: Int): Option[T], which require serialization of expected type `T` * However it can be implemented using separate node type (new type code) and can be added via soft-fork. 
+ * @hotspot don't beautify this code */ override def parse(r: SigmaByteReader): Value[SType] = { val typeId = r.getByte() @@ -40,7 +42,12 @@ case class MethodCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[S val method = SMethod.fromIds(typeId, methodId) val complexity = ComplexityTable.MethodCallComplexity.getOrElse((typeId, methodId), ComplexityTable.MinimalComplexity) r.addComplexity(complexity) - val specMethod = method.specializeFor(obj.tpe, args.map(_.tpe)) - cons(obj, specMethod, args, Map()) + val nArgs = args.length + val types = new Array[SType](nArgs) + cfor(0)(_ < nArgs, _ + 1) { i => + types(i) = args(i).tpe + } + val specMethod = method.specializeFor(obj.tpe, types) + cons(obj, specMethod, args, Map.empty) } } diff --git a/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala b/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala index f66f58a053..bc99becd19 100644 --- a/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala +++ b/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala @@ -2,13 +2,11 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ -import sigmastate.serialization.OpCodes._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class TaggedVariableSerializer(cons: (Byte, SType) => Value[SType]) extends ValueSerializer[TaggedVariable[_ <: SType]] { override def opDesc = TaggedVariable - override def opCode: OpCode = TaggedVariableCode override def serialize(obj: TaggedVariable[_ <: SType], w: SigmaByteWriter): Unit = w.put(obj.varId) diff --git a/src/main/scala/sigmastate/serialization/TypeSerializer.scala b/src/main/scala/sigmastate/serialization/TypeSerializer.scala index 9bb7292da3..cf0e5dcd4b 100644 --- a/src/main/scala/sigmastate/serialization/TypeSerializer.scala +++ b/src/main/scala/sigmastate/serialization/TypeSerializer.scala @@ -17,10 +17,10 @@ object TypeSerializer extends ByteBufferSerializer[SType] { 
val embeddableIdToType = Array[SType](null, SBoolean, SByte, SShort, SInt, SLong, SBigInt, SGroupElement, SSigmaProp) - def getEmbeddableType(code: Int): SType = - CheckPrimitiveTypeCode(code.toByte) { - embeddableIdToType(code) - } + def getEmbeddableType(code: Int): SType = { + CheckPrimitiveTypeCode(code.toByte) + embeddableIdToType(code) + } override def serialize(tpe: SType, w: SigmaByteWriter) = tpe match { case p: SEmbeddable => w.put(p.typeCode) @@ -191,7 +191,8 @@ object TypeSerializer extends ByteBufferSerializer[SType] { case SPreHeader.typeCode => SPreHeader case SGlobal.typeCode => SGlobal case _ => - CheckTypeCode(c.toByte) { NoType } + CheckTypeCode(c.toByte) + NoType } } tpe diff --git a/src/main/scala/sigmastate/serialization/ValDefSerializer.scala b/src/main/scala/sigmastate/serialization/ValDefSerializer.scala index 0ed749cb28..084e49ec73 100644 --- a/src/main/scala/sigmastate/serialization/ValDefSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ValDefSerializer.scala @@ -6,7 +6,7 @@ import sigmastate.serialization.OpCodes._ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ -import scala.collection.mutable +import spire.syntax.all.cfor case class ValDefSerializer(override val opDesc: ValueCompanion) extends ValueSerializer[ValDef] { @@ -27,14 +27,14 @@ case class ValDefSerializer(override val opDesc: ValueCompanion) extends ValueSe val id = r.getUInt().toInt val tpeArgs: Seq[STypeVar] = opCode match { case FunDefCode => - val tpeArgsCount = r.getByte() - val inputsBuilder = mutable.ArrayBuilder.make[STypeVar]() - for (_ <- 0 until tpeArgsCount) { - inputsBuilder += r.getType().asInstanceOf[STypeVar] + val nTpeArgs = r.getByte() + val inputs = new Array[STypeVar](nTpeArgs) + cfor(0)(_ < nTpeArgs, _ + 1) { i => + inputs(i) = r.getType().asInstanceOf[STypeVar] } - inputsBuilder.result() + inputs case ValDefCode => - Seq() + Nil } val rhs = r.getValue() 
r.valDefTypeStore(id) = rhs.tpe diff --git a/src/main/scala/sigmastate/serialization/ValueSerializer.scala b/src/main/scala/sigmastate/serialization/ValueSerializer.scala index df8949a55b..a0b0ca03b5 100644 --- a/src/main/scala/sigmastate/serialization/ValueSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ValueSerializer.scala @@ -29,7 +29,7 @@ trait ValueSerializer[V <: Value[SType]] extends SigmaSerializer[Value[SType], V def opDesc: ValueCompanion /** Code of the corresponding tree node (Value.opCode) which is used to lookup this serizalizer * during deserialization. It is emitted immediately before the body of this node in serialized byte array. */ - def opCode: OpCode = opDesc.opCode + @inline final def opCode: OpCode = opDesc.opCode def opCost(opId: OperationId): ExpressionCost = sys.error(s"Operation opCost is not defined for AST node ${this.getClass}") @@ -157,7 +157,8 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { override def getSerializer(opCode: OpCode): ValueSerializer[_ <: Value[SType]] = { val serializer = serializers(opCode) - CheckValidOpCode(serializer, opCode) { serializer } + CheckValidOpCode(serializer, opCode) + serializer } def addSerializer(opCode: OpCode, ser: ValueSerializer[_ <: Value[SType]]) = { serializers.add(opCode, ser) @@ -382,14 +383,14 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { override def deserialize(r: SigmaByteReader): Value[SType] = { val depth = r.level r.level = depth + 1 - val firstByte = r.peekByte().toUByte + val firstByte = toUByte(r.peekByte()) val v = if (firstByte <= LastConstantCode) { // look ahead byte tell us this is going to be a Constant r.addComplexity(constantSerializer.complexity) constantSerializer.deserialize(r) } else { - val opCode = OpCode @@ r.getByte() + val opCode = r.getByte().asInstanceOf[OpCode] val ser = getSerializer(opCode) r.addComplexity(ser.complexity) ser.parse(r) diff --git 
a/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala b/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala index 175ed8de70..1270938404 100644 --- a/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala +++ b/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala @@ -2,7 +2,6 @@ package sigmastate.serialization.trees import sigmastate.Values._ import sigmastate._ -import sigmastate.lang.Terms._ import sigmastate.serialization.OpCodes._ import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} @@ -34,16 +33,17 @@ case class Relation2Serializer[S1 <: SType, S2 <: SType, R <: Value[SBoolean.typ } } + /** @hotspot don't beautify this code */ override def parse(r: SigmaByteReader): R = { if (r.peekByte() == ConcreteCollectionBooleanConstantCode) { val _ = r.getByte() // skip collection op code val booleans = r.getBits(2) - val firstArg = BooleanConstant.fromBoolean(booleans(0)).asValue[S1] - val secondArg = BooleanConstant.fromBoolean(booleans(1)).asValue[S2] + val firstArg = BooleanConstant.fromBoolean(booleans(0)).asInstanceOf[Value[S1]] + val secondArg = BooleanConstant.fromBoolean(booleans(1)).asInstanceOf[Value[S2]] constructor(firstArg, secondArg).asInstanceOf[R] } else { - val firstArg = r.getValue().asValue[S1] - val secondArg = r.getValue().asValue[S2] + val firstArg = r.getValue().asInstanceOf[Value[S1]] + val secondArg = r.getValue().asInstanceOf[Value[S2]] constructor(firstArg, secondArg).asInstanceOf[R] } } diff --git a/src/main/scala/sigmastate/types.scala b/src/main/scala/sigmastate/types.scala index ab2bd9f6fb..8a35f39011 100644 --- a/src/main/scala/sigmastate/types.scala +++ b/src/main/scala/sigmastate/types.scala @@ -254,7 +254,9 @@ trait STypeCompanion { * It delegate to getMethodById to lookup method. 
* @see getMethodById */ - def methodById(methodId: Byte): SMethod = ValidationRules.CheckAndGetMethod(this, methodId) { m => m } + def methodById(methodId: Byte): SMethod = { + ValidationRules.CheckAndGetMethod(this, methodId) + } def getMethodByName(name: String): SMethod = methods.find(_.name == name).get @@ -274,7 +276,7 @@ trait SProduct extends SType { /** This method should be overriden in derived classes to add new methods in addition to inherited. * Typical override: `super.getMethods() ++ Seq(m1, m2, m3)` */ - protected def getMethods(): Seq[SMethod] = Seq() + protected def getMethods(): Seq[SMethod] = Nil /** Returns all the methods of this type. */ lazy val methods: Seq[SMethod] = { @@ -401,9 +403,8 @@ object SMethod { } def fromIds(typeId: Byte, methodId: Byte): SMethod = { - val typeCompanion = ValidationRules.CheckTypeWithMethods(typeId, SType.types.contains(typeId)) { - SType.types(typeId) - } + ValidationRules.CheckTypeWithMethods(typeId, SType.types.contains(typeId)) + val typeCompanion = SType.types(typeId) val method = typeCompanion.methodById(methodId) method } @@ -1229,8 +1230,8 @@ case class STuple(items: IndexedSeq[SType]) extends SCollection[SAny.type] { override def mkConstant(v: Coll[Any]): Value[this.type] = Constant[STuple](v, this).asValue[this.type] - val typeParams = Seq() - val tparamSubst = Map() + val typeParams = Nil + val tparamSubst = Map.empty override def toTermString = s"(${items.map(_.toTermString).mkString(",")})" override def toString = s"(${items.mkString(",")})" @@ -1292,7 +1293,7 @@ case class SFunc(tDom: IndexedSeq[SType], tRange: SType, tpeParams: Seq[STypePa override def dataSize(v: SType#WrappedType) = 8L import SFunc._ val typeParams: Seq[STypeParam] = tpeParams - val tparamSubst: Map[STypeVar, SType] = Map() // defined in MethodCall.typeSubst + val tparamSubst: Map[STypeVar, SType] = Map.empty // defined in MethodCall.typeSubst def getGenericType: SFunc = { val typeParams: Seq[STypeParam] = tDom.zipWithIndex @@ 
-1307,7 +1308,7 @@ object SFunc { val tD = STypeVar("D") val tR = STypeVar("R") final val FuncTypeCode: TypeCode = OpCodes.FirstFuncType - def apply(tDom: SType, tRange: SType): SFunc = SFunc(IndexedSeq(tDom), tRange) + def apply(tDom: SType, tRange: SType): SFunc = SFunc(Array(tDom), tRange) // @hotspot val identity = { x: Any => x } } diff --git a/src/main/scala/sigmastate/utils/Helpers.scala b/src/main/scala/sigmastate/utils/Helpers.scala index 4feff629b7..d47596c987 100644 --- a/src/main/scala/sigmastate/utils/Helpers.scala +++ b/src/main/scala/sigmastate/utils/Helpers.scala @@ -66,6 +66,12 @@ object Helpers { result } + def castArray[A, B >: A : ClassTag](array: Array[A]): Array[B] = { + val result: Array[B] = new Array[B](array.length) + System.arraycopy(array, 0, result, 0, array.length) + result + } + def deepHashCode[T](arr: Array[T]): Int = arr match { case arr: Array[AnyRef] => util.Arrays.deepHashCode(arr) case arr: Array[Byte] => util.Arrays.hashCode(arr) diff --git a/src/main/scala/sigmastate/utils/SigmaByteReader.scala b/src/main/scala/sigmastate/utils/SigmaByteReader.scala index e54f30edf5..d133495663 100644 --- a/src/main/scala/sigmastate/utils/SigmaByteReader.scala +++ b/src/main/scala/sigmastate/utils/SigmaByteReader.scala @@ -120,22 +120,22 @@ class SigmaByteReader(val r: Reader, for (i <- 0 until size) { xs(i) = getValue() } - xs.toIndexedSeq + xs } private var positionLmt: Int = r.position + r.remaining - @inline def positionLimit: Int = positionLmt - @inline def positionLimit_=(v: Int): Unit = { + @inline final def positionLimit: Int = positionLmt + @inline final def positionLimit_=(v: Int): Unit = { positionLmt = v } private var _complexity: Int = 0 - @inline def complexity: Int = _complexity - @inline def complexity_=(v: Int): Unit = { + @inline final def complexity: Int = _complexity + @inline final def complexity_=(v: Int): Unit = { _complexity = v } - @inline def addComplexity(delta: Int): Unit = { + @inline final def 
addComplexity(delta: Int): Unit = { _complexity += delta } } diff --git a/src/test/scala/sigmastate/CostingSpecification.scala b/src/test/scala/sigmastate/CostingSpecification.scala index 24e274a665..a053be663a 100644 --- a/src/test/scala/sigmastate/CostingSpecification.scala +++ b/src/test/scala/sigmastate/CostingSpecification.scala @@ -357,8 +357,6 @@ class CostingSpecification extends SigmaTestingData { property("ErgoTree with TrueLeaf costs") { val tree = ErgoTree(16, IndexedSeq(TrueLeaf), BoolToSigmaProp(ConstantPlaceholder(0, SBoolean))) - tree.toString shouldBe - "ErgoTree(16,Vector(TrueLeaf$(127)),Right(BoolToSigmaProp(ConstantPlaceholder(0,SBoolean))),0)" val pr = interpreter.prove(tree, context, fakeMessage).get val expressionCost = @@ -377,4 +375,10 @@ class CostingSpecification extends SigmaTestingData { cost shouldBe expectedCost } + property("laziness of AND, OR costs") { + cost("{ val cond = getVar[Boolean](2).get; !(!cond && (1 / 0 == 1)) }")( + ContextVarAccess + constCost * 2 + logicCost * 3 + multiply + comparisonCost) + cost("{ val cond = getVar[Boolean](2).get; (cond || (1 / 0 == 1)) }")( + ContextVarAccess + constCost * 2 + logicCost + multiply + comparisonCost) + } } \ No newline at end of file diff --git a/src/test/scala/sigmastate/SoftForkabilitySpecification.scala b/src/test/scala/sigmastate/SoftForkabilitySpecification.scala index 69dbc4512a..e857645e46 100644 --- a/src/test/scala/sigmastate/SoftForkabilitySpecification.scala +++ b/src/test/scala/sigmastate/SoftForkabilitySpecification.scala @@ -317,7 +317,7 @@ class SoftForkabilitySpecification extends SigmaTestingData { checkRule(CheckCostFuncOperation, v2vs, { val costingRes = IR.doCostingEx(emptyEnv, exp, okRemoveIsProven = false) // use calcF as costing function to have forbidden (not allowed) op (Height) in the costing function - CheckCostFunc(IR)(IR.asRep[Any => Int](costingRes.calcF)) { } + CheckCostFunc(IR)(IR.asRep[Any => Int](costingRes.calcF)) }) } @@ -333,7 +333,7 @@ class 
SoftForkabilitySpecification extends SigmaTestingData { implicit val anyType = AnyElement val v1 = variable[Int] val costF = fun[Any, Int] {_ => opCost(v1, Seq(1), 2) } - CheckCostFunc(tIR)(asRep[Any => Int](costF)) { } + CheckCostFunc(tIR)(asRep[Any => Int](costF)) }) } } diff --git a/src/test/scala/sigmastate/helpers/ContextEnrichingProverInterpreter.scala b/src/test/scala/sigmastate/helpers/ContextEnrichingProverInterpreter.scala index df2d5fb521..ccd8c0d467 100644 --- a/src/test/scala/sigmastate/helpers/ContextEnrichingProverInterpreter.scala +++ b/src/test/scala/sigmastate/helpers/ContextEnrichingProverInterpreter.scala @@ -17,7 +17,7 @@ import scala.util.Try */ trait ContextEnrichingProverInterpreter extends ProverInterpreter { - def contextExtenders: Map[Byte, EvaluatedValue[_ <: SType]] = Map() + def contextExtenders: Map[Byte, EvaluatedValue[_ <: SType]] = Map.empty val knownExtensions = ContextExtension(contextExtenders) diff --git a/src/test/scala/sigmastate/serialization/DeserializationResilience.scala b/src/test/scala/sigmastate/serialization/DeserializationResilience.scala index 12a9ede23d..9a96af0403 100644 --- a/src/test/scala/sigmastate/serialization/DeserializationResilience.scala +++ b/src/test/scala/sigmastate/serialization/DeserializationResilience.scala @@ -2,25 +2,34 @@ package sigmastate.serialization import java.nio.ByteBuffer -import org.ergoplatform.{Outputs, ErgoBoxCandidate} import org.ergoplatform.validation.ValidationException +import org.ergoplatform.{ErgoBoxCandidate, ErgoLikeContext, Outputs} import org.scalacheck.Gen -import scorex.util.serialization.{VLQByteBufferReader, Reader} -import sigmastate.Values.{SigmaBoolean, Tuple, SValue, IntConstant} +import scalan.util.BenchmarkUtil +import scorex.util.serialization.{Reader, VLQByteBufferReader} +import sigmastate.Values.{BlockValue, ErgoTree, GetVarInt, IntConstant, SValue, SigmaBoolean, SigmaPropValue, Tuple, ValDef, ValUse} import sigmastate._ -import 
sigmastate.interpreter.CryptoConstants -import sigmastate.lang.exceptions.{InvalidTypePrefix, SerializerException, InputSizeLimitExceeded, DeserializeCallDepthExceeded} +import sigmastate.eval.Extensions._ +import sigmastate.eval._ +import sigmastate.helpers.{ErgoLikeTestInterpreter, SigmaTestingCommons} +import sigmastate.interpreter.Interpreter.{ScriptNameProp, emptyEnv} +import sigmastate.interpreter.{ContextExtension, CostedProverResult, CryptoConstants} +import sigmastate.lang.Terms._ +import sigmastate.lang.exceptions.{DeserializeCallDepthExceeded, InputSizeLimitExceeded, InvalidTypePrefix, SerializerException} import sigmastate.serialization.OpCodes._ import sigmastate.utils.SigmaByteReader import sigmastate.utxo.SizeOf -import sigmastate.eval._ -import sigmastate.eval.Extensions._ -import sigmastate.helpers.SigmaTestingCommons import scala.collection.mutable class DeserializationResilience extends SerializationSpecification with SigmaTestingCommons { + implicit lazy val IR: TestingIRContext = new TestingIRContext { + // substFromCostTable = false + saveGraphsInFile = false + // override val okPrintEvaluatedEntries = true + } + private def reader(bytes: Array[Byte], maxTreeDepth: Int): SigmaByteReader = { val buf = ByteBuffer.wrap(bytes) val r = new SigmaByteReader( @@ -233,4 +242,40 @@ class DeserializationResilience extends SerializationSpecification with SigmaTes an[InputSizeLimitExceeded] should be thrownBy ErgoBoxCandidate.serializer.parse(SigmaSerializer.startReader(bytes)) } + + private val recursiveScript: SigmaPropValue = BlockValue( + Vector( + ValDef(1, Plus(GetVarInt(4).get, ValUse(2, SInt))), + ValDef(2, Plus(GetVarInt(5).get, ValUse(1, SInt)))), + GE(Minus(ValUse(1, SInt), ValUse(2, SInt)), 0)).asBoolValue.toSigmaProp + + property("recursion caught during deserialization") { + assertExceptionThrown({ + checkSerializationRoundTrip(recursiveScript) + }, + { + case e: NoSuchElementException => e.getMessage.contains("key not found: 2") + case _ => 
false + }) + } + + property("recursion caught during verify") { + assertExceptionThrown({ + val verifier = new ErgoLikeTestInterpreter + val pr = CostedProverResult(Array[Byte](), ContextExtension(Map()), 0L) + val ctx = ErgoLikeContext.dummy(fakeSelf) + val (res, calcTime) = BenchmarkUtil.measureTime { + verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), + ErgoTree(ErgoTree.DefaultHeader, IndexedSeq(), recursiveScript), ctx, pr, fakeMessage) + } + res.fold(t => throw t, identity) + }, { + case e: NoSuchElementException => + // this is expected because deserialization is forced when ErgoTree.complexity is accessed in verify + e.getMessage.contains("key not found: 2") + case _ => false + }) + } + + } diff --git a/src/test/scala/sigmastate/serialization/generators/ObjectGenerators.scala b/src/test/scala/sigmastate/serialization/generators/ObjectGenerators.scala index de73ca1db0..08561784f6 100644 --- a/src/test/scala/sigmastate/serialization/generators/ObjectGenerators.scala +++ b/src/test/scala/sigmastate/serialization/generators/ObjectGenerators.scala @@ -166,7 +166,7 @@ trait ObjectGenerators extends TypeGenerators with ValidationSpecification with def additionalRegistersGen(cnt: Byte): Seq[Gen[(NonMandatoryRegisterId, EvaluatedValue[SType])]] = { (0 until cnt) .map(_ + ErgoBox.startingNonMandatoryIndex) - .map(rI => ErgoBox.registerByIndex(rI.toByte).asInstanceOf[NonMandatoryRegisterId]) + .map(rI => ErgoBox.registerByIndex(rI).asInstanceOf[NonMandatoryRegisterId]) .map { r => for { arr <- byteArrayConstGen @@ -552,7 +552,7 @@ trait ObjectGenerators extends TypeGenerators with ValidationSpecification with val valDefGen: Gen[ValDef] = for { id <- unsignedIntGen rhs <- booleanExprGen - } yield ValDef(id, Seq(), rhs) + } yield ValDef(id, Nil, rhs) val funDefGen: Gen[ValDef] = for { id <- unsignedIntGen diff --git a/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala b/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala index 
76196ff959..78e5ba71a7 100644 --- a/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala +++ b/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala @@ -487,13 +487,14 @@ class BasicOpsSpecification extends SigmaTestingCommons { ) } - check(BigInteger.TWO.negate().pow(255), false) - check(BigInteger.TWO.negate().pow(256).subtract(BigInteger.ONE), true) - check(BigInteger.TWO.pow(255).subtract(BigInteger.ONE), false) - check(BigInteger.TWO.pow(255), true) - check(BigInteger.TWO.pow(255).add(BigInteger.ONE), true) - check(BigInteger.TWO.pow(256), true) - check(BigInteger.TWO.negate().pow(256).subtract(BigInteger.ONE), true) + val two = BigInteger.valueOf(2) // BigInteger.TWO is not exported in JDK 1.8 + check(two.negate().pow(255), false) + check(two.negate().pow(256).subtract(BigInteger.ONE), true) + check(two.pow(255).subtract(BigInteger.ONE), false) + check(two.pow(255), true) + check(two.pow(255).add(BigInteger.ONE), true) + check(two.pow(256), true) + check(two.negate().pow(256).subtract(BigInteger.ONE), true) } property("ExtractCreationInfo") { @@ -620,4 +621,20 @@ class BasicOpsSpecification extends SigmaTestingCommons { true ) } + + property("lazy OR") { + test("lazy OR", env, ext, + "true || ((1/0) == 1)", + null, + true + ) + } + + property("lazy AND") { + test("lazy AND", env, ext, + "(false && ((1/0) == 1)) == false", + null, + true + ) + } } diff --git a/src/test/scala/sigmastate/utxo/SerializationRoundTripSpec.scala b/src/test/scala/sigmastate/utxo/SerializationRoundTripSpec.scala index 2cb71955ee..f7864771e1 100644 --- a/src/test/scala/sigmastate/utxo/SerializationRoundTripSpec.scala +++ b/src/test/scala/sigmastate/utxo/SerializationRoundTripSpec.scala @@ -1,11 +1,15 @@ package sigmastate.utxo -import org.ergoplatform.{ErgoBoxCandidate, ErgoLikeTransaction, _} +import org.ergoplatform.{ErgoLikeTransaction, ErgoBoxCandidate, _} import org.scalatest.prop.GeneratorDrivenPropertyChecks -import org.scalatest.{Matchers, PropSpec} +import 
org.scalatest.{PropSpec, Matchers} +import scalan.util.BenchmarkUtil import sigmastate.helpers.SigmaTestingCommons -import sigmastate.interpreter.{ContextExtension, ProverResult} +import sigmastate.interpreter.{ProverResult, ContextExtension} import sigmastate.serialization.generators.ObjectGenerators +import debox.{Buffer => DBuffer} +import spire.algebra._ +import spire.std.int._ class SerializationRoundTripSpec extends PropSpec with GeneratorDrivenPropertyChecks @@ -13,6 +17,28 @@ class SerializationRoundTripSpec extends PropSpec with ObjectGenerators with SigmaTestingCommons { + case class Run(size: Int, time: Long) + + implicit val orderRun = Order.by((r: Run) => r.size) + + property("ErgoBoxCandidate: Serializer round trip benchmark") { + val runs = DBuffer.empty[Run] + forAll(MinSuccessful(20)) { t: ErgoBoxCandidate => + val (_, time) = BenchmarkUtil.measureTime { + var i = 0 + while (i < 100) { + roundTripTest(t)(ErgoBoxCandidate.serializer) + i += 1 + } + } + runs += Run(t.bytesWithNoRef.length, time) + } + runs.sort + for (r <- runs) { + println(s"Size: ${r.size}, Time: ${r.time}") + } + } + property("ErgoBoxCandidate: Serializer round trip") { forAll { t: ErgoBoxCandidate => roundTripTest(t)(ErgoBoxCandidate.serializer) } forAll { t: ErgoBoxCandidate => roundTripTestWithPos(t)(ErgoBoxCandidate.serializer) } diff --git a/src/test/scala/sigmastate/utxo/SpamSpecification.scala b/src/test/scala/sigmastate/utxo/SpamSpecification.scala index 54b9426588..e69de29bb2 100644 --- a/src/test/scala/sigmastate/utxo/SpamSpecification.scala +++ b/src/test/scala/sigmastate/utxo/SpamSpecification.scala @@ -1,1466 +0,0 @@ -package sigmastate.utxo - -import org.ergoplatform.ErgoBox._ -import org.ergoplatform.ErgoConstants.{MaxPropositionBytes, ScriptCostLimit} -import org.ergoplatform._ -import org.ergoplatform.validation.ValidationRules.CheckLoopLevelInCostFunction -import org.ergoplatform.validation.{ValidationException, ValidationRules} -import org.scalacheck.Gen -import 
scalan.util.BenchmarkUtil -import scalan.util.BenchmarkUtil.measureTime -import scorex.crypto.authds.avltree.batch.{BatchAVLProver, Insert, Lookup} -import scorex.crypto.authds.{ADKey, ADValue} -import scorex.crypto.hash.{Blake2b256, Digest32} -import scorex.util.encode.Base16 -import scorex.utils.Random -import sigmastate.SCollection.SByteArray -import sigmastate.Values._ -import sigmastate._ -import sigmastate.basics.DLogProtocol.ProveDlog -import sigmastate.eval._ -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, ErgoLikeTestProvingInterpreter, SigmaTestingCommons} -import sigmastate.interpreter.CryptoConstants.dlogGroup -import sigmastate.interpreter.Interpreter._ -import sigmastate.interpreter.{ContextExtension, CostedProverResult} -import sigmastate.lang.Terms._ -import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer -import sigmastate.serialization.SigmaSerializer -import sigmastate.serialization.generators.ObjectGenerators -import special.collection.Coll - -import scala.annotation.tailrec -import scala.util.{Success, Try} - -/** - * Suite of tests where a malicious prover tries to feed a verifier with a script which is costly to verify - */ -class SpamSpecification extends SigmaTestingCommons with ObjectGenerators { - implicit lazy val IR: TestingIRContext = new TestingIRContext { - // substFromCostTable = false - saveGraphsInFile = false - // override val okPrintEvaluatedEntries = true - } - - //we assume that verifier must finish verification of any script in less time than 1M hash calculations - // (for the Blake2b256 hash function over a single block input) - val Timeout: Long = { - val block = Array.fill(16)(0: Byte) - val hf = Blake2b256 - - //just in case to heat up JVM - (1 to 1000000).foreach(_ => hf(block)) - - val t0 = System.currentTimeMillis() - (1 to 1500000).foreach(_ => hf(block)) - val t = System.currentTimeMillis() - t - t0 - } - val NumInputs: Int = 10 - val NumOutputs: Int = 10 - 
val InputCostDefault: Int = 2000 - val CostLimit: Long = ScriptCostLimit.value - val Longs: Array[Long] = Array[Long](1, 2, 3, Long.MaxValue, Long.MinValue) - lazy val alice = new ContextEnrichingTestProvingInterpreter - lazy val alicePubKey: ProveDlog = alice.dlogSecrets.head.publicImage - val hugeSizeColl: Array[Byte] = Array.fill(1000000)(1.toByte) - val maxSizeColl: Array[Byte] = Array.fill(SigmaSerializer.MaxPropositionSize - 50)(2.toByte) - val coll10: Array[Byte] = Array.fill(10)(10.toByte) - val coll100: Array[Byte] = Array.fill(100)(100.toByte) - val coll1000: Array[Byte] = Array.fill(1000)(1000.toByte) - lazy val maxSizeCollEnv: ScriptEnv = Map( - "alice" -> alice.dlogSecrets.head.publicImage, - "alice2" -> alice.dlogSecrets(1).publicImage, - "alice3" -> alice.dlogSecrets(2).publicImage, - "alice4" -> alice.dlogSecrets(3).publicImage, - "coll5" -> Colls.fromArray(Array.fill(5)(5.toByte)), - "coll10" -> Colls.fromArray(coll10), - "coll100" -> Colls.fromArray(coll100), - "coll1000" -> Colls.fromArray(coll1000), - "maxSizeColl" -> Colls.fromArray(maxSizeColl) - ) - - def termination[T](fn: () => T): (T, Boolean) = { - val t0 = System.currentTimeMillis() - val res = fn() - val t = System.currentTimeMillis() - (res, (t - t0) < Timeout) - } - - def measuredScriptAndSize(spamScript: SigmaPropValue): Try[(ErgoTree, Int)] = Try { - val tree = ErgoTree.fromProposition(spamScript) - val bytes = DefaultSerializer.serializeErgoTree(tree) - if (bytes.length > MaxPropositionBytes.value) { - val msg = s"Script size ${bytes.length} is too big, fix the test" - println(msg) - throw new Exception(msg) - } - val measuredTree = DefaultSerializer.deserializeErgoTree(bytes) - assert(measuredTree.complexity > 0) - (measuredTree, bytes.length) - } - - def initializationCost(scriptSize: Int): Long = { - CostTable.interpreterInitCost - } - - /** - * Checks that regardless of the script structure, it's verification always consumes at most `Timeout` ms - */ - private def 
checkScript(spamScript: SigmaPropValue, emptyProofs: Boolean = true): Boolean = { - val (measuredTree, scriptSize) = measuredScriptAndSize(spamScript) match { - case Success(x) => x - case _ => return false - } - - val ctx = { - - val output = ErgoBox(1, alicePubKey, 10, Nil, - Map( - R4 -> ByteConstant(1), - R5 -> SigmaPropConstant(alicePubKey), - R6 -> IntConstant(10), - R7 -> ByteArrayConstant(coll10), - R8 -> ByteArrayConstant(maxSizeColl) - ) - ) - - val input = ErgoBox(1, measuredTree, 10, Nil, - Map( - R4 -> ByteConstant(1), - R5 -> SigmaPropConstant(alicePubKey), - R6 -> IntConstant(10), - R7 -> ByteArrayConstant(coll10), - R8 -> ByteArrayConstant(maxSizeColl) - ) - ) - val outBoxes: IndexedSeq[ErgoBoxCandidate] = IndexedSeq.fill(NumOutputs)(output) - val inBoxes: IndexedSeq[ErgoBox] = IndexedSeq.fill(NumOutputs)(input) - //normally this transaction would invalid (why?), but we're not checking it in this test - val tx = createTransaction(outBoxes) - - ErgoLikeContext( - currentHeight = 10, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContext.dummyPubkey, - boxesToSpend = inBoxes, - spendingTransaction = tx, - self = inBoxes(0) // what is the use of self? 
- ) - } - - val pr = if (emptyProofs) { - // do not spend time to create a proof - CostedProverResult(Array[Byte](), ContextExtension( - Map( - 1.toByte -> ByteArrayConstant(hugeSizeColl), - 2.toByte -> ByteArrayConstant(coll10), - 3.toByte -> TrueLeaf, - 4.toByte -> IntConstant(12345), - 5.toByte -> BigIntConstant(Long.MaxValue), - 6.toByte -> ByteArrayConstant(coll100), - 7.toByte -> ByteArrayConstant(coll1000), - ) - ), 0L) - - } else { - // generate a correct proof using a prover without a cost limit - val pr = new ContextEnrichingTestProvingInterpreter() - .withSecrets(alice.dlogSecrets) - .prove(emptyEnv + (ScriptNameProp -> "prove"), measuredTree, ctx.withCostLimit(Long.MaxValue), fakeMessage).get - println(s"Prover cost: ${pr.cost}") - pr - } - - val initCost = initializationCost(scriptSize) - println(s"Initalization Cost: $initCost; Complexity: ${measuredTree.complexity}") - val verifier = new ErgoLikeTestInterpreter - val (res, calcTime) = BenchmarkUtil.measureTime { - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), measuredTree, ctx.withInitCost(initCost), pr, fakeMessage) - } - checkResult(res, calcTime, scriptSize) - true - } - - def checkResult(res: Try[(Boolean, Long)], calcTime: Long, scriptSize: Int) = { - println(s"Verify time: $calcTime millis; SerializedSize: $scriptSize; Defs: ${IR.defCount}") - println(s"Timeout: $Timeout millis") - res.fold(t => { - val cause = rootCause(t) - println(s"Rejection cause: $cause") - }, r => { - println(s"Result: $res") - }) - calcTime should be < Timeout - println("----------------------------") - } - - def warmUpScenario() = { - val ctx = ErgoLikeContext.dummy(fakeSelf) - val check = "i >= 0" - val prop = compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | maxSizeColl.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp - new ContextEnrichingTestProvingInterpreter() - .withSecrets(alice.dlogSecrets) - .prove(emptyEnv + (ScriptNameProp -> "prove"), prop, 
ctx.withCostLimit(Long.MaxValue), fakeMessage).get - } - - lazy val warmUpPrecondition = { - val (_, t) = measureTime(warmUpScenario()) - println(s"Warmup time: $t") - true - } - - def repeatScript(name: String, scaleLimit: Int, scaleStep: Int = 1)(scriptBuilder: Int => SigmaPropValue): Unit = { - IR.resetContext() - System.gc() - (1 to(scaleLimit, scaleStep)) foreach { scale => - println(s"ErgoTree Scale: $scale") - val prop = scriptBuilder(scale) - if(!checkScript(prop)) return - } - } - - val recursiveScript = BlockValue( - Vector( - ValDef(1, Plus(GetVarInt(4).get, ValUse(2, SInt))), - ValDef(2, Plus(GetVarInt(5).get, ValUse(1, SInt)))), - GE(Minus(ValUse(1, SInt), ValUse(2, SInt)), 0) ).asBoolValue.toSigmaProp - - property("recursion catched during deserialization") { - assertExceptionThrown({ - checkSerializationRoundTrip(recursiveScript) - }, - { - case e: NoSuchElementException => e.getMessage.contains("key not found: 2") - case _ => false - }) - } - - property("recursion catched during verify") { - assertExceptionThrown({ - val verifier = new ErgoLikeTestInterpreter - val pr = CostedProverResult(Array[Byte](), ContextExtension( Map( ) ), 0L) - val ctx = ErgoLikeContext.dummy(fakeSelf) - val (res, calcTime) = BenchmarkUtil.measureTime { - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), - ErgoTree(ErgoTree.DefaultHeader, IndexedSeq(), recursiveScript), ctx, pr, fakeMessage) - } - res.fold(t => throw t, identity) - }, { - case e: NoSuchElementException => - // this is expected bacause of deserialization is forced when ErgoTree.complexity is accessed in verify - e.getMessage.contains("key not found: 2") - case _ => false - }) - } - - property("Context extension with big coll") { - assert(warmUpPrecondition) - val name = "Context extension with big coll" - repeatScript(name, 86, 5) { scale => - val script = (1 to scale).map(_ => s"getVar[Coll[Byte]](2).get.forall({(i:Byte) => getVar[Coll[Byte]](2).get.forall({(j:Byte) => i == j})})").mkString(" && ") - 
compile(maxSizeCollEnv + (ScriptNameProp -> name), script).asBoolValue.toSigmaProp - } - } - - // TODO increase scaleLimit after https://github.com/ScorexFoundation/sigmastate-interpreter/issues/550 - property("Context extension: valid scripts") { - assert(warmUpPrecondition) - val check = "getVar[Boolean](3).get && getVar[Int](4).get > i && getVar[BigInt](5).get >= getVar[Int](4).get" - repeatScript(check, 70, 5) { scale => - val script = (1 to scale).map(_ => s"getVar[Coll[Byte]](6).get.forall({(i:Byte) => $check})").mkString(" && ") - compile(maxSizeCollEnv + (ScriptNameProp -> check), script).asBoolValue.toSigmaProp - } - } - - property("Too costly flatMap") { - assert(warmUpPrecondition) - repeatScript("Too costly flatMap", 30, 5) { scale => - val script = (1 to scale).map(j => - s"""INPUTS.flatMap({ (in: Box) => in.propositionBytes }) - | .forall({(i:Byte) => - | OUTPUTS.flatMap({ (out: Box) => out.propositionBytes }).size != i + $j - | })""".stripMargin).mkString(" && \n") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("Too costly flatMap2") { - assert(warmUpPrecondition) - repeatScript("Too costly flatMap2", 46, 5) { size => - val prefix = s"val outBytes = OUTPUTS.flatMap({ (out: Box) => out.propositionBytes })" - val body = (1 to size).map(j => - s"""INPUTS.flatMap({ (in: Box) => in.propositionBytes }) - | .forall({(i:Byte) => - | outBytes.size != i + $j - | })""".stripMargin).mkString(" && \n") - val script = - s"""{ - | $prefix - | $body - |}""".stripMargin - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("map") { - assert(warmUpPrecondition) - repeatScript("map", 30, 5) { scale => - val script = (1 to scale).map(j => - s"""OUTPUTS.map({ (in: Box) => in.R7[Coll[Byte]].get}) - | .forall({(c:Coll[Byte]) => - | c.forall({(c2: Byte) => c2 + ${j + 3} > 0}) - | })""".stripMargin).mkString(" && \n") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("map 2") { - 
assert(warmUpPrecondition) - repeatScript("map 2", 1, 1) { scale => - val script = (1 to scale).map(j => - s"""OUTPUTS(0).R7[Coll[Byte]].get.forall({(i:Byte) => - | OUTPUTS.map({ (in: Box) => in.R7[Coll[Byte]].get}) != INPUTS.map({ (in: Box) => in.R7[Coll[Byte]].get}) || i > 0 - |})""".stripMargin).mkString(" && \n") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: int comparison") { - val check = "i >= 0" - val prop = compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp - checkScript(prop) - } - - property("int comparison, cost a bit lower than limit ") { - assert(warmUpPrecondition) - val check = "i >= 0" - repeatScript(check, 86, 5) { scale => - val script = (1 to scale).map(_ => s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})").mkString(" && ") - compile(maxSizeCollEnv + (ScriptNameProp -> check), script).asBoolValue.toSigmaProp - } - } - - property("large loop: addition") { - val check = "i + i + i + i + i + i + i + i + i + i > i + i + i + i + i + i + i + i + i" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("addition, cost a bit lower than limit") { - repeatScript("addition, cost a bit lower than limit", 40) { scale => - val check = genNestedScript("1 < i", "", " + i", scale) - val script = s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})" - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: multiplication") { - val check = "i * i * i * i * i * i * i * i * i * i >= i * i * i * i * i * i * i * i * i" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - 
""".stripMargin).asBoolValue.toSigmaProp) - } - - property("multiplication, cost a bit lower than limit") { - repeatScript("multiplication, cost a bit lower than limit", 40) { scale => - val check = genNestedScript("i ", "", " * i", scale + 1) + " >= " + genNestedScript("i ", "", " * i", scale) - val script = s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})" - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("multiplication, cost a bit lower than limit 2") { - repeatScript("multiplication, cost a bit lower than limit 2", 30) { scale => - val check = genNestedScript("true ", "", " && i * 2 < i * i * i", scale) - val script = s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})" - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: negotiation") { - val check = "(! (i != 1)) || (! (i != 2)) || (! (i != 3)) || (! (i != 4))" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("negotiation, cost a bit lower than limit") { - repeatScript("negotiation, cost a bit lower than limit", 50) { scale => - val check = genNestedScript("(! (i != 2)) ", "(! 
(i != 1)) || ", "", scale) - val script = s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})" - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: comparison of the same elements") { - val check = "i == i" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("comparison of the same elements, cost a bit lower than limit") { - repeatScript("comparison of the same elements, cost a bit lower than limit", 50) { scale => - val check = "OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => i == i })" - val script = genNestedScript("123 == 123 ", "", s" && $check", scale) - compile(maxSizeCollEnv, "{" + script + "}").asBoolValue.toSigmaProp - } - } - - property("large loop: blake2b256") { - val check = "blake2b256(blake2b256(Coll(i))) != blake2b256(Coll(i))" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: collection element by index") { - val check = "OUTPUTS(0).R8[Coll[Byte]].get(i.toInt) == OUTPUTS(0).R8[Coll[Byte]].get(4000 - j)" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | coll5.forall({(j:Byte) => - | $check - | }) - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("collection element by index, cost a bit lower than limit") { - assert(warmUpPrecondition) - repeatScript("collection element by index", 36, 5) { scale => - val check = "OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => OUTPUTS(0).R8[Coll[Byte]].get(i.toInt) == OUTPUTS(0).R8[Coll[Byte]].get(3000) })" - val script = genNestedScript("true ", "", s" && $check", scale) - compile(maxSizeCollEnv + (ScriptNameProp -> check), "{" + 
script + "}").asBoolValue.toSigmaProp - } - } - - property("large loop: collection.slice") { - assert(warmUpPrecondition) - repeatScript("large loop: collection.slice", 50, 5) { scale => - val check = "OUTPUTS(0).R7[Coll[Byte]].get.slice(1, 8) == OUTPUTS(0).R7[Coll[Byte]].get.slice(2, 9)" - val script = (0 to scale).map(_ => s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})").mkString(" && ") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - // TODO looks like not consensus-critical, but fix is needed - ignore("stack overflow") { - val check = "OUTPUTS(0).R7[Coll[Byte]].get.slice(1, 8) == OUTPUTS(0).R7[Coll[Byte]].get.slice(2, 9)" - val script = (0 to 300).map(_ => s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})").mkString(" && ") - an[StackOverflowError] should be thrownBy checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), script).asBoolValue.toSigmaProp) - } - - property("large loop: collection.indices") { - assert(warmUpPrecondition) - repeatScript("large loop: collection.indices", 40) { scale => - val check = "OUTPUTS(0).R7[Coll[Byte]].get.indices.getOrElse(i, i) == i" - val script = (0 to scale).map(_ => s"OUTPUTS(0).R8[Coll[Byte]].get.indices.forall({(i:Int) => $check})").mkString(" && ") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: collection.filter") { - assert(warmUpPrecondition) - repeatScript("large loop: collection.filter", 40) { scale => - val check = "i == 1" - val script = (0 to scale).map(_ => s"OUTPUTS(0).R8[Coll[Byte]].get.filter({(i:Byte) => $check}).size == 0").mkString(" && ") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: collection allocation") { - assert(warmUpPrecondition) - repeatScript("large loop: collection allocation", 400, 100) { scale => - val check = genNestedScript("Coll(i", "", ",i", scale) + s") == Coll(i)" - val script = s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})" 
- compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: extract registers") { - assert(warmUpPrecondition) - repeatScript("large loop: extract registers", 16, 5) { i => - val check = "(SELF.R0[Long].get == SELF.R9[Long].getOrElse(SELF.R0[Long].get)) && (SELF.R4[Byte].get == (i - 1)) && INPUTS(0).R5[SigmaProp].get == OUTPUTS(0).R5[SigmaProp].get && INPUTS(0).R6[Int].get == OUTPUTS(0).R6[Int].get" - val script = (1 to i).map(_ => - s"OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => $check})" - ).mkString(" && \n") - compile(maxSizeCollEnv, script).asBoolValue.toSigmaProp - } - } - - property("large loop: if") { - val check = - s""" - | if(i > 0) { - | if(i == 1) { - | false - | } else { - | if(i != 1) { - | true - | } else { - | false - | } - | } - | } else { - | false - | } - """ - - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: tuple operations") { - val check = "(i, 9223372036854775800L)._1 == (12, 2)._2 && (i, 0)._1 == (2, 0)._1 && (i, 3)._1 == (2, 0)._1" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: SELF.creationInfo") { - val check = "SELF.creationInfo._2 != Coll(i, j) && SELF.creationInfo._2 != Coll(j)" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | coll5.forall({(j:Byte) => - | $check - | }) - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: val allocation") { - val check = - s""" - | val X = j + i - | val Y = X + 1123 + j + j - | val Z = Y + X + 1 - | val A = (Z * 2) + X + 1 - | val B = (A + i) % 1000 - | val C = (A + B) % 1000 - | val D = (i + C) % 1000 
- | D < 1000 - """ - - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | coll100.forall({(j:Byte) => - | $check - | }) - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: numeric casts") { - val check = "(HEIGHT.toLong + i.toLong).toInt == (HEIGHT.toByte + i).toInt" - - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: options") { - val check = "getVar[Int](1).isDefined || getVar[Int](2).isDefined || SELF.R4[Int].isDefined || SELF.R0[Long].isDefined" - - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: fold") { - val check = "i && b == 1" - assert(warmUpPrecondition) - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> "fold 1"), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.fold(true, {(i:Boolean, b:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: fold with complex logic") { - val check = "(i && b == 1) || (b == 2 && !i) || anyOf(Coll(i || b > 3, b == 0, b == -1, b == -2 && i))" - assert(warmUpPrecondition) - checkScript( - compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.fold(true, {(i:Boolean, b:Byte) => - | $check - | }) - |}""".stripMargin).asBoolValue.toSigmaProp - ) - } - - property("large loop: repeat fold with complex logic") { - val check = "(i && b == 1) || (b == 2 && !i) || anyOf(Coll(i || b > 3, b == 0, b == -1, b == -2 && i))" - repeatScript("collection element by index", 36, 5) { scale => - compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.fold(true, 
{(i:Boolean, b:Byte) => - | $check - | }) - |}""".stripMargin).asBoolValue.toSigmaProp - } - } - - property("large loop: exp") { - checkScript{ - compile( - maxSizeCollEnv ++ Map( - ScriptNameProp -> "large loop: exp", - "x1" -> SigmaDsl.BigInt((BigInt(Blake2b256("hello"))).bigInteger), - "y1" -> SigmaDsl.BigInt((BigInt(Blake2b256("world"))).bigInteger), - "g1" -> dlogGroup.generator, - "g2" -> dlogGroup.generator.add(dlogGroup.generator) - ), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(b:Byte) => - | val ex = if (b == 10) x1 else y1 - | g1.exp(ex) != g2 - | }) - |} - """.stripMargin - ).asBoolValue.toSigmaProp - } - } - - property("repeat large loop: exp") { - repeatScript("repeat large loop: exp", 25) { scale => - val innerCheck = "g1.exp(if (b == 10) x1 else y1) != g2" - val check = genNestedScript(s"$innerCheck", "", s" && $innerCheck", scale) - val script = s"OUTPUTS(0).R7[Coll[Byte]].get.forall({(b:Byte) => $check})" - compile(maxSizeCollEnv + - ("x1" -> SigmaDsl.BigInt((BigInt(Blake2b256("hello"))+scale).bigInteger)) + - ("y1" -> SigmaDsl.BigInt((BigInt(Blake2b256("world"))+scale*2).bigInteger)) + - ("g1" -> dlogGroup.generator) + - ("g2" -> dlogGroup.generator.add(dlogGroup.generator)), script).asBoolValue.toSigmaProp - } - } - - property("not having certain types (BigInt) in env") { - // NOT a spam test (yet). - /* value z below is not present in environment, nor is there any information that is is invalid. - - Perhaps this is expected behavior as the type of z is BigInt as opposed to SigmaDsl.BigInt. - - However, this could be better handled as follows: - If the environment contains disallowed types, an error (or warning) could be given. 
- - Actual error with uncommented code: - Cannot assign type for variable 'z' because it is not found in env - - */ - checkScript(compile( - Map( - "x" -> SigmaDsl.BigInt(BigInt(Blake2b256("hello")).bigInteger), - "y" -> SigmaDsl.BigInt(BigInt(Blake2b256("world")).bigInteger), -// "z" -> BigInt(Blake2b256("world")), // Using z in code gives error "z is not present in environment" - "g1" -> dlogGroup.generator, - "g2" -> dlogGroup.generator.add(dlogGroup.generator), - ScriptNameProp -> "exp" - ), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(b:Byte) => - | val ex = if (b == 10) x else y - | // val ex = if (b == 10) x else z // uncommenting this line will give error - | g1.exp(ex) != g2 - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("declared type is ignored") { - // NOT a spam test (yet). This is to check if the declared type is correctly handled - /* - The declared types are ignored in the below two lines in the code - - val x:Boolean = // considers x as Int - ... - val z:BigInt = // considers z as Int - - So this appears to be a bug. - infers ex as Int rather than BigInt - Need to add .toBigInt in the code for this to work. 
- - Note that the same error occurs outside the forall and for types other than BigInt - For instance the following line before the forall also has same effect - - */ - checkScript(compile( - Map( - ScriptNameProp -> "exp", - "g1" -> dlogGroup.generator, - "g2" -> dlogGroup.generator.add(dlogGroup.generator) - ), - s"""{ - | val x:Boolean = 1000 // x is considered as Int, the Boolean declaration is ignored - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(b:Byte) => - | val y:BigInt = if (x >= 1000 || b == 10) 10000.toBigInt else 20000.toBigInt // z is BigInt - | val z:BigInt = if (x >= 1000 || b == 10) 10000 else 20000 // z is condiered an Int - | g1.exp(y) != g2 - | // g1.exp(z) != g2 // uncommenting this causes an error - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("large loop: binary operations") { - val check = - s""" - | val T1 = i == 2; - | val T2 = (i * 3) % 5 == 1 - | val T3 = true - | val T4 = T1 ^ false - | val F1 = T1 ^ T2 - | val F2 = T4 && F1 - | val F3 = F1 || F2 - | val F4 = (F1 ^ F3) || (T4 ^ T3) - | T1 && T2 && T3 && T4 && (! (F1 ^ F2)) && (! F3) && (! 
F4) - """ - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - - property("complex sigma proposition") { - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> "proveDLog"), - s"""{ - | (alice && alice2) || (alice && alice3) || (alice && alice4) || (alice && alice3) || - | allOf(Coll(alice, alice2, alice3 ,alice4)) || atLeast(2, Coll(alice, alice2, alice3 ,alice4)) || - | anyOf(Coll(alice, alice2, alice3 ,alice4)) || atLeast(3, Coll(alice, alice2, alice3 ,alice4)) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("ring signature") { - val publicImages = alice.dlogSecrets.head.publicImage +: (1 to 1000).map { _ => - new ContextEnrichingTestProvingInterpreter().dlogSecrets.head.publicImage - } - // should not consume too much time for valid number of keys in a ring - checkScript(OR(publicImages.take(35).map(image => SigmaPropConstant(image).isProven)).toSigmaProp, emptyProofs = false) - } - - // todo construct transaction with at least one output and check the same properties for outputs - property("large loop: INPUTS.propositionBytes.size") { - val check = "INPUTS.exists({(x:Box) => x.propositionBytes.size >= 0})" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> check), - s"""{ - | OUTPUTS(0).R8[Coll[Byte]].get.forall({(i:Byte) => - | $check - | }) - |} - """.stripMargin).asBoolValue.toSigmaProp) - } - - property("nested blake2b256") { - val nestedBlake = genNestedScript("OUTPUTS(0).R8[Coll[Byte]].get", "blake2b256(", ")", 100) - val script = s"$nestedBlake != OUTPUTS(0).R8[Coll[Byte]].get" - checkScript(compile(maxSizeCollEnv + (ScriptNameProp -> script), script).asBoolValue.toSigmaProp) - } - - property("nested loops 1") { - val largeColl: Coll[Int] = Colls.fromArray((0 until 500).toArray) - val env = Map( - ScriptNameProp -> "nested loops 1", - "largeColl" -> largeColl - ) - val 
spamScript = compile(env, - """{ - | val valid = largeColl.forall({(i:Int) => - | largeColl.exists({(j:Int) => - | i != j - | } - | ) && - | largeColl.exists({(j:Int) => - | largeColl.forall({(k:Int) => - | k != i + j - | } - | ) && - | i != j - | } - | ) && - | OUTPUTS.exists({(x:Box) => - | x.propositionBytes.size >= i - | } - | ) - | } - | ) - | ! valid - |} - """.stripMargin).asBoolValue.toSigmaProp - - checkScript(spamScript) - } - - property("nested loops 2") { - val largeColl = Colls.fromArray((1 to 65).toArray) - val env = Map( - ScriptNameProp -> "nested loops 2", - "alice" -> alice.dlogSecrets.head.publicImage, - "largeColl" -> largeColl - ) - val spamScript = compile(env, - """{ - | val valid = largeColl.forall({(i:Int) => - | largeColl.exists({(j:Int) => - | largeColl.forall({(k:Int) => - | k != i + j - | } - | ) - | } - | ) - | } - | ) - | alice && valid - |} - """.stripMargin).asBoolValue.toSigmaProp - - checkScript(spamScript) - } - - property("large num of inputs 1") { - // runtime is high, test failing - /* - TIMEOUT IS 2150 - TIME IS 598 - */ - val alice = new ContextEnrichingTestProvingInterpreter - val alicePubKey: ProveDlog = alice.dlogSecrets.head.publicImage - val env = Map( - ScriptNameProp -> "Script", - "alice" -> alicePubKey, - "minNumInputs" -> NumInputs, - "minNumOutputs" -> NumOutputs - ) - val spamScript = compile(env, - """{ - | val valid = INPUTS.exists({(ib:Box) => - | OUTPUTS.exists({(ob:Box) => - | OUTPUTS.exists({(ob2:Box) => - | val ib_r4 = ib.R4[Byte].get - | val ib_r5 = ib.R5[SigmaProp].get - | val ib_r6 = ib.R6[Int].get - | val ib_r7 = ib.R7[Coll[Long]].get - | val ib_r8 = ib.R8[Coll[Byte]].get - | val ob_r4 = ob.R4[Byte].get - | val ob_r5 = ob.R5[SigmaProp].get - | val ob_r6 = ob.R6[Int].get - | val ob_r7 = ob.R7[Coll[Long]].get - | val ob_r8 = ob.R8[Coll[Byte]].get - | val ob2_r4 = ob2.R4[Byte].get - | val ob2_r5 = ob2.R5[SigmaProp].get - | val ob2_r6 = ob2.R6[Int].get - | val ob2_r7 = ob2.R7[Coll[Long]].get - | val ob2_r8 
= ob2.R8[Coll[Byte]].get - | ib.propositionBytes == ob.propositionBytes && ob2.propositionBytes.size <= SELF.propositionBytes.size && - | ib_r4 == ob_r4 && ob_r4 == ob2_r4 && - | ib_r5 == ob_r5 && ob_r5 == ob2_r5 && - | ib_r6 == ob_r6 && ob_r6 == ob2_r6 && - | ib_r7 == ob_r7 && ob_r7 == ob2_r7 && - | ib_r8 == ob_r8 && ob_r8 == ob2_r8 - | } - | ) - | } - | ) - | } - | ) && INPUTS.size >= minNumInputs && OUTPUTS.size >= minNumOutputs - | alice && !valid - |} - """.stripMargin).asBoolValue.toSigmaProp - - val output = ErgoBox(1, alicePubKey, 10, Nil, - Map( - R4 -> ByteConstant(1), - R5 -> SigmaPropConstant(alicePubKey), - R6 -> IntConstant(10), - R7 -> LongArrayConstant(Longs), - R8 -> ByteArrayConstant(Base16.decode("123456123456123456123456123456123456123456123456123456123456123456").get), - ) - ) - - val input = ErgoBox(1, spamScript, 10, Nil, - Map( - R4 -> ByteConstant(1), - R5 -> SigmaPropConstant(alicePubKey), - R6 -> IntConstant(10), - R7 -> LongArrayConstant(Longs), - R8 -> ByteArrayConstant(Base16.decode("123456123456123456123456123456123456123456123456123456123456123456").get) - ) - ) - val outBoxes: IndexedSeq[ErgoBoxCandidate] = IndexedSeq.fill(NumOutputs)(output) - val inBoxes: IndexedSeq[ErgoBox] = IndexedSeq.fill(NumOutputs)(input) - //normally this transaction would invalid (why?), but we're not checking it in this test - val tx = createTransaction(outBoxes) - - val context = ErgoLikeContext( - currentHeight = 10, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContext.dummyPubkey, - boxesToSpend = inBoxes, - spendingTransaction = tx, - self = inBoxes(0) // what is the use of self? 
- ) - - - val prover = new ContextEnrichingTestProvingInterpreter() - - assertExceptionThrown({ - val pr = prover - .withSecrets(alice.dlogSecrets) - .prove(emptyEnv + (ScriptNameProp -> "prove"), - spamScript, context, fakeMessage).get - - val verifier = new ErgoLikeTestInterpreter - val (res, terminated) = termination(() => - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), spamScript, context, pr, fakeMessage) - ) - res.isFailure shouldBe true - terminated shouldBe true - }, { - case ve: ValidationException if ve.rule.id == CheckLoopLevelInCostFunction.id => true - case _ => false - }) - - } - - property("large num of inputs 2") { - /* - Also failing, but time is high - - TIMEOUT IS 2062 - TIME IS 1476 - - */ - val alice = new ContextEnrichingTestProvingInterpreter - val alicePubKey: ProveDlog = alice.dlogSecrets.head.publicImage - val minNumInputs = 60 - val minNumOutputs = 60 - val env = Map( - ScriptNameProp -> "Script", - "alice" -> alicePubKey, - "minNumInputs" -> minNumInputs, - "minNumOutputs" -> minNumOutputs - ) - val spamScript = compile(env, - """{ - | val valid = INPUTS.exists({(ib:Box) => - | OUTPUTS.exists({(ob:Box) => - | val ib_r4 = ib.R4[Byte].get - | val ib_r5 = ib.R5[SigmaProp].get - | val ib_r6 = ib.R6[Int].get - | val ib_r7 = ib.R7[Coll[Long]].get - | val ib_r8 = ib.R8[Coll[Byte]].get - | val ib_r9 = ib.R9[Coll[Coll[Byte]]].get - | val ob_r4 = ob.R4[Byte].get - | val ob_r5 = ob.R5[SigmaProp].get - | val ob_r6 = ob.R6[Int].get - | val ob_r7 = ob.R7[Coll[Long]].get - | val ob_r8 = ob.R8[Coll[Byte]].get - | val ob_r9 = ob.R9[Coll[Coll[Byte]]].get - | ib.propositionBytes == ob.propositionBytes && ob.propositionBytes.size <= SELF.propositionBytes.size && - | ib_r4 == ob_r4 && - | ib_r5 == ob_r5 && - | ib_r6 == ob_r6 && - | ib_r7 == ob_r7 && - | ib_r8 == ob_r8 && - | ib_r9 != ob_r9 - | } - | ) - | } - | ) && INPUTS.size >= minNumInputs && OUTPUTS.size >= minNumOutputs - | alice && !valid - |} - """.stripMargin).asBoolValue.toSigmaProp - - 
val collCollByte = Colls.fromItems(Colls.fromArray((1 to 100).map(_.toByte).toArray)) - val longs = (1 to 100).map(_.toLong).toArray - - object ByteArrayArrayConstant { - def apply(value: Coll[Coll[Byte]]): CollectionConstant[SByteArray.type] = CollectionConstant[SByteArray.type](value, SByteArray) - } - - - val output = ErgoBox(1, alicePubKey, 10, Nil, - Map( - R4 -> ByteConstant(1), - R5 -> SigmaPropConstant(alicePubKey), - R6 -> IntConstant(10), - R7 -> LongArrayConstant(longs), - R8 -> ByteArrayConstant(Base16.decode("123456123456123456123456123456123456123456123456123456123456123456").get), - R9 -> ByteArrayArrayConstant(collCollByte) - ) - ) - - val input = ErgoBox(1, spamScript, 10, Nil, - Map( - R4 -> ByteConstant(1), - R5 -> SigmaPropConstant(alicePubKey), - R6 -> IntConstant(10), - R7 -> LongArrayConstant(longs), - R8 -> ByteArrayConstant(Base16.decode("123456123456123456123456123456123456123456123456123456123456123456").get), - R9 -> ByteArrayArrayConstant(collCollByte) - ) - ) - val outBoxes: IndexedSeq[ErgoBoxCandidate] = IndexedSeq.fill(minNumOutputs)(output) - val inBoxes: IndexedSeq[ErgoBox] = IndexedSeq.fill(minNumOutputs)(input) - //normally this transaction would invalid (why?), but we're not checking it in this test - val tx = createTransaction(outBoxes) - - val context = ErgoLikeContext( - currentHeight = 10, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContext.dummyPubkey, - boxesToSpend = inBoxes, - spendingTransaction = tx, - self = inBoxes(0) // what is the use of self? 
- ) - - - assertExceptionThrown({ - val prover = new ContextEnrichingTestProvingInterpreter() - - val pr = prover.withSecrets(alice.dlogSecrets).prove(emptyEnv + (ScriptNameProp -> "prove"), spamScript, context, fakeMessage).get - - val verifier = new ErgoLikeTestInterpreter - val (res, terminated) = termination(() => - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), spamScript, context, pr, fakeMessage) - ) - res.isFailure shouldBe true - terminated shouldBe true - }, - { - case ve: ValidationException if ve.rule.id == CheckLoopLevelInCostFunction.id => true - case _ => false - }) - } - - property("huge byte array") { - //TODO coverage: make value dependent on CostTable constants, not magic constant - val ba = Random.randomBytes(10000000) - - val id = 11: Byte - val id2 = 12: Byte - - val prover = new ContextEnrichingTestProvingInterpreter() - .withContextExtender(id, ByteArrayConstant(ba)) - .withContextExtender(id2, ByteArrayConstant(ba)) - - val spamScript = EQ(CalcBlake2b256(GetVarByteArray(id).get), CalcBlake2b256(GetVarByteArray(id2).get)).toSigmaProp - - val ctx = ErgoLikeContext.dummy(fakeSelf) - - val pr = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), spamScript, ctx.withCostLimit(CostTable.ScriptLimit * 10), fakeMessage).get - - val verifier = new ErgoLikeTestInterpreter - val (res, terminated) = termination(() => - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), spamScript, ctx, pr, fakeMessage) - ) - - res.isFailure shouldBe true - terminated shouldBe true - } - - /** This case verifies behavior of script interpreter when given enormously deep tree. - * Below it is at least 150 levels. - * When transaction is validated the script is deserialized for execution. - * It should be checked by deserializer for it's depth. - * The scripts with more than 150 levels are considered malicious. 
- */ - property("big byte array with a lot of operations") { - - val ba = Random.randomBytes(5000000) - - val id = 21: Byte - - val prover = new ContextEnrichingTestProvingInterpreter().withContextExtender(id, ByteArrayConstant(ba)) - - val bigSubScript = (1 to 100).foldLeft(CalcBlake2b256(GetVarByteArray(id).get)) { case (script, _) => - CalcBlake2b256(script) - } - - val spamScript = NEQ(bigSubScript, CalcBlake2b256(ByteArrayConstant(Array.fill(32)(0: Byte)))).toSigmaProp - - val ctx = ErgoLikeContext.dummy(fakeSelf).withCostLimit(CostTable.ScriptLimit * 10) - - val prt = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), spamScript, ctx, fakeMessage) - prt.isSuccess shouldBe true - - val pr = prt.get - - val ctxv = ctx.withExtension(pr.extension) - - val verifier = new ErgoLikeTestInterpreter - val (_, terminated) = termination(() => - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), spamScript, ctxv, pr.proof, fakeMessage) - ) - terminated shouldBe true - } - - property("ring signature - maximum ok ring size") { - val prover = new ContextEnrichingTestProvingInterpreter() - val verifier = new ErgoLikeTestInterpreter - val secret = prover.dlogSecrets.head - - val simulated = (1 to 50).map { _ => - new ContextEnrichingTestProvingInterpreter().dlogSecrets.head.publicImage - } - - val ctx = ErgoLikeContext.dummy(fakeSelf).withCostLimit(CostTable.ScriptLimit * 2) - - val publicImages = secret.publicImage +: simulated - val prop = OR(publicImages.map(image => SigmaPropConstant(image).isProven)).toSigmaProp - - val proof = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), prop, ctx, fakeMessage).get - - val (_, terminated) = termination(() => - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop, ctx, proof, fakeMessage) - ) - terminated shouldBe true - } - - property("transaction with many outputs") { - forAll(Gen.choose(10, 200), Gen.choose(200, 5000)) { case (orCnt, outCnt) => - whenever(orCnt > 10 && outCnt > 200) { - val orCnt = 10 - val 
outCnt = 5 - val prover = new ContextEnrichingTestProvingInterpreter() - - val propToCompare = OR((1 to orCnt).map(_ => EQ(LongConstant(6), LongConstant(5)))).toSigmaProp - - val spamProp = OR((1 until orCnt).map(_ => EQ(LongConstant(6), LongConstant(5))) :+ - EQ(LongConstant(6), LongConstant(6))).toSigmaProp - - val spamScript = - Exists(Outputs, - FuncValue(Vector((1, SBox)), - AND( - GE(ExtractAmount(ValUse(1, SBox)), LongConstant(10)), - EQ( - ExtractScriptBytes(ValUse(1, SBox)), - ByteArrayConstant(propToCompare.treeWithSegregation.bytes)) - ) - ) - ).toSigmaProp - - val txOutputs = ((1 to outCnt) map (_ => ErgoBox(11, spamProp, 0))) :+ ErgoBox(11, propToCompare, 0) - val tx = createTransaction(txOutputs) - - val ctx = ErgoLikeContext.dummy(createBox(0, propToCompare)) - .withTransaction(tx) - .withCostLimit(CostTable.ScriptLimit * 1000000L) - - val pt0 = System.currentTimeMillis() - val proof = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), spamScript, ctx, fakeMessage).get - val pt = System.currentTimeMillis() - println(s"Prover time: ${(pt - pt0) / 1000.0} seconds") - - val verifier = new ErgoLikeTestInterpreter - val (_, terminated) = termination(() => - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), spamScript, ctx, proof, fakeMessage)) - terminated shouldBe true - } - } - } - - def printTime(name: String, t: Long) = { - println(s"$name: ${t / 1000.0} seconds") - } - - def runSpam(name: String, ctx: ErgoLikeContext, prop: ErgoTree, genProofs: Boolean = false, doUnlimitedRun: Boolean = false) - (prover: ErgoLikeTestProvingInterpreter, verifier: ErgoLikeTestInterpreter): Unit = { - printTime("Timeout", Timeout) - - val ctxWithoutLimit = ctx.withCostLimit(Long.MaxValue) - val pr = if (!genProofs) { - // do not spend time to create a proof - CostedProverResult(Array[Byte](), ContextExtension.empty, 0L) - } else { - val (pr, proveTime) = measureTime( - prover.prove(emptyEnv + (ScriptNameProp -> (name + "_prove")), prop, ctxWithoutLimit, 
fakeMessage).get - ) - printTime("Proof Generation Time", proveTime) - pr - } - - // check that execution terminated within timeout due to costing exception and cost limit - val (res, verifyTime) = measureTime( - verifier.verify(emptyEnv + (ScriptNameProp -> (name + "_reject_verify")), prop, ctx, pr, fakeMessage) - ) - printTime("Verifier reject time", verifyTime) - - assert(verifyTime < Timeout, s"Script rejection time $verifyTime is longer than timeout $Timeout") - -// assertExceptionThrown( -// res.fold(t => throw t, identity), -// { -// case ce: CostLimitException => true -// case se: verifier.IR.StagingException => -// val cause = rootCause(se) -// println(s"Rejection cause: $cause") -// cause.isInstanceOf[CostLimitException] && cause.getMessage.contains("Estimated execution cost") -// case _ => false -// } -// ) - - // measure time required to fully execute the script itself and check it is more then Timeout - // this is necessary to nurture a more realistic suite of test cases - if (doUnlimitedRun) { - val (_, calcTime) = measureTime { - verifier.verify(emptyEnv + (ScriptNameProp -> (name + "_full_verify")), prop, ctxWithoutLimit, pr, fakeMessage) - } - printTime("Full time to verify", calcTime) - if (calcTime < Timeout) - println(s"Script full execution time $calcTime is less than timeout $Timeout") - } - } - - property("transaction with many inputs and outputs") { - assert(warmUpPrecondition) - implicit lazy val IR = new TestingIRContext { - override val okPrintEvaluatedEntries = false - } - val prover = new ContextEnrichingTestProvingInterpreter() - val verifier = new ContextEnrichingTestProvingInterpreter() - - (100 to(500, 100)) foreach { n => - println(s"Scale: $n") - - val prop = Exists(Inputs, - FuncValue(Vector((1, SBox)), - Exists(Outputs, - FuncValue(Vector((2, SBox)), - EQ(ExtractScriptBytes(ValUse(1, SBox)), - ExtractScriptBytes(ValUse(2, SBox))))))).toSigmaProp - val (measuredTree, scriptSize) = measuredScriptAndSize(prop).get - - val 
inputScript = OR((1 to 200).map(_ => EQ(LongConstant(6), LongConstant(5)))).toSigmaProp - val outputScript = OR((1 to 200).map(_ => EQ(LongConstant(6), LongConstant(6)))).toSigmaProp - - val inputs = ErgoBox(11, measuredTree, 0) +: // the box we are going to spend - ((1 to n) map (_ => ErgoBox(11, inputScript, 0))) :+ // non equal boxes - ErgoBox(11, outputScript, 0) // the last one is equal to output - - val outputs = (1 to n) map (_ => ErgoBox(11, outputScript, 0)) - - // force propositionBytes computation here (to exclude it from verify time) - inputs.foreach {in => in.propositionBytes } - outputs.foreach {out => out.propositionBytes } - - val tx = createTransaction(outputs) - - - val initCost = initializationCost(scriptSize) - val ctx = new ErgoLikeContext(currentHeight = 0, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContext.dummyPubkey, - dataBoxes = ErgoLikeContext.noBoxes, - headers = ErgoLikeContext.noHeaders, - preHeader = ErgoLikeContext.dummyPreHeader, - boxesToSpend = inputs, - spendingTransaction = tx, - self = inputs(0), - extension = ContextExtension.empty, - validationSettings = ValidationRules.currentSettings, - costLimit = CostLimit, - initCost = initCost) - - runSpam("t1", ctx, measuredTree, false, false)(prover, verifier) - println("----------------------") - } - } - - property("too heavy avl tree lookup") { - val reg1 = ErgoBox.nonMandatoryRegisters.head - - def genKey(str: String): ADKey = ADKey @@ Blake2b256("key: " + str) - - def genValue(str: String): ADValue = ADValue @@ Blake2b256("val: " + str) - - implicit lazy val IR = new TestingIRContext { - override val okPrintEvaluatedEntries = false - } - val prover = new ContextEnrichingTestProvingInterpreter() - val verifier = new ErgoLikeTestInterpreter - - val pubkey = prover.dlogSecrets.head.publicImage - - val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) - - (1 to 100000).foreach { i => - 
avlProver.performOneOperation(Insert(genKey(s"key$i"), genValue(s"value$i"))) - } - avlProver.generateProof() - - val digest = avlProver.digest - - (1 to 100000).foreach { i => - avlProver.performOneOperation(Lookup(genKey(s"key$i"))) - } - - val proof = avlProver.generateProof() - - println("proof size: " + proof.length) - - val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) - - val key1 = genKey("key1") - val value1 = genValue("value1") - - val prop = ErgoTree(ErgoTree.DefaultHeader, ErgoTree.EmptyConstants, - EQ( - IR.builder.mkMethodCall( - ExtractRegisterAs[SAvlTree.type](Self, reg1).get, - SAvlTree.getMethod, - IndexedSeq(ByteArrayConstant(key1), ByteArrayConstant(proof))).asOption[SByteArray].get, - ByteArrayConstant(value1) - ).toSigmaProp - ) - - val newBox1 = ErgoBox(10, pubkey, 0) - val newBoxes = IndexedSeq(newBox1) - - val spendingTransaction = createTransaction(newBoxes) - - val s = ErgoBox(20, ErgoScriptPredef.TrueProp, 0, Seq(), Map(reg1 -> AvlTreeConstant(treeData))) - - val ctx = ErgoLikeContext( - currentHeight = 50, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContext.dummyPubkey, - boxesToSpend = IndexedSeq(s), - spendingTransaction, - self = s) - - // val pr = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), prop, ctx.withCostLimit(Long.MaxValue), fakeMessage).get - // println("Cost: " + pr.cost) - // verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop, ctx, pr, fakeMessage).isFailure shouldBe true - runSpam("avltree", ctx, prop)(prover, verifier) - } - - property("nested loops") { - val alice = new ContextEnrichingTestProvingInterpreter - val alicePubKey: ProveDlog = alice.dlogSecrets.head.publicImage - val largeColl = Colls.fromArray((1 to 50).toArray) - val env = Map( - ScriptNameProp -> "nested loops", - "alice" -> alicePubKey, - "largeColl" -> largeColl - ) - val spamScript = compile(env, - """{ - | val valid = largeColl.forall({(i:Int) => - | 
largeColl.exists({(j:Int) => - | i != j - | } - | ) && - | largeColl.exists({(j:Int) => - | largeColl.forall({(k:Int) => - | k != i + j - | } - | ) && - | i != j - | } - | ) && - | OUTPUTS.exists({(x:Box) => - | x.propositionBytes.size >= i - | } - | ) - | } - | ) - | ! valid - |} - """.stripMargin).asBoolValue.toSigmaProp - - //todo: make value dependent on CostTable constants, not magic constant - val ba = Random.randomBytes(10000000) - - val id = 11: Byte - val id2 = 12: Byte - - val prover = new ContextEnrichingTestProvingInterpreter() - .withContextExtender(id, ByteArrayConstant(ba)) - .withContextExtender(id2, ByteArrayConstant(ba)) - - //val spamScript = EQ(CalcBlake2b256(GetVarByteArray(id).get), CalcBlake2b256(GetVarByteArray(id2).get)).toSigmaProp - - val ctx = ErgoLikeContext.dummy(fakeSelf) - - val pr = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), spamScript, ctx.withCostLimit(Long.MaxValue), fakeMessage).get - - val verifier = new ErgoLikeTestInterpreter - val (_, calcTime) = measureTime { - verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), spamScript, ctx, pr, fakeMessage) - } - println(s"calc time: $calcTime millis") - calcTime < Timeout shouldBe true - } - - @tailrec - private def genNestedScript(current: String, left: String, right: String, i: Int): String = if (i > 0) { - genNestedScript(s"$left$current$right", left, right, i - 1) - } else { - current - } - -} \ No newline at end of file diff --git a/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala b/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala index 7ac111e76f..dc0543e29b 100644 --- a/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala @@ -156,14 +156,16 @@ class CoinEmissionSpecification extends SigmaTestingCommons with ScorexLogging { ut.toSigned(IndexedSeq(proverResult)) } - val st = System.currentTimeMillis() + var st = 
System.currentTimeMillis() def chainGen(state: ValidationState, emissionBox: ErgoBox, height: Int, hLimit: Int): Unit = if (height < hLimit) { - if (height % 1000 == 0) { - println(s"block $height in ${System.currentTimeMillis() - st} ms, ${emissionBox.value} coins remain") + if (height % 100 == 0) { + val t = System.currentTimeMillis() + println(s"block $height in ${t - st} ms, ${emissionBox.value} coins remain") + st = t IR.resetContext() } val tx = genCoinbaseLikeTransaction(state, emissionBox, height) diff --git a/src/test/scala/special/sigma/SigmaDslTest.scala b/src/test/scala/special/sigma/SigmaDslTest.scala index 131ee847c6..15599989ed 100644 --- a/src/test/scala/special/sigma/SigmaDslTest.scala +++ b/src/test/scala/special/sigma/SigmaDslTest.scala @@ -993,4 +993,41 @@ class SigmaDslTest extends PropSpec forAll { x: (Boolean, Boolean) => eq(x) } } + property("lazy || and && boolean equivalence") { + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => x || (1 / 0 == 1) }")) + { (x: Boolean) => x || (1 / 0 == 1) }(true) + + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => x && (1 / 0 == 1) }")) + { (x: Boolean) => x && (1 / 0 == 1) }(false) + + // nested + + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => x && (x || (1 / 0 == 1)) }")) + { (x: Boolean) => x && (x || (1 / 0 == 1)) }(true) + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => x && (x && (x || (1 / 0 == 1))) }")) + { (x: Boolean) => x && (x && (x || (1 / 0 == 1))) }(true) + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => x && (x && (x && (x || (1 / 0 == 1)))) }")) + { (x: Boolean) => x && (x && (x && (x || (1 / 0 == 1)))) }(true) + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => !(!x && (1 / 0 == 1)) && (x || (1 / 0 == 1)) }")) + { (x: Boolean) => !(!x && (1 / 0 == 1)) && (x || (1 / 0 == 1)) }(true) + + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => (x || (1 / 0 == 1)) && x }")) + { (x: Boolean) => (x || (1 / 0 == 1)) && x }(true) + checkEq(func[Boolean, Boolean]( + "{ 
(x: Boolean) => (x || (1 / 0 == 1)) && (x || (1 / 0 == 1)) }")) + { (x: Boolean) => (x || (1 / 0 == 1)) && (x || (1 / 0 == 1)) }(true) + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => (!(!x && (1 / 0 == 1)) || (1 / 0 == 0)) && (x || (1 / 0 == 1)) }")) + { (x: Boolean) => (!(!x && (1 / 0 == 1)) || (1 / 0 == 0)) && (x || (1 / 0 == 1)) }(true) + checkEq(func[Boolean, Boolean]( + "{ (x: Boolean) => (!(!x && (1 / 0 == 1)) || (1 / 0 == 0)) && (!(!x && (1 / 0 == 1)) || (1 / 0 == 1)) }")) + { (x: Boolean) => (!(!x && (1 / 0 == 1)) || (1 / 0 == 0)) && (!(!x && (1 / 0 == 1)) || (1 / 0 == 1)) }(true) + } }