- /= nock-common /common/nock-common
- /= * /common/zeke
- ::
- => :* stark-engine
- nock-common=nock-common
- ==
- ~% %stark-verifier ..stark-engine-jet-hook ~
- |%
- :: copied from sur/verifier.hoon because of => stark-engine
- +$ verify-result [commitment=noun-digest:tip5 nonce=noun-digest:tip5]
- +$ elem-list (list [idx=@ trace-elems=(list belt) comp-elems=(list felt) deep-elem=felt])
- ::
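- :: +verify: top-level entry point. Runs +verify-inner under +mule so that a
- :: crash on a malformed proof is caught and reported as a failed verification
- :: (%.n) instead of crashing the caller.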
- ++ verify
- =| test-mode=_|
- |= [=proof override=(unit (list term)) verifier-eny=@]
- ^- ?
- =/ args [proof override verifier-eny test-mode]
- -:(mule |.((verify-inner args)))
- ::
- ++ verify-inner
- ~/ %verify-inner
- |= [=proof override=(unit (list term)) verifier-eny=@ test-mode=?]
- ^- verify-result
- ?> =(~ hashes.proof)
- =^ puzzle proof
- =^(c proof ~(pull proof-stream proof) ?>(?=(%puzzle -.c) c^proof))
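- :: reconstruct the puzzle's [subject formula] pair from its commitment,
- :: nonce, and length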
- =/ [s=* f=*] (puzzle-nock commitment.puzzle nonce.puzzle len.puzzle)
- ::
- :: get computation in raw noun form
- ?> (based-noun p.puzzle)
- ::
- =. table-names %- sort
- :_ t-order
- ?~ override
- gen-table-names:nock-common
- u.override
- ::~& table-names+table-names
- ::
- ::
- :: compute dynamic table widths
- =. table-base-widths (compute-base-widths override)
- =. table-full-widths (compute-full-widths override)
- ::
- :: get table heights
- =^ heights proof
- =^(h proof ~(pull proof-stream proof) ?>(?=(%heights -.h) p.h^proof))
- =/ num-tables (lent heights)
- ?> =(num-tables (lent core-table-names:nock-common))
- ::~& table-heights+heights
- ::
- =/ c constraints
- =/ pre=preprocess-0 prep.stark-config
- ::
- ::
- :: remove preprocess data for unused tables
- =. pre
- (remove-unused-constraints:nock-common pre table-names override)
- ::
- =/ clc ~(. calc heights cd.pre)
- ::
- :: verify size of proof
- =/ expected-num-proof-items=@ud
- ;: add
- ::
- :: number of static items in proof-data
- ::
- 10
- ::
- :: number of items written by FRI
- ::
- %+ add num-rounds:fri:clc
- (mul num-rounds:fri:clc num-spot-checks:fri:clc)
- ::
- :: number of merkle lookups into the deep codeword
- ::
- (mul 4 num-spot-checks:fri:clc)
- ==
- =/ actual-num-proof-items=@ud (lent objects.proof)
- ?> =(expected-num-proof-items actual-num-proof-items)
- ::
- :: get merkle root of base tables
- =^ base-root proof
- =^(b proof ~(pull proof-stream proof) ?>(?=(%m-root -.b) p.b^proof))
- ::
- =/ rng ~(verifier-fiat-shamir proof-stream proof)
- ::
- :: get coefficients for table extensions
- :: challenges: a, b, c, alpha
- =^ chals-rd1=(list belt) rng (belts:rng num-chals-rd1:chal)
- ::
- :: get merkle root of extension tables
- =^ ext-root proof
- =^(e proof ~(pull proof-stream proof) ?>(?=(%m-root -.e) p.e^proof))
- ::
- =. rng ~(verifier-fiat-shamir proof-stream proof)
- ::
- :: get coefficients for table extensions
- :: challenges: beta, z
- =^ chals-rd2=(list belt) rng (belts:rng num-chals-rd2:chal)
- =/ challenges (weld chals-rd1 chals-rd2)
- =/ chal-map=(map term belt)
- (bp-zip-chals-list:chal chal-names-basic:chal challenges)
- ::
- =/ [alf=pelt a=pelt b=pelt c=pelt d=pelt z=pelt]
- :* (got-pelt chal-map %alf)
- (got-pelt chal-map %a)
- (got-pelt chal-map %b)
- (got-pelt chal-map %c)
- (got-pelt chal-map %d)
- (got-pelt chal-map %z)
- ==
- ::
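- :: encode the subject, formula, and product nouns as tree data (size, dyck
- :: word, leaf vector) over the challenge alf, for use in the linking checks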
- =/ subj-data
- (build-tree-data:fock s alf)
- =/ form-data
- (build-tree-data:fock f alf)
- =/ prod-data
- (build-tree-data:fock p.puzzle alf)
- ::
- :: get merkle root of mega-extension tables
- =^ mega-ext-root proof
- =^(m proof ~(pull proof-stream proof) ?>(?=(%m-root -.m) p.m^proof))
- ::
- =. rng ~(verifier-fiat-shamir proof-stream proof)
- ::
- :: get terminals
- =^ terminals proof
- =^(t proof ~(pull proof-stream proof) ?>(?=(%terms -.t) p.t^proof))
- ::
- :: verify that len.terminals is as expected
- ?. .= len.terminals
- %- lent
- %+ roll all-terminal-names:nock-common
- |= [terms=(list term) acc=(list term)]
- ^- (list term)
- (weld acc terms)
- ~& "len.terminals is wrong"
- !!
- ::
- :: verify that len.terminals matches the length of the bpoly's data buffer
- ?. ~(chck bop terminals)
- ~& "len.terminals does not match the data buffer length"
- !!
- ::
- =. rng ~(verifier-fiat-shamir proof-stream proof)
- ::
- =/ [terminal-map=(map term belt) dyn-map=(map @ bpoly)]
- =- [term-map dyn-map]
- %+ roll all-terminal-names:nock-common
- |= [terms=(list term) table-num=@ idx=@ term-map=(map term belt) dyn-map=(map @ bpoly)]
- :^ +(table-num)
- (add idx (lent terms))
- %- ~(gas by term-map)
- %+ iturn terms
- |= [i=@ t=term]
- [t (~(snag bop terminals) (add idx i))]
- %- ~(put by dyn-map)
- [table-num (~(swag bop terminals) idx (lent terms))]
- ::
- ?. (linking-checks subj-data form-data prod-data a b c d z terminal-map)
- ~& "failed input linking checks" !!
- ::
- ::
- ::
- :: We now use the randomness to compute the expected fingerprints of the
- :: compute stack and product stack based on the given [s f] and product,
- :: respectively. We then dynamically generate constraints that force the cs
- :: and ps to be equivalent to the expected fingerprints. As long as the
- :: prover replicates this exact protocol, the opened indices should match up.
- :: The boundary constraint then ensures that the computation in cleartext is
- :: linked to the computation in the trace.
- ::
- :: generate scalars for the random linear combination of the composition polynomial
- =/ num-constraints=@
- %+ roll (range num-tables)
- |= [i=@ acc=@]
- =/ cs (~(got by count-map.pre) i)
- ;: add
- acc
- boundary.cs
- row.cs
- transition.cs
- terminal.cs
- ==
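- :: sample two weights (alpha and beta) per constraint; see +evaluate-constraints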
- =^ comp-weights=bpoly rng
- =^ belts rng (belts:rng (mul 2 num-constraints))
- [(init-bpoly belts) rng]
- =/ composition-chals=(map @ bpoly)
- %- ~(gas by *(map @ bpoly))
- =- -<
- %+ roll (range num-tables)
- |= [i=@ acc=(list [@ bpoly]) num=@]
- =/ cs (~(got by count-map.pre) i)
- =/ num-constraints=@
- ;: add
- boundary.cs
- row.cs
- transition.cs
- terminal.cs
- ==
- :_ (add num (mul 2 num-constraints))
- [[i (~(scag bop (~(slag bop comp-weights) num)) (mul 2 num-constraints))] acc]
- ::~& max-degree+max-degree:clc
- ::
- :: read the composition piece codewords
- =^ comp-root proof
- =^(c proof ~(pull proof-stream proof) ?>(?=(%comp-m -.c) [p.c num.c]^proof))
- ::
- =. rng ~(verifier-fiat-shamir proof-stream proof)
- ::
- :: generate the DEEP challenge
- =^ deep-challenge=felt rng
- =^ deep-candidate=felt rng $:felt:rng
- =/ n fri-domain-len:clc
- =/ exp-offset (lift (bpow generator:stark-engine n))
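- :: a candidate x lies in the order-n subgroup <omega> iff x^n = 1, and in the
- :: FRI coset g*<omega> iff x^n = g^n, where n is the FRI domain length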
- |-
- =/ exp-deep-can (fpow deep-candidate n)
- :: reject if deep-candidate is in the evaluation domain H or FRI domain D
- ?. ?|(=(exp-deep-can f1) =(exp-deep-can exp-offset))
- [deep-candidate rng]
- =^ felt rng $:felt:rng
- $(deep-candidate felt)
- ::
- :: read the trace evaluations at the DEEP challenge point
- =^ trace-evaluations=fpoly proof
- =^(t proof ~(pull proof-stream proof) ?>(?=(%evals -.t) p.t^proof))
- ::
- :: check that the size of the evaluations is exactly twice the total number of
- :: columns across all tables
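- :: (each column is opened at the DEEP point and at the omicron-shifted point,
- :: hence the factor of two)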
- =/ total-cols=@
- %+ roll table-full-widths
- |= [w=@ acc=@]
- (add w acc)
- ?> =(len.trace-evaluations (mul 2 total-cols))
- ?> ~(chck fop trace-evaluations)
- ::
- :: read the composition piece evaluations at the DEEP challenge point
- =^ composition-piece-evaluations=fpoly proof
- =^(c proof ~(pull proof-stream proof) ?>(?=(%evals -.c) p.c^proof))
- ::
- :: check that there are the correct number of composition piece evaluations and no more
- ?. ?& =(len.composition-piece-evaluations +.comp-root)
- ~(chck fop composition-piece-evaluations)
- ==
- ~& >> %num-composition-piece-evals-wrong
- !!
- =. rng ~(verifier-fiat-shamir proof-stream proof)
- ::
- :: verify the composition polynomial equals the composition pieces by evaluating each side
- :: at the DEEP challenge point
- =/ composition-eval=felt
- %- eval-composition-poly
- :* trace-evaluations
- heights
- constraint-map.pre
- count-map.pre
- dyn-map
- composition-chals
- challenges
- max-degree:clc
- deep-challenge
- table-full-widths
- s
- f
- ==
- ::
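- :: the composition polynomial splits into k pieces: C(X) = sum_i X^i * C_i(X^k).
- :: here we recombine the claimed piece evaluations at the DEEP point and later
- :: check the result against the direct evaluation of the constraints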
- =/ decomposition-eval=felt
- %+ roll (range +.comp-root)
- |= [i=@ acc=_(lift 0)]
- ~| 'A crash here sometimes indicates that one of the constraints is degree 5 or higher'
- %+ fadd acc
- (fmul (fpow deep-challenge i) (~(snag fop composition-piece-evaluations) i))
- ::
- ?. =(composition-eval decomposition-eval)
- ~& %composition-eval-failed
- ~& %invalid-proof !!
- ::~& %composition-eval-passed
- ::
- :: generate random weights for DEEP composition polynomial
- =^ deep-weights=fpoly rng
- =^ felt-list rng
- %- felts:rng
- (add len.trace-evaluations len.composition-piece-evaluations)
- [(init-fpoly felt-list) rng]
- ::
- :: read the merkle root of the DEEP composition polynomial
- =^ deep-root proof
- =^(d proof ~(pull proof-stream proof) ?>(?=(%m-root -.d) p.d^proof))
- ::
- :: verify the DEEP composition polynomial is low degree
- =^ [fri-indices=(list @) merks=(list merk-data:merkle) deep-cosets=(map @ fpoly) fri-res=?] proof
- (verify:fri:clc proof deep-root)
- ::
- ?. =(fri-res %.y)
- ~& %deep-composition-polynomial-is-not-low-degree
- ~& %invalid-proof !!
- ::~& %deep-composition-polynomial-is-low-degree
- ::
- ::
- :: verify the DEEP codeword is actually the codeword of the DEEP composition
- :: polynomial: evaluate it at all the top-level FRI points by opening the
- :: trace and piece polynomials, then compare with the deep codeword. This
- :: convinces the verifier that it ran FRI on the correct polynomial.
- ::
- :: Open trace and composition piece polynomials at the top level FRI indices
- ::
- =^ [elems=elem-list merk-proofs=(list merk-data:merkle)]
- proof
- %+ roll fri-indices
- |= $: idx=@
- $: l=(list [idx=@ trace-elems=(list belt) comp-elems=(list felt) deep-elem=felt])
- proofs=_merks
- ==
- proof=_proof
- ==
- =/ axis (index-to-axis:merkle (xeb fri-domain-len:clc) idx)
- =^ base-trace-opening proof
- =^(mp proof ~(pull proof-stream proof) ?>(?=(%m-pathbf -.mp) p.mp^proof))
- =^ ext-opening proof
- =^(mp proof ~(pull proof-stream proof) ?>(?=(%m-pathbf -.mp) p.mp^proof))
- =^ mega-ext-opening proof
- =^(mp proof ~(pull proof-stream proof) ?>(?=(%m-pathbf -.mp) p.mp^proof))
- =^ comp-opening proof
- =^(mp proof ~(pull proof-stream proof) ?>(?=(%m-pathbf -.mp) p.mp^proof))
- ::
- =. proofs
- :*
- :* (hash-hashable:tip5 (hashable-bpoly:tip5 leaf.base-trace-opening))
- axis base-root path.base-trace-opening
- ==
- ::
- :* (hash-hashable:tip5 (hashable-bpoly:tip5 leaf.ext-opening))
- axis ext-root path.ext-opening
- ==
- ::
- :* (hash-hashable:tip5 (hashable-bpoly:tip5 leaf.mega-ext-opening))
- axis mega-ext-root path.mega-ext-opening
- ==
- ::
- :* (hash-hashable:tip5 (hashable-bpoly:tip5 leaf.comp-opening))
- axis -.comp-root path.comp-opening
- ==
- ::
- proofs
- ==
- =/ base-elems (bpoly-to-list leaf.base-trace-opening)
- ?> (levy base-elems based)
- =/ ext-elems (bpoly-to-list leaf.ext-opening)
- ?> (levy ext-elems based)
- =/ mega-ext-elems (bpoly-to-list leaf.mega-ext-opening)
- ?> (levy mega-ext-elems based)
- =/ trace-elems=(list belt)
- %- zing
- :: combines base, ext, and mega-ext openings, divided by table
- ^- (list (list belt))
- %- turn :_ weld
- %+ zip-up
- (clev base-elems table-base-widths-static:nock-common)
- %- turn :_ weld
- %+ zip-up
- (clev ext-elems table-ext-widths-static:nock-common)
- (clev mega-ext-elems table-mega-ext-widths-static:nock-common)
- =/ comp-elems (bpoly-to-list leaf.comp-opening)
- ?> (levy comp-elems based)
- ::
- :: The openings to the deep codeword itself were already read out of the proof
- :: during FRI. verify:fri returned deep-cosets=(map @ fpoly), which is all the cosets
- :: read from the deep codeword, keyed by coset-idx. We will use this map to find the
- :: deep codeword point instead of wasting proof space by writing it into the proof twice.
- ::
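- :: index idx falls in coset (idx mod n/k) at position (idx div n/k), where
- :: n is fri-domain-len and k is folding-deg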
- =/ coset-idx (mod idx (div fri-domain-len:clc folding-deg:fri:clc))
- =/ entry (div idx (div fri-domain-len:clc folding-deg:fri:clc))
- =/ coset=fpoly (~(got by deep-cosets) coset-idx)
- =/ deep-elem=felt (~(snag fop coset) entry)
- ::
- :_ proof
- :- [[idx trace-elems comp-elems deep-elem] l]
- proofs
- ::
- ?: &(=(test-mode %.n) !(verify-merk-proofs merk-proofs verifier-eny))
- ~& %failed-to-verify-merk-proofs !!
- ::
- :: evaluate DEEP polynomial at the indices
- =/ omega=felt (lift omega:clc)
- =/ eval-res=?
- %+ roll elems
- |= [[idx=@ trace-elems=(list belt) comp-elems=(list belt) deep-elem=felt] acc=?]
- ^- ?
- =/ deep-eval
- %- evaluate-deep
- :* trace-evaluations
- composition-piece-evaluations
- trace-elems
- comp-elems
- +.comp-root
- deep-weights
- heights
- table-full-widths
- omega
- idx
- deep-challenge
- ==
- ~| "DEEP codeword doesn't match evaluation"
- ?> =(deep-eval deep-elem)
- &(acc %.y)
- ~| "DEEP codeword doesn't match evaluation"
- ::
- ?> =(eval-res %.y)
- ::~& %deep-codeword-matches
- ::~& %proof-verified
- [commitment nonce]:puzzle
- ::
- ++ compute-base-widths
- ~/ %compute-base-widths
- |= override=(unit (list term))
- ^- (list @)
- ?~ override
- core-table-base-widths-static:nock-common
- (custom-table-base-widths-static:nock-common table-names)
- ::
- ++ compute-full-widths
- ~/ %compute-full-widths
- |= override=(unit (list term))
- ?~ override
- core-table-full-widths-static:nock-common
- (custom-table-full-widths-static:nock-common table-names)
- ::
- ++ linking-checks
- ~/ %linking-checks
- |= $: s=tree-data f=tree-data p=tree-data
- a=pelt b=pelt c=pelt d=pelt z=pelt
- mp=(map term belt)
- ==
- ^- ?
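- :: ifp-f and ifp-s are compressed fingerprints of the formula and subject
- :: built from their (size, dyck, leaf) encodings; each clause below runs one
- :: linking check and prints a diagnostic when it fails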
- =/ ifp-f (compress-pelt ~[a b c] ~[size dyck leaf]:f)
- =/ ifp-s (compress-pelt ~[a b c] ~[size dyck leaf]:s)
- ?&
- =; bool
- ?: bool bool
- ~&("memory table node count input check failed" bool)
- .= ?@ n.s
- z
- (pmul z z)
- (got-pelt mp %memory-nc)
- ::
- =; bool
- ?: bool bool
- ~&("memory table kvs input check failed" bool)
- .= ?@ n.s
- (pmul z (padd ifp-f (pscal 0 d)))
- %+ padd
- (pmul z (padd ifp-s (pscal 1 d)))
- :(pmul z z (padd ifp-f (pscal 0 d)))
- (got-pelt mp %memory-kvs)
- ::
- =; bool
- ?: bool bool
- ~&("compute table subject size input check failed" bool)
- =(size.s (got-pelt mp %compute-s-size))
- ::
- =; bool
- ?: bool bool
- ~&("compute table subject dyck word input check failed" bool)
- .= dyck.s
- (got-pelt mp %compute-s-dyck)
- ::
- =; bool
- ?: bool bool
- ~&("compute table subject leaf vector input check failed" bool)
- =(leaf.s (got-pelt mp %compute-s-leaf))
- ::
- =; bool
- ?: bool bool
- ~&("compute table formula size input check failed" bool)
- =(size.f (got-pelt mp %compute-f-size))
- ::
- =; bool
- ?: bool bool
- ~&("compute table formula dyck word input check failed" bool)
- =(dyck.f (got-pelt mp %compute-f-dyck))
- ::
- =; bool
- ?: bool bool
- ~&("compute table formula leaf vector input check failed" bool)
- =(leaf.f (got-pelt mp %compute-f-leaf))
- ::
- =; bool
- ?: bool bool
- ~&("compute table product size input check failed" bool)
- =(size.p (got-pelt mp %compute-e-size))
- ::
- =; bool
- ?: bool bool
- ~&("compute table product dyck word input check failed" bool)
- =(dyck.p (got-pelt mp %compute-e-dyck))
- ::
- =; bool
- ?: bool bool
- ~&("compute table product leaf vector input check failed" bool)
- =(leaf.p (got-pelt mp %compute-e-leaf))
- ::
- =; bool
- ?: bool bool
- ~&("decode multiset terminal comparison check failed" bool)
- =((got-pelt mp %compute-decode-mset) (got-pelt mp %memory-decode-mset))
- ::
- =; bool
- ?: bool bool
- ~&("Nock 0 multiset terminal comparison check failed" bool)
- =((got-pelt mp %compute-op0-mset) (got-pelt mp %memory-op0-mset))
- ==
- ::
- ++ eval-composition-poly
- ~/ %eval-composition-poly
- |= $: trace-evaluations=fpoly
- heights=(list @)
- constraint-map=(map @ constraints)
- constraint-counts=(map @ constraint-counts)
- dyn-map=(map @ bpoly)
- composition-chals=(map @ bpoly)
- challenges=(list belt)
- max-degree=@
- deep-challenge=felt
- table-full-widths=(list @)
- s=*
- f=*
- ==
- ^- felt
- =/ max-height=@
- %- bex %- xeb %- dec
- (roll heights max)
- =/ dp (degree-processing heights constraint-map)
- =/ boundary-zerofier=felt
- (finv (fsub deep-challenge (lift 1))) :: f(X)=1/(X-1)
- =/ chal-map=(map @ belt)
- (make-challenge-map:chal challenges s f)
- |^
- =- -<
- %^ zip-roll (range (lent heights)) heights
- |= [[i=@ height=@] acc=_(lift 0) evals=_trace-evaluations]
- =/ width=@ (snag i table-full-widths)
- =/ omicron (lift (ordered-root height))
- =/ last-row (fsub deep-challenge (finv omicron))
- =/ terminal-zerofier (finv last-row) :: f(X)=1/(X-g^{-1})
- =/ chals=bpoly (~(got by composition-chals) i)
- =/ height=@ (snag i heights)
- =/ constraints (~(got by constraint-w-deg-map.dp) i)
- =/ counts (~(got by constraint-counts) i)
- =/ dyns (~(got by dyn-map) i)
- =/ row-zerofier (finv (fsub (fpow deep-challenge height) (lift 1))) :: f(X)=1/(X^N-1)
- =/ transition-zerofier :: f(X)=(X-g^{-1})/(X^N-1)
- (fmul last-row row-zerofier)
- ::
- =/ current-evals=fpoly (~(scag fop evals) (mul 2 width))
- :_ (~(slag fop evals) (mul 2 width))
- ;: fadd
- acc
- ::
- %+ fmul boundary-zerofier
- %- evaluate-constraints
- :* boundary.constraints
- dyns
- chal-map
- current-evals
- (~(scag bop chals) (mul 2 boundary.counts))
- ==
- ::
- %+ fmul row-zerofier
- %- evaluate-constraints
- :* row.constraints
- dyns
- chal-map
- current-evals
- ::
- %+ ~(swag bop chals)
- (mul 2 boundary.counts)
- (mul 2 row.counts)
- ::
- ==
- ::
- %+ fmul transition-zerofier
- %- evaluate-constraints
- :* transition.constraints
- dyns
- chal-map
- current-evals
- ::
- %+ ~(swag bop chals)
- (mul 2 (add boundary.counts row.counts))
- (mul 2 transition.counts)
- ::
- ==
- ::
- %+ fmul terminal-zerofier
- %- evaluate-constraints
- :* terminal.constraints
- dyns
- chal-map
- current-evals
- ::
- %- ~(slag bop chals)
- %+ mul 2
- ;: add
- boundary.counts
- row.counts
- transition.counts
- ==
- ::
- ==
- ==
- ::
- ++ evaluate-constraints
- |= $: constraints=(list [(list @) mp-ultra])
- dyns=bpoly
- chal-map=(map @ belt)
- evals=fpoly
- chals=bpoly
- ==
- ^- felt
- =- acc
- %+ roll constraints
- |= [[degs=(list @) c=mp-ultra] [idx=@ acc=_(lift 0)]]
- ::
- :: evaled is a list because the %comp constraint type
- :: can contain multiple mp-mega constraints.
- =/ evaled=(list felt) (mpeval-ultra %ext c evals chal-map dyns)
- %+ roll
- (zip-up degs evaled)
- |= [[deg=@ eval=felt] [idx=_idx acc=_acc]]
- :- +(idx)
- ::
- :: Each constraint corresponds to two weights: alpha and beta. The verifier
- :: samples 2*num_constraints random values and we assume that the alpha
- :: and beta weights for a given constraint are situated next to each other
- :: in the array.
- ::
- =/ alpha (~(snag bop chals) (mul 2 idx))
- =/ beta (~(snag bop chals) (add 1 (mul 2 idx)))
- ::
- :: TODO: I've removed the degree adjustments but left them commented out.
- :: Once we figure out exactly how to do it we can put them back in.
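- :: acc <- acc + eval * (beta + alpha * z^(fri-deg-bound - deg)),
- :: where z is the DEEP challenge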
- %+ fadd acc
- %+ fmul eval
- %+ fadd (lift beta)
- %+ fmul (lift alpha)
- (fpow deep-challenge (sub fri-deg-bound.dp deg))
- -- ::+eval-composition-poly
- ::
- ++ evaluate-deep
- ~/ %evaluate-deep
- |= $: trace-evaluations=fpoly
- comp-evaluations=fpoly
- trace-elems=(list belt)
- comp-elems=(list belt)
- num-comp-pieces=@
- weights=fpoly
- heights=(list @)
- full-widths=(list @)
- omega=felt
- index=@
- deep-challenge=felt
- ==
- ^- felt
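- :: x = g * omega^index is the FRI-domain point being opened. For each opened
- :: column value e with claimed DEEP evaluation v at point p, we accumulate
- :: w * (e - v) / (x - p), where w is the next random weight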
- =/ omega-pow (fmul (lift g) (fpow omega index))
- |^
- =; dat=[acc=felt num=@ @]
- =- -<
- =/ denom (fsub omega-pow (fpow deep-challenge num-comp-pieces))
- %- process-belt
- :* comp-elems
- comp-evaluations
- (~(slag fop weights) num.dat)
- num-comp-pieces
- 0
- denom
- acc.dat
- ==
- %^ zip-roll (range (lent heights)) heights
- |= [[i=@ height=@] acc=_(lift 0) num=@ total-full-width=@]
- =/ full-width (snag i full-widths)
- =/ omicron (lift (ordered-root height))
- =/ current-trace-elems (swag [total-full-width full-width] trace-elems)
- =/ dat=[acc=felt num=@] [acc num]
- :: first row trace columns
- =/ denom (fsub omega-pow deep-challenge)
- =. dat
- %- process-belt
- :* current-trace-elems
- trace-evaluations
- weights
- full-width
- num.dat
- denom
- acc.dat
- ==
- :: second row trace columns obtained by shifting by omicron
- =. denom (fsub omega-pow (fmul deep-challenge omicron))
- =/ acc acc.dat
- =. dat
- %- process-belt
- :* current-trace-elems
- trace-evaluations
- weights
- full-width
- num.dat
- denom
- acc.dat
- ==
- [acc.dat num.dat (add total-full-width full-width)]
- ::
- ++ process-belt
- |= $: elems=(list belt)
- evals=fpoly
- weights=fpoly
- width=@
- num=@
- denom=felt
- acc=felt
- ==
- ^- [felt @]
- %+ roll (range width)
- |= [i=@ acc=_acc num=_num]
- :_ +(num)
- %+ fadd acc
- %+ fmul (~(snag fop weights) num)
- %+ fdiv
- (fsub (lift (snag i elems)) (~(snag fop evals) num))
- denom
- -- ::+evaluate-deep
- ::
- :: verify a list of merkle proofs in a random order. This is to guard against DDoS attacks.
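- :: each proof is tagged with a random belt drawn from a tip5 sponge seeded
- :: with eny, then the proofs are verified in order of their tags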
- ++ verify-merk-proofs
- ~/ %verify-merk-proofs
- |= [ps=(list merk-data:merkle) eny=@]
- ^- ?
- =/ tog-eny (new:tog:tip5 sponge:(absorb:(new:sponge:tip5) (mod eny p)^~))
- =/ lst=(list [@ merk-data:merkle])
- =- l
- %+ roll ps
- |= [m=merk-data:merkle rng=_tog-eny l=(list [@ merk-data:merkle])]
- ^+ [tog:tip5 *(list [@ merk-data:merkle])]
- =^ rnd rng (belts:rng 1)
- :- rng
- [[(head rnd) m] l]
- =/ sorted=(list [@ m=merk-data:merkle])
- %+ sort lst
- |= [x=[@ *] y=[@ *]]
- (lth -.x -.y)
- |-
- ?~ sorted %.y
- =/ res (verify-merk-proof:merkle m.i.sorted)
- ?. res
- %.n
- $(sorted t.sorted)
- --