57 constexpr static size_t kMaxRunLen = (1 << 25);
59 constexpr static size_t kMaxNumDigests = (1 << 25);
      // NOTE(review): fragment of a constructor member-initializer — the
      // surrounding lines are missing from this extract. It appears to size
      // the commitment parameters from the private-witness count
      // (ninputs - npub_in) plus padding; confirm against the full source.
66        param((c.ninputs - c.npub_in) + ZkCommon<Field>::pad_size(c), c.nl,
    // NOTE(review): tail of a serialized-size computation; the function
    // opening (and original lines 74/76, likely comments) are missing from
    // this extract. The terms mirror the sections emitted by write():
    // the commitment root digest, the sumcheck-proof elements, 2*block
    // y-values, nreq*nrow opened entries, and nreq Merkle authentication
    // paths of mc_pathlen digests each.
73     return Digest::kLength +
75            proof.size() * Field::kBytes +
77            com_proof.block * 2 * Field::kBytes +
78            com_proof.nreq * com_proof.nrow * Field::kBytes +
79            com_proof.nreq * com_proof.mc_pathlen * Digest::kLength;
  // Serializes the complete proof into |buf| in three sections, in the
  // order the reader consumes them: commitment (Merkle root), sumcheck
  // transcript, then commitment-opening proof. The sN byte offsets exist
  // only to report per-section sizes in the log statement below.
82   void write(std::vector<uint8_t> &buf,
             const Field &F)
      const {
83     size_t s0 = buf.size();
84     write_com(com, buf, F);
85     size_t s1 = buf.size();
86     write_sc_proof(proof, buf, F);
87     size_t s2 = buf.size();
88     write_com_proof(com_proof, buf, F);
89     size_t s3 = buf.size();
       // NOTE(review): the opening of this logging/printf-style call
       // (original line 90), the rest of its format string (line 92), and
       // the call's closing lines are missing from this extract.
91         "com:%zu, sc:%zu, com_proof:%zu [%zu el, %zu el, %zu d in %zu "
93         s1 - s0, s2 - s1, s3 - s2, 2 * com_proof.block,
94         com_proof.nreq * com_proof.nrow, com_proof.merkle.path.size(),
       // Body of read(): parses the three sections in the same order
       // write() emits them; each helper returns false on truncated or
       // malformed input so the failure propagates to the caller.
       // NOTE(review): the enclosing signature (before original line 100)
       // and the trailing `return true;` fall outside this extract.
100     if (!read_com(com, buf, F))
      return false;
101     if (!read_sc_proof(proof, buf, F))
      return false;
102     if (!read_com_proof(com_proof, buf, F))
      return false;
  // Serializes the sumcheck transcript: for every layer, coefficients of
  // the two h-polynomials (hp[0], hp[1]) for each of the logw rounds,
  // followed by the two claimed wire values wc[0], wc[1]. Only supported
  // when c.logc == 0.
106   void write_sc_proof(
      const Proof<Field> &pr, std::vector<uint8_t> &buf,
107                       const Field &F)
      const {
108     check(c.logc == 0,
          "cannot write sc proof with logc != 0");
109     for (
      size_t i = 0; i < pr.l.size(); ++i) {
110       for (
      size_t wi = 0; wi < c.l[i].logw; ++wi) {
111         for (
      size_t k = 0; k < 3; ++k) {
            // NOTE(review): original lines 112-113 are elided here. The
            // reader budgets only (3 - 1) coefficients per polynomial per
            // round, so those lines likely skip one value of k (the
            // verifier can reconstruct it) — confirm against full source.
114           write_elt(pr.l[i].hp[0][wi].t_[k], buf, F);
115           write_elt(pr.l[i].hp[1][wi].t_[k], buf, F);
            // NOTE(review): closing braces (original lines 116-118) elided.
119       write_elt(pr.l[i].wc[0], buf, F);
120       write_elt(pr.l[i].wc[1], buf, F);
      // NOTE(review): tail of write_com() — the first line of its signature
      // (taking the commitment and the output buffer) is missing from this
      // extract. Appends the Merkle commitment root digest to |buf|.
125                  const Field &F)
      const {
126     buf.insert(buf.end(), com0.root.data, com0.root.data + Digest::kLength);
      // Tail of write_com_proof() — the first signature line (taking the
      // commitment proof and the output buffer) is missing from this
      // extract. Emits, in order: block y_ldt values, dblock y_dot values,
      // r y_quad_0 values, (dblock - block) y_quad_2 values, nreq Merkle
      // nonces, the nreq*nrow opened entries (run-length encoded), and the
      // Merkle authentication path (count, then digests).
130                        const Field &F)
      const {
131     for (
      size_t i = 0; i < pr.block; ++i) {
132       write_elt(pr.y_ldt[i], buf, F);
134     for (
      size_t i = 0; i < pr.dblock; ++i) {
135       write_elt(pr.y_dot[i], buf, F);
137     for (
      size_t i = 0; i < pr.r; ++i) {
138       write_elt(pr.y_quad_0[i], buf, F);
140     for (
      size_t i = 0; i < pr.dblock - pr.block; ++i) {
141       write_elt(pr.y_quad_2[i], buf, F);
145     for (
      size_t i = 0; i < pr.nreq; ++i) {
146       write_nonce(pr.merkle.nonce[i], buf);
        // Run-length encoding of the opened values: consecutive elements
        // with the same subfield membership form one run (length capped at
        // kMaxRunLen); subfield_run toggles so runs alternate between the
        // compact subfield encoding and the full-field encoding.
        // NOTE(review): the declarations of ci and runlen, the run advance
        // (ci += runlen), and the encoding branch/closing lines (original
        // lines ~148-152, 155, 158-159, 162, 164, 166-168) are elided.
153     bool subfield_run =
        false;
154     while (ci < pr.nreq * pr.nrow) {
156       while (ci + runlen < pr.nreq * pr.nrow && runlen < kMaxRunLen &&
157              F.in_subfield(pr.req[ci + runlen]) == subfield_run) {
160       write_size(runlen, buf);
161       for (
      size_t i = ci; i < ci + runlen; ++i) {
163           write_subfield_elt(pr.req[i], buf, F);
165           write_elt(pr.req[i], buf, F);
169       subfield_run = !subfield_run;
        // Merkle authentication path: digest count first, then each digest.
172     write_size(pr.merkle.path.size(), buf);
173     for (
      size_t i = 0; i < pr.merkle.path.size(); ++i) {
174       write_digest(pr.merkle.path[i], buf);
179 void write_elt(
const Elt &x, std::vector<uint8_t> &buf,
180 const Field &F)
const {
181 uint8_t tmp[Field::kBytes];
182 F.to_bytes_field(tmp, x);
183 buf.insert(buf.end(), tmp, tmp + Field::kBytes);
186 void write_subfield_elt(
const Elt &x, std::vector<uint8_t> &buf,
187 const Field &F)
const {
188 uint8_t tmp[Field::kSubFieldBytes];
189 F.to_bytes_subfield(tmp, x);
190 buf.insert(buf.end(), tmp, tmp + Field::kSubFieldBytes);
193 void write_digest(
const Digest &x, std::vector<uint8_t> &buf)
const {
194 buf.insert(buf.end(), x.data, x.data + Digest::kLength);
197 void write_nonce(
const MerkleNonce &x, std::vector<uint8_t> &buf)
const {
198 buf.insert(buf.end(), x.bytes, x.bytes + MerkleNonce::kLength);
203 void write_size(
size_t g, std::vector<uint8_t> &buf)
const {
204 for (
size_t i = 0; i < 4; ++i) {
205 buf.push_back(
static_cast<uint8_t
>(g & 0xff));
      // Body of read_sc_proof() — the enclosing signature is missing from
      // this extract. Inverse of write_sc_proof(): rejects proofs unless
      // c.logc == 0, then per layer reads the transmitted h-polynomial
      // coefficients and the two claimed wire values wc[0..1].
211     if (c.logc != 0)
      return false;
212     for (
      size_t i = 0; i < pr.l.size(); ++i) {
          // Per-layer byte budget checked up front: (3 - 1) coefficients
          // for each of the 2 polynomials per round plus the 2 wc elements
          // — i.e. one of the three coefficients is not transmitted.
213       size_t needed = (c.l[i].logw * (3 - 1) * 2 + 2) * Field::kBytes;
214       if (!buf.have(needed))
      return false;
215       for (
      size_t wi = 0; wi < c.l[i].logw; ++wi) {
216         for (
      size_t k = 0; k < 3; ++k) {
            // NOTE(review): original lines 217-218 and 221 are elided —
            // presumably the branch selecting which k is read from the
            // buffer and the v.has_value() failure check. Confirm against
            // the full source.
219           for (
      size_t hi = 0; hi < 2; ++hi) {
220             auto v = read_elt(buf, F);
222             pr.l[i].hp[hi][wi].t_[k] = v.value();
            // NOTE(review): lines 223-227 elided; these zero-assignments
            // appear to be the other branch, defaulting the untransmitted
            // coefficient to zero in both polynomials.
228             pr.l[i].hp[0][wi].t_[k] = F.zero();
229             pr.l[i].hp[1][wi].t_[k] = F.zero();
233     for (
      size_t wi = 0; wi < 2; ++wi) {
234       auto v = read_elt(buf, F);
236       pr.l[i].wc[wi] = v.value();
      // Body of read_com() — the signature is outside this extract.
      // Requires Digest::kLength bytes to remain, then reads the Merkle
      // commitment root written by write_com().
247     if (!buf.have(Digest::kLength))
      return false;
248     read_digest(buf, com0.root);
      // Body of read_com_proof() — the signature is outside this extract.
      // Inverse of write_com_proof(). Every batch read is preceded by a
      // buf.have() size check so truncated input fails cleanly instead of
      // over-reading the buffer.
253     if (!buf.have(pr.block * Field::kBytes))
      return false;
254     for (
      size_t i = 0; i < pr.block; ++i) {
255       auto v = read_elt(buf, F);
          // NOTE(review): the v.has_value() failure checks (original lines
          // 256, 266, 276, 286) are elided throughout this extract.
257       pr.y_ldt[i] = v.value();
263     if (!buf.have(pr.dblock * Field::kBytes))
      return false;
264     for (
      size_t i = 0; i < pr.dblock; ++i) {
265       auto v = read_elt(buf, F);
267       pr.y_dot[i] = v.value();
273     if (!buf.have(pr.r * Field::kBytes))
      return false;
274     for (
      size_t i = 0; i < pr.r; ++i) {
275       auto v = read_elt(buf, F);
277       pr.y_quad_0[i] = v.value();
283     if (!buf.have((pr.dblock - pr.block) * Field::kBytes))
      return false;
284     for (
      size_t i = 0; i < pr.dblock - pr.block; ++i) {
285       auto v = read_elt(buf, F);
287       pr.y_quad_2[i] = v.value();
293     if (!buf.have(pr.nreq * MerkleNonce::kLength))
      return false;
294     for (
      size_t i = 0; i < pr.nreq; ++i) {
295       read_nonce(buf, pr.merkle.nonce[i]);
        // Decode the run-length-encoded opened values. Each run length
        // comes from untrusted bytes, so it is bounds-checked against
        // kMaxRunLen and the remaining element count before use.
        // NOTE(review): the declaration of ci (before original line 301)
        // is elided from this extract.
300     bool subfield_run =
        false;
301     while (ci < pr.nreq * pr.nrow) {
302       if (!buf.have(4))
      return false;
303       size_t runlen = read_size(buf);
304       if (runlen >= kMaxRunLen || ci + runlen > pr.nreq * pr.nrow)
      return false;
          // NOTE(review): the branch selecting subfield vs full-field
          // decoding, the value checks, the run advance (ci += runlen) and
          // closing braces (original lines ~305, 309, 311-315, 319,
          // 321-326) are elided from this extract.
306       if (!buf.have(runlen * Field::kSubFieldBytes))
      return false;
307       for (
      size_t i = ci; i < ci + runlen; ++i) {
308         auto v = read_subfield_elt(buf, F);
310         pr.req[i] = v.value();
316       if (!buf.have(runlen * Field::kBytes))
      return false;
317       for (
      size_t i = ci; i < ci + runlen; ++i) {
318         auto v = read_elt(buf, F);
320         pr.req[i] = v.value();
327       subfield_run = !subfield_run;
        // Merkle path: the digest count is untrusted, so it is validated
        // against a lower bound (nreq), a global cap (kMaxNumDigests), the
        // available bytes, and the maximum possible path size BEFORE the
        // vector is resized — preventing attacker-controlled allocation.
330     if (!buf.have(4))
      return false;
331     size_t sz = read_size(buf);
334     if (sz < pr.nreq || sz >= kMaxNumDigests)
      return false;
335     if (!buf.have(sz * Digest::kLength))
      return false;
340     if (sz > pr.nreq * pr.mc_pathlen)
      return false;
342     pr.merkle.path.resize(sz);
343     for (
      size_t i = 0; i < sz; ++i) {
344       read_digest(buf, pr.merkle.path[i]);
349 std::optional<Elt> read_elt(
ReadBuffer &buf,
const Field &F)
const {
350 return F.of_bytes_field(buf.next(Field::kBytes));
353 std::optional<Elt> read_subfield_elt(
ReadBuffer &buf,
const Field &F)
const {
354 return F.of_bytes_subfield(buf.next(Field::kSubFieldBytes));
        // Body of read_digest() — signature (original line 357) outside
        // this extract: copies Digest::kLength bytes from the buffer into
        // x.data.
358     buf.next(Digest::kLength, x.data);
        // Body of read_nonce() — signature (original line ~361) outside
        // this extract: copies MerkleNonce::kLength bytes from the buffer
        // into x.bytes.
362     buf.next(MerkleNonce::kLength, x.bytes);
365 size_t read_size(
ReadBuffer &buf) {
return u32_of_le(buf.next(4)); }