constexpr static size_t kMaxLayers = 10000;

static constexpr size_t kBytesWritten = 3;
explicit CircuitRep(const Field& f, FieldID field_id)
    : f_(f), field_id_(field_id) {}
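// Serialized layout: fixed-width header (field id, nv, nc, npub_in,
// subfield_boundary, ninputs, layer count, constant count), then the constant
// table, then the per-layer quad data, then the 32-byte circuit id.  All
// integers are written as kBytesWritten little-endian bytes.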
void to_bytes(const Circuit<Field>& sc_c, std::vector<uint8_t>& bytes) {
  serialize_field_id(bytes, field_id_);
  serialize_size(bytes, sc_c.nv);
  serialize_size(bytes, sc_c.nc);
  serialize_size(bytes, sc_c.npub_in);
  serialize_size(bytes, sc_c.subfield_boundary);
  serialize_size(bytes, sc_c.ninputs);
  serialize_size(bytes, sc_c.l.size());
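  // Quads are serialized into a scratch buffer first: the constant table must
  // precede the quad data in the output, but the constants are only discovered
  // while walking the quads.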
  EltHash eh(f_);  // deduplicates the field constants referenced by the quads
  std::vector<uint8_t> quadb;
  quadb.reserve(1 << 24);
  for (const auto& layer : sc_c.l) {
    serialize_size(quadb, layer.logw);
    serialize_size(quadb, layer.nw);
    serialize_size(quadb, layer.quad->n_);

    QuadCorner prevg(0), prevh0(0), prevh1(0);
    for (size_t i = 0; i < layer.quad->n_; ++i) {
      serialize_index(quadb, layer.quad->c_[i].g, prevg);
      prevg = layer.quad->c_[i].g;
      serialize_index(quadb, layer.quad->c_[i].h[0], prevh0);
      prevh0 = layer.quad->c_[i].h[0];
      serialize_index(quadb, layer.quad->c_[i].h[1], prevh1);
      prevh1 = layer.quad->c_[i].h[1];
      serialize_num(quadb, eh.kstore(layer.quad->c_[i].v));
    }
  }

  serialize_size(bytes, eh.constants_.size());
  for (const auto& v : eh.constants_) {
    uint8_t buf[Field::kBytes];
    f_.to_bytes_field(buf, v);
    bytes.insert(bytes.end(), buf, buf + Field::kBytes);
  }

  bytes.insert(bytes.end(), quadb.begin(), quadb.end());
  bytes.insert(bytes.end(), sc_c.id, sc_c.id + 32);
}
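// Parses a circuit previously produced by to_bytes().  Returns nullptr on any
// truncated or inconsistent input; when enforce_circuit_id is set, the embedded
// 32-byte id must also match a freshly recomputed circuit_id().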
std::unique_ptr<Circuit<Field>> from_bytes(ReadBuffer& buf,
                                           bool enforce_circuit_id) {
  // Header: one version byte plus eight fixed-width sizes.
  if (!buf.have(8 * kBytesWritten + 1)) {
    return nullptr;
  }
  uint8_t version = *buf.next(1);

  size_t fid_as_size_t = read_field_id(buf);
  size_t nv = read_size(buf);
  size_t nc = read_size(buf);
  size_t npub_in = read_size(buf);
  size_t subfield_boundary = read_size(buf);
  size_t ninputs = read_size(buf);
  size_t nl = read_size(buf);
  size_t numconst = read_size(buf);

  if (fid_as_size_t != static_cast<size_t>(field_id_) || npub_in > ninputs ||
      subfield_boundary > ninputs || nl > kMaxLayers) {
    return nullptr;
  }

  auto need = checked_mul(numconst, Field::kBytes);
  if (!need || !buf.have(need.value())) {
    return nullptr;
  }

  std::vector<Elt> constants(numconst);
  for (size_t i = 0; i < numconst; ++i) {
    auto vv = f_.of_bytes_field(buf.next(Field::kBytes));
    if (!vv.has_value()) {
      return nullptr;
    }
    constants[i] = vv.value();
  }
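  // Rebuild the circuit object, then decode each layer's quad list.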
  auto c = std::make_unique<Circuit<Field>>(
      Circuit<Field>{.nv = nv,
                     .nc = nc,
                     .npub_in = npub_in,
                     .subfield_boundary = subfield_boundary,
                     .ninputs = ninputs});
  for (size_t ly = 0; ly < nl; ++ly) {
    if (!buf.have(3 * kBytesWritten)) {
      return nullptr;
    }
    size_t lw = read_size(buf);
    size_t nw = read_size(buf);
    size_t nq = read_size(buf);

    // Each quad term is four fixed-width numbers: g, h[0], h[1], constant index.
    need = checked_mul(4 * kBytesWritten, nq);
    if (!need || !buf.have(need.value())) {
      return nullptr;
    }

    auto qq = std::make_unique<Quad<Field>>(nq);
    size_t prevg = 0, prevhl = 0, prevhr = 0;
    for (size_t i = 0; i < nq; ++i) {
      size_t g = read_index(buf, prevg);
      prevg = g;
      size_t hl = read_index(buf, prevhl);
      size_t hr = read_index(buf, prevhr);
      if (hl > nw || hr > nw) {
        return nullptr;
      }
      prevhl = hl;
      prevhr = hr;
      size_t vi = read_num(buf);
      if (vi >= numconst) {
        return nullptr;
      }
      qq->c_[i] = {QuadCorner(g), {QuadCorner(hl), QuadCorner(hr)}, constants[vi]};
    }

    c->l.push_back({.logw = lw,
                    .nw = nw,
                    .quad = std::unique_ptr<const Quad<Field>>(std::move(qq))});
  }
  // The trailing 32 bytes are the circuit id computed by the writer.
  if (!buf.have(32)) {
    return nullptr;
  }
  memcpy(c->id, buf.next(32), 32);

  if (enforce_circuit_id) {
    uint8_t idtmp[32];
    circuit_id(idtmp, *c, f_);
    if (memcmp(idtmp, c->id, 32) != 0) {
      return nullptr;
    }
  }

  return c;
}
static constexpr uint64_t kMaxValue = (1ULL << (kBytesWritten * 8)) - 1;
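// Overflow-checked multiplication used when sizing reads from the buffer;
// returns std::nullopt if a * b overflows T.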
template <typename T>
std::optional<T> checked_mul(T a, T b) {
  T ab = a * b;
  if (a == 0 || ab / a == b) return ab;
  return std::nullopt;
}
static void serialize_field_id(std::vector<uint8_t>& bytes, FieldID id) {
  serialize_num(bytes, static_cast<size_t>(id));
}
static void serialize_size(std::vector<uint8_t>& bytes, size_t sz) {
  serialize_num(bytes, sz);
}
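// Indices are delta-encoded against the previously written index: an even
// value 2*d means "forward by d", an odd value 2*d + 1 means "backward by d".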
static void serialize_index(std::vector<uint8_t>& bytes, QuadCorner ind0,
                            QuadCorner prev_ind0) {
  size_t ind = static_cast<size_t>(ind0);
  size_t prev_ind = static_cast<size_t>(prev_ind0);

  if (ind >= prev_ind) {
    serialize_num(bytes, 2u * (ind - prev_ind));
  } else {
    serialize_num(bytes, 2u * (prev_ind - ind) + 1u);
  }
}
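// Writes g as kBytesWritten little-endian bytes; check() enforces the small
// wire-label assumption that g fits in that width.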
static void serialize_num(std::vector<uint8_t>& bytes, size_t g) {
  check(g < kMaxValue, "Violating small wire-label assumption");
  uint8_t tmp[kBytesWritten];
  for (size_t i = 0; i < kBytesWritten; ++i) {
    tmp[i] = static_cast<uint8_t>(g & 0xff);
    g >>= 8;
  }
  bytes.insert(bytes.end(), tmp, tmp + kBytesWritten);
}
static size_t read_field_id(ReadBuffer& buf) { return read_num(buf); }
static size_t read_size(ReadBuffer& buf) { return read_num(buf); }
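// Inverse of serialize_index(): odd deltas step backward from prev_ind, even
// deltas step forward.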
static size_t read_index(ReadBuffer& buf, size_t prev_ind) {
  size_t delta = read_num(buf);
  if (delta & 1) {
    return prev_ind - (delta >> 1);
  }
  return prev_ind + (delta >> 1);
}
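// Reads kBytesWritten little-endian bytes and returns the value as a size_t.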
static size_t read_num(ReadBuffer& buf) {
  uint64_t r = 0;
  const uint8_t* p = buf.next(kBytesWritten);
  for (size_t i = 0; i < kBytesWritten; ++i) {
    r |= static_cast<uint64_t>(p[i]) << (i * 8);
  }
  check(r < SIZE_MAX, "Violating small wire-label assumption");
  return static_cast<size_t>(r);
}
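// EHash adapts the field's element hash for std::unordered_map; EltHash assigns
// each distinct constant a dense index so that quad terms can refer to
// constants by index.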
struct EHash {
  explicit EHash(const Field& f) : f_(f) {}
  size_t operator()(const Elt& k) const { return elt_hash(k, f_); }
  const Field& f_;
};

struct EltHash {
  std::vector<Elt> constants_;

  explicit EltHash(const Field& f) : f_(f), table_(1000, EHash(f)) {}

  // Returns the index of k in constants_, inserting it on first use.
  size_t kstore(const Elt& k) {
    if (auto search = table_.find(k); search != table_.end()) {
      return search->second;
    }
    size_t ki = constants_.size();
    constants_.push_back(k);
    table_[k] = ki;
    return ki;
  }

  const Field& f_;
  std::unordered_map<Elt, size_t, EHash> table_;
};
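// Round-trip sketch (illustrative only: the concrete Field type, the way
// ReadBuffer wraps a byte span, and the CircuitRep instantiation below are
// assumptions, not part of this file):
//
//   CircuitRep<Field> rep(f, field_id);
//   std::vector<uint8_t> bytes;
//   rep.to_bytes(circuit, bytes);
//
//   ReadBuffer rb(bytes.data(), bytes.size());  // constructor shape assumed
//   auto decoded = rep.from_bytes(rb, /*enforce_circuit_id=*/true);
//   // decoded == nullptr on malformed input or circuit-id mismatch.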