// Hard cap on the number of layers accepted by from_bytes (DoS guard
// against hostile serialized circuits).
72 constexpr static size_t kMaxLayers = 10000;
// Fixed width, in bytes, of every number in this wire format.
77 static constexpr size_t kBytesWritten = 3;
79 explicit CircuitRep(
const Field& f, FieldID field_id)
80 : f_(f), field_id_(field_id) {}
// Serializes circuit `sc_c` into `bytes`: header sizes, interned constant
// pool, delta-encoded quad triples per layer, then a 32-byte circuit id.
// NOTE(review): several original lines are elided in this extract (e.g.
// the version byte, the declaration of `eh`, loop-closing braces) —
// confirm against the full source.
82 void to_bytes(
const Circuit<Field>& sc_c, std::vector<uint8_t>& bytes) {
// Header: field identifier followed by the circuit's scalar dimensions.
85 serialize_field_id(bytes, field_id_);
86 serialize_size(bytes, sc_c.nv);
87 serialize_size(bytes, sc_c.nc);
88 serialize_size(bytes, sc_c.npub_in);
89 serialize_size(bytes, sc_c.subfield_boundary);
90 serialize_size(bytes, sc_c.ninputs);
91 serialize_size(bytes, sc_c.l.size());
// Quad data is staged in a side buffer so the constant pool (whose size
// is only known after interning every coefficient) can be emitted first.
95 std::vector<uint8_t> quadb;
96 quadb.reserve(1 << 24);
97 for (
const auto& layer : sc_c.l) {
98 serialize_size(quadb, layer.logw);
99 serialize_size(quadb, layer.nw);
100 serialize_size(quadb, layer.quad->n_);
// g/h0/h1 wire indices are delta-encoded against the previous quad's
// values, each stream keeping its own predecessor.
102 QuadCorner prevg(0), prevh0(0), prevh1(0);
103 for (
size_t i = 0; i < layer.quad->n_; ++i) {
104 serialize_index(quadb, layer.quad->c_[i].g, prevg);
105 prevg = layer.quad->c_[i].g;
106 serialize_index(quadb, layer.quad->c_[i].h[0], prevh0);
107 prevh0 = layer.quad->c_[i].h[0];
108 serialize_index(quadb, layer.quad->c_[i].h[1], prevh1);
109 prevh1 = layer.quad->c_[i].h[1];
// Coefficients are interned; only the constant-pool index is written.
110 serialize_num(quadb, eh.kstore(layer.quad->c_[i].v));
// Constant pool: count, then each element in the field's byte encoding.
114 serialize_size(bytes, eh.constants_.size());
115 for (
const auto& v : eh.constants_) {
116 uint8_t buf[Field::kBytes];
117 f_.to_bytes_field(buf, v);
118 bytes.insert(bytes.end(), buf, buf + Field::kBytes);
// Append the staged quad data, then the circuit's 32-byte identifier.
121 bytes.insert(bytes.end(), quadb.begin(), quadb.end());
122 bytes.insert(bytes.end(), sc_c.id, sc_c.id + 32);
// Deserializes a circuit from `buf`, validating the field id, layer cap,
// and remaining buffer length before each allocation.
// NOTE(review): the error-return statements (inside the `if` guards), the
// use of `version`, and the Circuit member assignments are elided in this
// extract — presumably each guard returns nullptr; confirm in full source.
127 std::unique_ptr<Circuit<Field>> from_bytes(
ReadBuffer& buf) {
// Minimum size: 8 header numbers (kBytesWritten each) + 1 version byte.
128 if (!buf.have(8 * kBytesWritten + 1)) {
132 uint8_t version = *buf.next(1);
137 size_t fid_as_size_t = read_field_id(buf);
138 size_t nv = read_size(buf);
139 size_t nc = read_size(buf);
140 size_t npub_in = read_size(buf);
141 size_t subfield_boundary = read_size(buf);
142 size_t ninputs = read_size(buf);
143 size_t nl = read_size(buf);
144 size_t numconst = read_size(buf);
// Reject circuits for the wrong field or with inconsistent dimensions.
147 if (fid_as_size_t !=
static_cast<size_t>(field_id_) || npub_in > ninputs ||
148 subfield_boundary > ninputs || nl > kMaxLayers) {
// Overflow-safe length check before reading the constant pool.
153 auto need = checked_mul(numconst, Field::kBytes);
154 if (!need || !buf.have(need.value())) {
158 std::vector<Elt> constants(numconst);
159 for (
size_t i = 0; i < numconst; ++i) {
161 auto vv = f_.of_bytes_field(buf.next(Field::kBytes));
162 if (!vv.has_value()) {
165 constants[i] = vv.value();
168 auto c = std::make_unique<Circuit<Field>>();
177 .subfield_boundary = subfield_boundary,
// Per layer: three sizes, then nq delta-encoded quad corner triples.
183 for (
size_t ly = 0; ly < nl; ++ly) {
185 if (!buf.have(3 * kBytesWritten)) {
189 size_t lw = read_size(buf);
190 size_t nw = read_size(buf);
191 size_t nq = read_size(buf);
// Each quad consumes 4 numbers: g, h0, h1 and a constant-pool index.
194 need = checked_mul(4 * kBytesWritten, nq);
195 if (!need || !buf.have(need.value())) {
199 auto qq = std::make_unique<Quad<Field>>(nq);
200 size_t prevg = 0, prevhl = 0, prevhr = 0;
201 for (
size_t i = 0; i < nq; ++i) {
202 size_t g = read_index(buf, prevg);
207 size_t hl = read_index(buf, prevhl);
208 size_t hr = read_index(buf, prevhr);
// Bound-check decoded wire indices.
// NOTE(review): `hl > nw` admits hl == nw — confirm whether nw is an
// inclusive bound or this should be >= for 0-based indices.
209 if (hl > nw || hr > nw) {
214 size_t vi = read_num(buf);
215 if (vi >= numconst) {
220 QuadCorner(g), {QuadCorner(hl), QuadCorner(hr)}, constants[vi]};
225 .quad = std::unique_ptr<const Quad<Field>>(std::move(qq))});
// Largest raw value expressible in kBytesWritten bytes (2^24 - 1).
// NOTE(review): serialize_num enforces g < kMaxValue (strict), so the
// all-ones value is never written — confirm this reservation is intended.
237 static constexpr uint64_t kMaxValue = (1ULL << (kBytesWritten * 8)) - 1;
// Overflow-checked multiplication.
//
// Returns a * b, or std::nullopt if the product overflows T.  Intended
// for unsigned T (all call sites use size_t), where wraparound is
// well-defined and the division round-trip test (ab / a == b) detects it;
// the a == 0 guard avoids dividing by zero.
template <typename T>
std::optional<T> checked_mul(T a, T b) {
  T ab = a * b;
  if (a == 0 || ab / a == b) {
    return ab;
  }
  return std::nullopt;
}
247 static void serialize_field_id(std::vector<uint8_t>& bytes, FieldID
id) {
248 serialize_num(bytes,
static_cast<size_t>(
id));
251 static void serialize_size(std::vector<uint8_t>& bytes,
size_t sz) {
252 serialize_num(bytes, sz);
263 static void serialize_index(std::vector<uint8_t>& bytes, QuadCorner ind0,
264 QuadCorner prev_ind0) {
265 size_t ind =
static_cast<size_t>(ind0);
266 size_t prev_ind =
static_cast<size_t>(prev_ind0);
271 if (ind >= prev_ind) {
272 serialize_num(bytes, 2u * (ind - prev_ind));
274 serialize_num(bytes, 2u * (prev_ind - ind) + 1u);
278 static void serialize_num(std::vector<uint8_t>& bytes,
size_t g) {
279 check(g < kMaxValue,
"Violating small wire-label assumption");
280 uint8_t tmp[kBytesWritten];
281 for (
size_t i = 0; i < kBytesWritten; ++i) {
282 tmp[i] =
static_cast<uint8_t
>(g & 0xff);
285 bytes.insert(bytes.end(), tmp, tmp + kBytesWritten);
295 static size_t read_field_id(
ReadBuffer& buf) {
return read_num(buf); }
297 static size_t read_size(
ReadBuffer& buf) {
return read_num(buf); }
299 static size_t read_index(
ReadBuffer& buf,
size_t prev_ind) {
300 size_t delta = read_num(buf);
302 return prev_ind - (delta >> 1);
304 return prev_ind + (delta >> 1);
310 const uint8_t* p = buf.next(kBytesWritten);
311 for (
size_t i = 0; i < kBytesWritten; ++i) {
312 r |= (p[i] << (i * 8));
317 check(r < SIZE_MAX,
"Violating small wire-label assumption");
318 return static_cast<size_t>(r);
325 explicit EHash(
const Field& f) : f_(f) {}
326 size_t operator()(
const Elt& k)
const {
return elt_hash(k, f_); }
// Interned constants in first-seen order; kstore returns indices into this.
332 std::vector<Elt> constants_;
334 explicit EltHash(
const Field& f) : f_(f), table_(1000, EHash(f)) {}
336 size_t kstore(
const Elt& k) {
337 if (
auto search = table_.find(k); search != table_.end()) {
338 return search->second;
341 size_t ki = constants_.size();
342 constants_.push_back(k);
// Maps an element to its index in constants_ for O(1) deduplication.
349 std::unordered_map<Elt, size_t, EHash> table_;