//! Utilities for proof post-processing of `GeneralPCD`s, i.e. of SimpleMarlin and
//! FinalDarlin PCDs, using batch verification and aggregation of their dlog hard parts.
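//!
//! A typical flow (a sketch only: `pcds`, `vks`, the dlog committer/verifier keys and
//! `rng` are assumed to be already set up, and the concrete `G1`, `G2`, `D` type
//! parameters are placeholders, not defined by this module):
//! ```ignore
//! // Aggregator side: succinctly verify the PCDs and produce one (optional)
//! // accumulation proof per group of the curve cycle.
//! let (proof_g1, proof_g2) =
//!     accumulate_proofs::<G1, G2, D>(&pcds, &vks, &g1_ck, &g2_ck)?;
//!
//! // Verifier side: re-run the succinct checks and fully verify the accumulation proofs.
//! let ok = verify_aggregated_proofs::<G1, G2, D, _>(
//!     &pcds, &vks, &proof_g1, &proof_g2, &g1_vk, &g2_vk, &mut rng,
//! )?;
//! assert!(ok);
//! ```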
use crate::darlin::{
    accumulators::{
        dlog::{DLogItem, DLogItemAccumulator},
        AccumulationProof, ItemAccumulator,
    },
    pcd::{DualPCDVerifierKey, GeneralPCD, PCD},
};
use algebra::{AffineCurve, ToConstraintField};
use digest::Digest;
use marlin::VerifierKey as MarlinVerifierKey;
use poly_commit::ipa_pc::{
    CommitterKey as DLogCommitterKey, InnerProductArgPC, VerifierKey as DLogVerifierKey,
};
use rand::RngCore;
use rayon::prelude::*;

/// Given a set of PCDs, their corresponding Marlin verification keys, and the DLogCommitterKeys
/// over the two groups of a curve cycle, compute and return the associated accumulators via
/// succinct verification of the PCDs.
/// In case of failure, return the indices of the proofs that caused the failure (whenever it is
/// possible to establish them).
/// The PCDs are allowed to use different size restrictions of the DLogCommitterKeys `g1_ck` and `g2_ck`.
pub(crate) fn get_accumulators<G1, G2, D: Digest>(
    pcds: &[GeneralPCD<G1, G2, D>],
    vks: &[MarlinVerifierKey<G1::ScalarField, InnerProductArgPC<G1, D>>],
    g1_ck: &DLogCommitterKey<G1>,
    g2_ck: &DLogCommitterKey<G2>,
) -> Result<(Vec<DLogItem<G1>>, Vec<DLogItem<G2>>), Option<Vec<usize>>>
where
    G1: AffineCurve<BaseField = <G2 as AffineCurve>::ScalarField>
        + ToConstraintField<<G2 as AffineCurve>::ScalarField>,
    G2: AffineCurve<BaseField = <G1 as AffineCurve>::ScalarField>
        + ToConstraintField<<G1 as AffineCurve>::ScalarField>,
{
    let accumulators_time = start_timer!(|| "Compute accumulators");

    if pcds.is_empty() {
        log::error!("No proof to aggregate");
        return Err(None);
    }

    if vks.is_empty() {
        log::error!("No vk specified");
        return Err(None);
    }

    if pcds.len() != vks.len() {
        log::error!(
            "Proofs and vks length mismatch. Proofs: {}, Vks: {}",
            pcds.len(),
            vks.len()
        );
        return Err(None);
    }
    let (accs, failing_indices): (Vec<_>, Vec<_>) = pcds
        .into_par_iter()
        .zip(vks)
        .enumerate()
        .map(|(i, (pcd, vk))| {
            // Recall that we use FinalDarlinVerifierKeys to handle
            // polymorphic verification of FinalDarlin/SimpleMarlin PCDs
            let vk = DualPCDVerifierKey::<G1, G2, D> {
                final_darlin_vk: vk,
                dlog_vks: (g1_ck, g2_ck),
            };
            // No need to trim the vk here to the specific segment size used
            // to generate the proof for this pcd, as the IPA succinct_check
            // function doesn't use vk.comm_key at all.
            pcd.succinct_verify(&vk).map_err(|e| {
                log::error!(
                    "Failed verification (succinct part) for proof with index {}: {:?}",
                    i,
                    e
                );
                i
            })
        })
        .partition(Result::is_ok);
    end_timer!(accumulators_time);

    let accs = accs.into_iter().map(Result::unwrap).collect::<Vec<_>>();
    let mut failing_indices = failing_indices
        .into_iter()
        .map(Result::unwrap_err)
        .collect::<Vec<_>>();
    if failing_indices.is_empty() {
        // All succinct verifications passed: collect and return the accumulators
        let accs_g1 = accs
            .iter()
            .flat_map(|acc| acc.0.clone())
            .collect::<Vec<_>>();
        let accs_g2 = accs.into_iter().flat_map(|acc| acc.1).collect::<Vec<_>>();
        Ok((accs_g1, accs_g2))
    } else {
        // Otherwise, collect and return as error the indices of all the failing proofs,
        // sorted in ascending order
        failing_indices.sort_unstable();
        Err(Some(failing_indices))
    }
}

/// Given a set of PCDs, their corresponding Marlin verification keys, and the DLogCommitterKeys
/// from both groups of our EC cycle, compute and return an accumulation proof for
/// the dlog accumulators ("items") in each group.
/// In case of failure, returns the indices of the proofs which caused it (if possible).
/// The PCDs are allowed to use different size restrictions of the DLogCommitterKeys
/// `g1_ck` and `g2_ck`.
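/// # Example
///
/// A minimal sketch, assuming `pcds`, `vks` and the two committer keys `g1_ck`, `g2_ck`
/// are already available (the concrete `G1`, `G2`, `D` type parameters are placeholders):
/// ```ignore
/// match accumulate_proofs::<G1, G2, D>(&pcds, &vks, &g1_ck, &g2_ck) {
///     // One (optional) accumulation proof per group of the cycle.
///     Ok((proof_g1, proof_g2)) => { /* ship the proofs to the verifier */ }
///     // Some proofs failed the succinct check: their indices, if known.
///     Err(Some(bad_indices)) => { /* drop or re-request the offending proofs */ }
///     // Failure that cannot be attributed to specific proofs.
///     Err(None) => { /* reject the whole batch */ }
/// }
/// ```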
pub fn accumulate_proofs<G1, G2, D: Digest>(
    pcds: &[GeneralPCD<G1, G2, D>],
    vks: &[MarlinVerifierKey<G1::ScalarField, InnerProductArgPC<G1, D>>],
    g1_ck: &DLogCommitterKey<G1>,
    g2_ck: &DLogCommitterKey<G2>,
) -> Result<(Option<AccumulationProof<G1>>, Option<AccumulationProof<G2>>), Option<Vec<usize>>>
where
    G1: AffineCurve<BaseField = <G2 as AffineCurve>::ScalarField>
        + ToConstraintField<<G2 as AffineCurve>::ScalarField>,
    G2: AffineCurve<BaseField = <G1 as AffineCurve>::ScalarField>
        + ToConstraintField<<G1 as AffineCurve>::ScalarField>,
{
    let accumulation_time = start_timer!(|| "Accumulate proofs");

    // Get accumulators from pcds
    let (accs_g1, accs_g2) =
        get_accumulators::<G1, G2, D>(pcds, vks, g1_ck, g2_ck).map_err(|e| {
            end_timer!(accumulation_time);
            e
        })?;

    // Create accumulation proofs
    let acc_proof_g1 = if accs_g1.is_empty() {
        None
    } else {
        Some(
            DLogItemAccumulator::<G1, D>::accumulate_items(g1_ck, accs_g1)
                .map_err(|e| {
                    log::error!("Error during accumulation of proofs over G1: {:?}", e);
                    end_timer!(accumulation_time);
                    None
                })?
                .1,
        )
    };

    let acc_proof_g2 = if accs_g2.is_empty() {
        None
    } else {
        Some(
            DLogItemAccumulator::<G2, D>::accumulate_items(g2_ck, accs_g2)
                .map_err(|e| {
                    log::error!("Error during accumulation of proofs over G2: {:?}", e);
                    end_timer!(accumulation_time);
                    None
                })?
                .1,
        )
    };

    end_timer!(accumulation_time);

    Ok((acc_proof_g1, acc_proof_g2))
}

/// Verifies a set of PCDs augmented by an accumulation proof for their
/// dlog items. (This is cheaper than batch verification, as it doesn't need to
/// do any batching of witnesses.)
/// In case of failure, returns the indices of the proofs which caused it (if possible).
/// The PCDs are allowed to use different size restrictions of the DLogVerifierKeys
/// `g1_vk` and `g2_vk`.
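/// # Example
///
/// A minimal sketch, assuming `pcds`, `vks`, the verifier keys and an `rng` are already
/// available, and that `(proof_g1, proof_g2)` was produced by `accumulate_proofs` on the
/// same set of PCDs (the concrete `G1`, `G2`, `D` type parameters are placeholders):
/// ```ignore
/// let ok = verify_aggregated_proofs::<G1, G2, D, _>(
///     &pcds,
///     &vks,
///     &proof_g1,
///     &proof_g2,
///     &g1_vk,
///     &g2_vk,
///     &mut rng,
/// )
/// .expect("succinct verification failed for some proofs");
/// assert!(ok, "hard part of the aggregated verification failed");
/// ```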
pub fn verify_aggregated_proofs<G1, G2, D: Digest, R: RngCore>(
    pcds: &[GeneralPCD<G1, G2, D>],
    vks: &[MarlinVerifierKey<G1::ScalarField, InnerProductArgPC<G1, D>>],
    accumulation_proof_g1: &Option<AccumulationProof<G1>>,
    accumulation_proof_g2: &Option<AccumulationProof<G2>>,
    g1_vk: &DLogVerifierKey<G1>,
    g2_vk: &DLogVerifierKey<G2>,
    rng: &mut R,
) -> Result<bool, Option<Vec<usize>>>
where
    G1: AffineCurve<BaseField = <G2 as AffineCurve>::ScalarField>
        + ToConstraintField<<G2 as AffineCurve>::ScalarField>,
    G2: AffineCurve<BaseField = <G1 as AffineCurve>::ScalarField>
        + ToConstraintField<<G1 as AffineCurve>::ScalarField>,
{
    let verification_time = start_timer!(|| "Verify aggregated proofs");

    // Do the succinct verification of the PCDs and get their accumulators
    let (accs_g1, accs_g2) =
        get_accumulators::<G1, G2, D>(pcds, vks, g1_vk, g2_vk).map_err(|e| {
            end_timer!(verification_time);
            e
        })?;

    // Fully verify the dlog aggregation proof in G1, if present.
    let result_accumulate_g1 = if accumulation_proof_g1.is_some() {
        let dummy_g1 = DLogItem::<G1>::default();
        DLogItemAccumulator::<G1, D>::verify_accumulated_items::<R>(
            &dummy_g1,
            g1_vk,
            accs_g1,
            accumulation_proof_g1.as_ref().unwrap(),
            rng,
        )
        .map_err(|e| {
            log::error!(
                "Error during verification (hard part) of accumulated proof over G1: {:?}",
                e
            );
            end_timer!(verification_time);
            None
        })?
    } else {
        true
    };

    // Fully verify the dlog aggregation proof in G2, if present.
    let result_accumulate_g2 = if accumulation_proof_g2.is_some() {
        let dummy_g2 = DLogItem::<G2>::default();
        DLogItemAccumulator::<G2, D>::verify_accumulated_items::<R>(
            &dummy_g2,
            g2_vk,
            accs_g2,
            accumulation_proof_g2.as_ref().unwrap(),
            rng,
        )
        .map_err(|e| {
            log::error!(
                "Error during verification (hard part) of accumulated proof over G2: {:?}",
                e
            );
            end_timer!(verification_time);
            None
        })?
    } else {
        true
    };

    end_timer!(verification_time);

    Ok(result_accumulate_g1 && result_accumulate_g2)
}

/// Batch verification of a set of FinalDarlin/SimpleMarlin PCDs.
/// The succinct parts are verified proof by proof, while the dlog items (in both of the
/// groups G1 and G2) are verified in batch.
/// In case of failure, returns the indices of the proofs which caused it (if possible).
/// The PCDs are allowed to use different size restrictions of the DLogVerifierKeys
/// `g1_vk` and `g2_vk`.
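/// # Example
///
/// A minimal sketch, assuming `pcds`, `vks`, the verifier keys and an `rng` are already
/// available (the concrete `G1`, `G2`, `D` type parameters are placeholders); unlike
/// `verify_aggregated_proofs`, no accumulation proof is needed here:
/// ```ignore
/// match batch_verify_proofs::<G1, G2, D, _>(&pcds, &vks, &g1_vk, &g2_vk, &mut rng) {
///     Ok(true) => println!("all proofs verified"),
///     Ok(false) => println!("batch check of the dlog hard parts failed"),
///     // Indices of the proofs that failed the succinct check, if known.
///     Err(maybe_indices) => println!("succinct verification failed: {:?}", maybe_indices),
/// }
/// ```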
pub fn batch_verify_proofs<G1, G2, D: Digest, R: RngCore>(
    pcds: &[GeneralPCD<G1, G2, D>],
    vks: &[MarlinVerifierKey<G1::ScalarField, InnerProductArgPC<G1, D>>],
    g1_vk: &DLogVerifierKey<G1>,
    g2_vk: &DLogVerifierKey<G2>,
    rng: &mut R,
) -> Result<bool, Option<Vec<usize>>>
where
    G1: AffineCurve<BaseField = <G2 as AffineCurve>::ScalarField>
        + ToConstraintField<<G2 as AffineCurve>::ScalarField>,
    G2: AffineCurve<BaseField = <G1 as AffineCurve>::ScalarField>
        + ToConstraintField<<G1 as AffineCurve>::ScalarField>,
{
    let verification_time = start_timer!(|| "Batch verify proofs");

    // Do the succinct verification of the PCDs and get their accumulators
    let (accs_g1, accs_g2) =
        get_accumulators::<G1, G2, D>(pcds, vks, g1_vk, g2_vk).map_err(|e| {
            end_timer!(verification_time);
            e
        })?;

    // Verify accumulators (hard part)
    let result_g1 = if accs_g1.is_empty() {
        true
    } else {
        DLogItemAccumulator::<G1, D>::check_items::<R>(g1_vk, &accs_g1, rng).map_err(|e| {
            log::error!(
                "Error during batch verification (hard part) of proofs over G1: {:?}",
                e
            );
            end_timer!(verification_time);
            None
        })?
    };

    let result_g2 = if accs_g2.is_empty() {
        true
    } else {
        DLogItemAccumulator::<G2, D>::check_items::<R>(g2_vk, &accs_g2, rng).map_err(|e| {
            log::error!(
                "Error during batch verification (hard part) of proofs over G2: {:?}",
                e
            );
            end_timer!(verification_time);
            None
        })?
    };

    end_timer!(verification_time);

    Ok(result_g1 && result_g2)
}