-
Notifications
You must be signed in to change notification settings - Fork 78
Expand file tree
/
Copy pathbabe.rs
More file actions
402 lines (345 loc) · 15.2 KB
/
babe.rs
File metadata and controls
402 lines (345 loc) · 15.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
use alloc::vec;
use scale::{Encode, Decode, DecodeAll as _, DecodeWithMemTracking, MaxEncodedLen};
use sp_core::{
serde::{Serialize, Deserialize},
crypto::{Wraps, key_types},
};
use sp_runtime::{
generic::{DigestItem, Digest},
traits::Header as HeaderTrait,
};
use sp_consensus_babe::{
AuthorityId, AuthoritySignature,
digests::{CompatibleDigestItem, PreDigest},
EquivocationProof,
};
use serai_abi::{primitives::network_id::NetworkId, SubstrateHeader as Header};
use super::*;
/*
`pallet-babe` requires `pallet-session` for the `GetCurrentSessionForSubstrate` trait (from its
configuration), but does not require `pallet-session` itself beyond that.
To ensure `pallet_session::Pallet` truly isn't required, our fork of `polkadot-sdk` (as derived
in the `patch-polkadot-sdk` repository) has been patched to _solely_ be this one `trait`. This
means if `pallet-babe` ever updated to require something else from `pallet-session`, we'd observe
the compilation error.
*/
/// Shim providing the current Serai session index to our patched `pallet-session`.
#[doc(hidden)]
pub struct GetCurrentSessionForSubstrate;
impl pallet_session::GetCurrentSessionForSubstrate for GetCurrentSessionForSubstrate {
  // Map Serai's current session to the `u32` Substrate expects, defaulting to `0` before any
  // session has been declared
  fn get() -> u32 {
    match serai_validator_sets_pallet::Pallet::<Runtime>::current_session(NetworkId::Serai) {
      Some(session) => session.0,
      None => 0,
    }
  }
}
// Our fork of `pallet-session` is solely the `GetCurrentSessionForSubstrate` trait (per the
// commentary above), making this the only associated type to define
impl pallet_session::Config for Runtime {
  type Session = GetCurrentSessionForSubstrate;
}
/// The epoch configuration for BABE.
pub(crate) const BABE_GENESIS_EPOCH_CONFIG: sp_consensus_babe::BabeEpochConfiguration =
  sp_consensus_babe::BabeEpochConfiguration {
    /*
      This value is used within the Polkadot ecosystem, the citation being:
      https://research.web3.foundation/Polkadot/protocols/block-production/Babe#6-practical-results
      The research itself is disorganized, but the resulting conclusion is that for $\delta = 1$
      (where $\delta$ corresponds to allowed clock drift in _slots_), `c = 0.22` is resistant to
      clock drift corresponding to one slot while achieving probabilistic consensus over a span
      of years. `(1, 4)` serves as the approximation of that.
      Perfect optimality would suggest we should run their linked Python script, extensively
      discuss the parameters, and return an output ourselves. In reality, we defer entirely to
      Polkadot/Parity/the Web3 Foundation as this is fundamentally defining a bound for
      probabilistic consensus in a synchronous environment and Serai intends to reject all notions
      that the world is synchronous. Any efforts of our own to derive an optimal, secure answer
      would solely yield the obvious answer: Do not use BABE.
      As we are incapable of finding parameters for BABE we would be happy with, we defer to the
      parameters its creators are happy with, while planning its replacement:
      https://github.com/serai-dex/serai/issues/333
    */
    c: (1, 4),
    allowed_slots: sp_consensus_babe::AllowedSlots::PrimaryAndSecondaryPlainSlots,
  };
impl pallet_babe::Config for Runtime {
  // One epoch per session, per the constant's name — TODO confirm against the definition of
  // `SESSION_LENGTH_IN_SLOTS`
  type EpochDuration = ConstU64<{ SESSION_LENGTH_IN_SLOTS }>;
  // The truncation is presumably fine as `TARGET_BLOCK_TIME` in milliseconds fits in a `u64`
  #[expect(clippy::as_conversions, clippy::cast_possible_truncation)]
  type ExpectedBlockTime = ConstU64<{ TARGET_BLOCK_TIME.as_millis() as u64 }>;
  // Epoch changes are triggered externally, not by the pallet itself
  type EpochChangeTrigger = pallet_babe::ExternalTrigger;
  type WeightInfo = ();
  type MaxAuthorities = MaxAuthorities;
  type MaxNominators = ConstU32<1>;
  // Disabled-validator tracking is delegated to the `ValidatorSets` pallet
  type DisabledValidators = ValidatorSets;
  // These are stubbed as while we do handle equivocations, they are not routed through here.
  type KeyOwnerProof = sp_core::Void;
  type EquivocationReportSystem = ();
}
/// A header as used within a BABE equivocation proof.
///
/// BABE equivocation proofs specify a pair of headers, each of unbounded length due to
/// Substrate's policy of including an unbounded list (the digest) within a header. As we wish
/// to bound the size of an equivocation proof, this is unacceptable to use.
///
/// The following shims a header down to only what is relevant for a BABE equivocation proof,
/// enabling a constant bound on the size of an equivocation proof.
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
#[serde(crate = "sp_core::serde")]
struct BabeHeader {
  // The hash of the full header this was derived from
  hash: <Header as HeaderTrait>::Hash,
  /*
    Our `struct`, while achieving a constant size (or at least a constant bound on its size), still
    contains the unbounded `Digest` field due to an API limitation. Specifically, we HAVE to return
    `&Digest` later, so we MUST own a `Digest`.
    We use this very carefully to achieve all our desired properties accordingly.
  */
  digest: Digest,
}
impl BabeHeader {
  /// Construct a new `BabeHeader`.
  ///
  /// This guarantees the contained `Digest` has exactly the layout later assumed: a BABE
  /// pre-digest log followed by a BABE seal log, and nothing else.
  fn new(
    hash: <Header as HeaderTrait>::Hash,
    pre_digest: PreDigest,
    seal: AuthoritySignature,
  ) -> Self {
    let logs = vec![DigestItem::babe_pre_digest(pre_digest), DigestItem::babe_seal(seal)];
    Self { hash, digest: Digest { logs } }
  }
}
// Truncate a full header into a fixed-size header for an `EquivocationProof`.
impl TryFrom<Header> for BabeHeader {
  type Error = ();
  fn try_from(header: Header) -> Result<BabeHeader, ()> {
    let logs = header.digest().logs();
    // The pre-digest may appear anywhere within the logs
    let pre_digest = logs.iter().find_map(CompatibleDigestItem::as_babe_pre_digest).ok_or(())?;
    // The seal must be the final log
    let seal = logs.last().and_then(CompatibleDigestItem::as_babe_seal).ok_or(())?;
    Ok(Self::new(header.hash(), pre_digest, seal))
  }
}
// Encodes as the tuple `(Hash, PreDigest, AuthoritySignature)`. Note no length prefix is
// written for `logs`, and each BABE item is encoded directly (not its `DigestItem` wrapper).
impl Encode for BabeHeader {
  fn size_hint(&self) -> usize {
    // This should be of effectively constant length
    Self::max_encoded_len()
  }
  fn encode_to<T: ?Sized + scale::Output>(&self, dest: &mut T) {
    /*
      This produces an encoding equivalent to `(Hash, PreDigest, AuthoritySignature)` on the
      assumption `logs = vec![pre_digest, seal]`, as is the case with `BabeHeader::new` (the sole
      function which directly constructs a `BabeHeader` with `struct`-initialization syntax).
      If `logs` has a different value, this would produce an undefined encoding.
    */
    self.hash.encode_to(dest);
    for digest in &self.digest.logs {
      if let Some(pre_digest) = digest.as_babe_pre_digest() {
        pre_digest.encode_to(dest);
      } else if let Some(seal) = digest.as_babe_seal() {
        seal.encode_to(dest);
      } else {
        unreachable!("`BabeHeader` had non-BABE `DigestItem`")
      }
    }
  }
}
impl MaxEncodedLen for BabeHeader {
  // As the encoding is `(Hash, PreDigest, AuthoritySignature)`, the bound is the sum of each
  // component's bound
  fn max_encoded_len() -> usize {
    let hash_len = <Header as HeaderTrait>::Hash::max_encoded_len();
    let pre_digest_len = PreDigest::max_encoded_len();
    let seal_len = <AuthoritySignature as Wraps>::Inner::max_encoded_len();
    hash_len + pre_digest_len + seal_len
  }
}
impl Decode for BabeHeader {
fn decode<I: scale::Input>(input: &mut I) -> Result<Self, scale::Error> {
let (hash, pre_digest, seal) =
<(<Header as HeaderTrait>::Hash, PreDigest, AuthoritySignature)>::decode(input)?;
// Each `DigestItem` contains a heap-allocated encoding of its corresponding item
input.on_before_alloc_mem(PreDigest::max_encoded_len())?;
input.on_before_alloc_mem(<AuthoritySignature as Wraps>::Inner::max_encoded_len())?;
// The allocation for the `Digest` `Vec` containing the `DigestItem`s
input.on_before_alloc_mem(2 * core::mem::size_of::<DigestItem>())?;
Ok(BabeHeader::new(hash, pre_digest, seal))
}
}
// Sound as the `Decode` implementation above accounts for its allocations via
// `on_before_alloc_mem`
impl DecodeWithMemTracking for BabeHeader {}
/*
We now have to implement a variety of methods within `trait`s for `BabeHeader`, many of which
are `unimplemented!()`, effecting a runtime panic if they are ever called. Ideally, we could
use one of two tricks to prove these would never be called.
---
We could apply the methodology presented in https://jack.wrenn.fyi/blog/undroppable, defining a
function which would fail to compile if ever called and attempted to be compiled (due to
containing a `const { panic!() }` expression), proving it is never called and the function is
optimized out entirely.
The exact reference for such `const` blocks can be seen with
https://doc.rust-lang.org/1.93.1/reference/expressions/block-expr.html#const-blocks, the relevant
quotes being:
> If the const block expression is executed at runtime, then the constant is guaranteed to be
evaluated, even if its return value is ignored
> If the const block expression is not executed at runtime, it may or may not be evaluated
Those bounds would ensure the expressions aren't "executed" (reachable) at runtime, though
whether or not it'd compile _because_ they aren't reachable would be undefined and up to the
compiler's whims.
Unfortunately, the compiler's whims do not favor us here and such a solution was not demonstrated
to work. This is potentially due to `sp_runtime::traits::Header` being `dyn`-compatible...
Note the above quotes are taken directly from the Rust reference and the Rust reference is
published with MIT and Apache 2.0 licenses. The MIT license is fulfilled here with the following
satisfaction of its terms, as applied to the quotes and only to the quotes, not any of the code
or other associated documentation present in this file.
"""
Copyright (c) 2010 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
---
The other trick would be one seen within https://github.com/Kixunil/dont_panic. In general, one
can call an external function which does not exist and is never linked, causing the linker to
error _unless_ the calling function is itself optimized out as dead code.
Unfortunately for us, Rust links WASM targets with `--allow-undefined`:
https://github.com/rust-lang/rust/blob/80381278a08582356c13b0f52af92d27c567c230
/compiler/rustc_target/src/spec/base/wasm.rs#L40
So such a solution will not produce an error at compile-time but rather Rust will simply produce
an invalid (unusable?) WASM blob.
---
With both of those tricks inapplicable, and lacking a third potential trick, we do use runtime
panics here and are left to extremely carefully monitor the exact usage of `BabeHeader`. To that
end, it is not marked `pub(_)` and is solely used within this file.
*/
impl HeaderTrait for BabeHeader {
  // Mirror the associated types of the actual header type
  type Number = <Header as HeaderTrait>::Number;
  type Hash = <Header as HeaderTrait>::Hash;
  type Hashing = <Header as HeaderTrait>::Hashing;
  /*
    The following stubs panic at runtime if ever called. Per the commentary above, `BabeHeader`
    is solely consumed by `sp_consensus_babe::check_equivocation_proof`, which was reviewed to
    not call any of these.
  */
  fn new(
    _number: Self::Number,
    _extrinsics_root: Self::Hash,
    _state_root: Self::Hash,
    _parent_hash: Self::Hash,
    _digest: Digest,
  ) -> Self {
    unimplemented!()
  }
  fn number(&self) -> &Self::Number {
    unimplemented!()
  }
  fn set_number(&mut self, _number: Self::Number) {
    unimplemented!()
  }
  fn extrinsics_root(&self) -> &Self::Hash {
    unimplemented!()
  }
  fn set_extrinsics_root(&mut self, _extrinsics_root: Self::Hash) {
    unimplemented!()
  }
  fn state_root(&self) -> &Self::Hash {
    unimplemented!()
  }
  fn set_state_root(&mut self, _state_root: Self::Hash) {
    unimplemented!()
  }
  fn parent_hash(&self) -> &Self::Hash {
    unimplemented!()
  }
  fn set_parent_hash(&mut self, _parent_hash: Self::Hash) {
    unimplemented!()
  }
  // Actually used, hence the owned `Digest` field with its carefully-maintained layout
  fn digest(&self) -> &Digest {
    &self.digest
  }
  /*
    This function has the ability to disrupt our assumed, and so far enforced, structure for our
    `digest` field. Unfortunately, it is needed by `sp_consensus_babe::check_equivocation_proof`.
  */
  fn digest_mut(&mut self) -> &mut Digest {
    &mut self.digest
  }
  // Return the hash cached at construction, not a recomputation
  fn hash(&self) -> Self::Hash {
    self.hash
  }
}
/// Submit a BABE equivocation report for the offending authority.
///
/// Returns `None` if either header cannot be truncated to a `BabeHeader`, or if the offender
/// cannot be matched to a validator within the three most recent sessions. Otherwise, the result
/// of `submit_babe_grandpa_equivocation` is propagated.
pub(crate) fn submit_equivocation(equivocation_proof: EquivocationProof<Header>) -> Option<()> {
  // Repack into an `EquivocationProof<BabeHeader>`
  let equivocation_proof = {
    let EquivocationProof { offender, slot, first_header, second_header } = equivocation_proof;
    let first_header = BabeHeader::try_from(first_header).ok()?;
    let second_header = BabeHeader::try_from(second_header).ok()?;
    EquivocationProof { offender, slot, first_header, second_header }
  };
  // The offender is identified by their BABE subkey, derived below from each validator's
  // auxiliary key
  let subkey = equivocation_proof.offender.clone().into_inner();
  // Find the most recent (non-historical) session we can associate this with
  let mut outer_session = ValidatorSets::current_session(NetworkId::Serai);
  let mut completed_iters = 0;
  while let Some(inner_session) = outer_session {
    let Some(validator) =
      ValidatorSets::selected_validators_with_serai_auxiliary_keys(ValidatorSet {
        network: NetworkId::Serai,
        session: inner_session,
      })
      .find_map(|(validator, aux_key)| {
        (serai_validator_sets_pallet::subkey(&aux_key, key_types::BABE) == subkey)
          .then_some(validator)
      })
    else {
      // Not found in this session: step back one session, capping the search at three sessions
      completed_iters += 1;
      outer_session = inner_session.0.checked_sub(1).map(Session).filter(|_| completed_iters < 3);
      continue;
    };
    return super::submit_babe_grandpa_equivocation(
      inner_session,
      validator,
      (BABE_EQUIVOCATION, equivocation_proof).encode().try_into().expect(
        "`EquivocationProof<BabeHeader>` is of constant size less than the bound for a reason",
      ),
    );
  }
  None
}
/// Check the (encoded) equivocation proof.
///
/// This expects the encoding to be delimited and will consider the proof invalid if any bytes are
/// present after.
///
/// This will return `None` if the proof was invalid or otherwise `Some(authority_id)` where
/// `authority_id` is the ID of the offending authority.
#[must_use]
pub(super) fn check_equivocation_proof(mut equivocation_proof: &[u8]) -> Option<AuthorityId> {
  let proof = EquivocationProof::<BabeHeader>::decode_all(&mut equivocation_proof).ok()?;
  /*
    This is the one and only consumer of `BabeHeader as sp_runtime::traits::Header` within the
    entire project, easily confirmed as `BabeHeader` is neither exported nor used beyond this
    file. While the function called here is opaque, it MUST NOT call any functions which
    `BabeHader`'s panicking stubs back. A cursory review of `sp-consensus-babe`'s implementation
    does confirm the soundness of this.
    TODO: Patch `sp-consensus-babe` to not accept `H: Header` but `H: PartialHeader` where
    `impl<H: Header> PartialHeader for H`. This would be backwards compatible but ensure the
    unused API functions are in fact unused. This would also allow removing the stubs which panic
    above and the _extensive_ associated commentary.
  */
  let offender = proof.offender.clone();
  sp_consensus_babe::check_equivocation_proof(proof).then_some(offender)
}