Mirror of https://gitlab.com/pulsechaincom/lighthouse-pulse.git (synced 2024-12-22 11:41:28 +00:00)
VC: accept unknown fields in chain spec (#2277)
## Issue Addressed

Closes #2274

## Proposed Changes

* Modify the `YamlConfig` to collect unknown fields into an `extra_fields` map, instead of failing hard.
* Log a debug message if there are extra fields returned to the VC from one of its BNs.

This restores Lighthouse's compatibility with Teku beacon nodes (and therefore Infura).
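For context, the core of the change is serde's catch-all pattern: drop `deny_unknown_fields` and add a `#[serde(flatten)]` map that absorbs unrecognised keys. Below is a minimal, self-contained sketch of that pattern (assuming the `serde` derive feature and the `serde_yaml` crate; `SpecConfig` and the sample keys are illustrative stand-ins, not the real `YamlConfig`):

```rust
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

// Known fields are matched as usual; anything else is swept into `extra_fields`,
// because `#[serde(flatten)]` on a map captures all remaining keys and there is
// no `deny_unknown_fields` attribute to make them an error.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
struct SpecConfig {
    config_name: String,
    #[serde(flatten)]
    extra_fields: HashMap<String, String>,
}

fn main() {
    let yaml = "CONFIG_NAME: mainnet\nSOME_FUTURE_CONSTANT: '123'\n";
    let cfg: SpecConfig = serde_yaml::from_str(yaml).expect("valid yaml");

    // Unknown keys are preserved instead of causing a hard failure.
    assert_eq!(
        cfg.extra_fields.get("SOME_FUTURE_CONSTANT"),
        Some(&"123".to_string())
    );

    // The VC-side behaviour in this change is to log such fields at debug
    // level rather than reject the beacon node.
    if !cfg.extra_fields.is_empty() {
        println!("beacon spec includes unknown fields: {:?}", cfg.extra_fields);
    }
}
```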
parent 9a71a7e486
commit f9d60f5436
Makefile: 6 lines changed
@@ -121,7 +121,11 @@ test-full: cargo-fmt test-release test-debug test-ef
 # Lints the code for bad style and potentially unsafe arithmetic using Clippy.
 # Clippy lints are opt-in per-crate for now. By default, everything is allowed except for performance and correctness lints.
 lint:
-	cargo clippy --all --tests -- -D warnings
+	cargo clippy --all --tests -- \
+		-D warnings \
+		-A clippy::from-over-into \
+		-A clippy::upper-case-acronyms \
+		-A clippy::vec-init-then-push
 
 # Runs the makefile in the `ef_tests` repo.
 #
@@ -10,7 +10,7 @@ use eth2_keystore::Keystore;
 use eth2_network_config::Eth2NetworkConfig;
 use safe_arith::SafeArith;
 use slot_clock::{SlotClock, SystemTimeSlotClock};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::time::Duration;
 use tokio::time::sleep;
 use types::{ChainSpec, Epoch, EthSpec, Fork, VoluntaryExit};
@@ -91,7 +91,7 @@ pub fn cli_run<E: EthSpec>(matches: &ArgMatches, env: Environment<E>) -> Result<
 
 /// Gets the keypair and validator_index for every validator and calls `publish_voluntary_exit` on it.
 async fn publish_voluntary_exit<E: EthSpec>(
-    keystore_path: &PathBuf,
+    keystore_path: &Path,
     password_file_path: Option<&PathBuf>,
     client: &BeaconNodeHttpClient,
     spec: &ChainSpec,
@@ -310,7 +310,7 @@ fn get_current_epoch<E: EthSpec>(genesis_time: u64, spec: &ChainSpec) -> Option<
 /// If the `password_file_path` is Some, unlock keystore using password in given file
 /// otherwise, prompts user for a password to unlock the keystore.
 fn load_voting_keypair(
-    voting_keystore_path: &PathBuf,
+    voting_keystore_path: &Path,
     password_file_path: Option<&PathBuf>,
     stdin_inputs: bool,
 ) -> Result<Keypair, String> {
@@ -53,7 +53,7 @@ impl Inner {
     pub fn from_bytes(bytes: &[u8], config: Config, spec: ChainSpec) -> Result<Self, String> {
         let ssz_cache = SszEth1Cache::from_ssz_bytes(bytes)
             .map_err(|e| format!("Ssz decoding error: {:?}", e))?;
-        Ok(ssz_cache.to_inner(config, spec)?)
+        ssz_cache.to_inner(config, spec)
     }
 
     /// Returns a reference to the specification.
@@ -36,7 +36,7 @@ use ssz::Encode;
 use std::collections::HashSet;
 use std::fs::File;
 use std::io::Write;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::{
     collections::VecDeque,
     marker::PhantomData,
@@ -1336,7 +1336,7 @@ impl<TSpec: EthSpec> std::convert::From<Response<TSpec>> for RPCCodedResponse<TS
 }
 
 /// Persist metadata to disk
-pub fn save_metadata_to_disk<E: EthSpec>(dir: &PathBuf, metadata: MetaData<E>, log: &slog::Logger) {
+pub fn save_metadata_to_disk<E: EthSpec>(dir: &Path, metadata: MetaData<E>, log: &slog::Logger) {
     let _ = std::fs::create_dir_all(&dir);
     match File::create(dir.join(METADATA_FILENAME))
         .and_then(|mut f| f.write_all(&metadata.as_ssz_bytes()))
@@ -474,7 +474,7 @@ fn strip_peer_id(addr: &mut Multiaddr) {
 
 /// Load metadata from persisted file. Return default metadata if loading fails.
 fn load_or_build_metadata<E: EthSpec>(
-    network_dir: &std::path::PathBuf,
+    network_dir: &std::path::Path,
     log: &slog::Logger,
 ) -> MetaData<E> {
     // Default metadata
@@ -10,7 +10,7 @@ use std::cmp::max;
 use std::fs;
 use std::net::{IpAddr, Ipv4Addr, ToSocketAddrs};
 use std::net::{TcpListener, UdpSocket};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::str::FromStr;
 use types::{ChainSpec, Checkpoint, Epoch, EthSpec, Hash256, PublicKeyBytes, GRAFFITI_BYTES_LEN};
 
@@ -422,7 +422,7 @@ pub fn get_config<E: EthSpec>(
 pub fn set_network_config(
     config: &mut NetworkConfig,
     cli_args: &ArgMatches,
-    data_dir: &PathBuf,
+    data_dir: &Path,
     log: &Logger,
     use_listening_port_as_enr_port_by_default: bool,
 ) -> Result<(), String> {
@@ -660,7 +660,7 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
         partial_state.load_historical_roots(&self.cold_db, &self.spec)?;
         partial_state.load_randao_mixes(&self.cold_db, &self.spec)?;
 
-        Ok(partial_state.try_into()?)
+        partial_state.try_into()
     }
 
     /// Load a restore point state by its `restore_point_index`.
@@ -35,7 +35,7 @@ fn testnet_url() -> String {
 fn main() {
     match get_all_contracts() {
         Ok(()) => (),
-        Err(e) => panic!(e),
+        Err(e) => panic!("{}", e),
     }
 }
 
@@ -68,7 +68,7 @@ pub fn decode_eth1_tx_data(
             )
             .map_err(DecodeError::SszDecodeError)?
-        };
+        };
     }
 
     let root = decode_token!(Hash256, to_fixed_bytes);
 
@@ -38,6 +38,10 @@ mod test {
     fn version_formatting() {
         let re = Regex::new(r"^Lighthouse/v[0-9]+\.[0-9]+\.[0-9]+(-rc.[0-9])?-[[:xdigit:]]{7}\+?$")
             .unwrap();
-        assert!(re.is_match(VERSION), VERSION);
+        assert!(
+            re.is_match(VERSION),
+            "version doesn't match regex: {}",
+            VERSION
+        );
     }
 }
@@ -149,7 +149,7 @@ mod post {
         let r = run_testcase(u).unwrap_err();
 
         for msg in msgs.iter() {
-            assert!(r.contains(msg), format!("{:?} should contain {:?}", r, msg));
+            assert!(r.contains(msg), "{:?} should contain {:?}", r, msg);
         }
     };
 
@@ -89,7 +89,7 @@ impl ValidatorDir {
     }
 
     /// Returns the `dir` provided to `Self::open`.
-    pub fn dir(&self) -> &PathBuf {
+    pub fn dir(&self) -> &Path {
         &self.dir
     }
 
@@ -204,7 +204,7 @@ impl ValidatorDir {
 
 /// Attempts to load and decrypt a Keypair given path to the keystore.
 pub fn unlock_keypair<P: AsRef<Path>>(
-    keystore_path: &PathBuf,
+    keystore_path: &Path,
     password_dir: P,
 ) -> Result<Keypair, Error> {
     let keystore = Keystore::from_json_reader(
@@ -229,8 +229,8 @@ pub fn unlock_keypair<P: AsRef<Path>>(
 
 /// Attempts to load and decrypt a Keypair given path to the keystore and the password file.
 pub fn unlock_keypair_from_password_path(
-    keystore_path: &PathBuf,
-    password_path: &PathBuf,
+    keystore_path: &Path,
+    password_path: &Path,
 ) -> Result<Keypair, Error> {
     let keystore = Keystore::from_json_reader(
         &mut OpenOptions::new()
@@ -242,7 +242,7 @@ pub fn unlock_keypair_from_password_path(
     .map_err(Error::UnableToReadKeystore)?;
 
     let password: PlainText = read(password_path)
-        .map_err(|_| Error::UnableToReadPassword(password_path.clone()))?
+        .map_err(|_| Error::UnableToReadPassword(password_path.into()))?
         .into();
     keystore
         .decrypt_keypair(password.as_bytes())
@@ -182,7 +182,7 @@ fn concurrency() {
     let harness = Harness::new();
 
     let val_dir = harness.create_and_test(&BuildConfig::default());
-    let path = val_dir.dir().clone();
+    let path = val_dir.dir().to_owned();
 
     // Should not re-open whilst opened after build.
     ValidatorDir::open(&path).unwrap_err();
@@ -55,8 +55,8 @@ impl<'de> Visitor<'de> for HexVisitor {
     where
         E: de::Error,
     {
-        Ok(hex::decode(value.trim_start_matches("0x"))
-            .map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))?)
+        hex::decode(value.trim_start_matches("0x"))
+            .map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
     }
 }
 
@@ -353,7 +353,7 @@ macro_rules! impl_decodable_for_u8_array {
                     Err(DecodeError::InvalidByteLength { len, expected })
                 } else {
                     let mut array: [u8; $len] = [0; $len];
-                    array.copy_from_slice(&bytes[..]);
+                    array.copy_from_slice(bytes);
 
                     Ok(array)
                 }
@@ -108,9 +108,9 @@ impl<T: Default, N: Unsigned> From<Vec<T>> for FixedVector<T, N> {
     }
 }
 
-impl<T, N: Unsigned> Into<Vec<T>> for FixedVector<T, N> {
-    fn into(self) -> Vec<T> {
-        self.vec
+impl<T, N: Unsigned> From<FixedVector<T, N>> for Vec<T> {
+    fn from(vector: FixedVector<T, N>) -> Vec<T> {
+        vector.vec
     }
 }
 
@@ -120,9 +120,9 @@ impl<T, N: Unsigned> From<Vec<T>> for VariableList<T, N> {
     }
 }
 
-impl<T, N: Unsigned> Into<Vec<T>> for VariableList<T, N> {
-    fn into(self) -> Vec<T> {
-        self.vec
+impl<T, N: Unsigned> From<VariableList<T, N>> for Vec<T> {
+    fn from(list: VariableList<T, N>) -> Vec<T> {
+        list.vec
     }
 }
 
@@ -1,6 +1,7 @@
 use crate::*;
 use int_to_bytes::int_to_bytes4;
 use serde_derive::{Deserialize, Serialize};
+use std::collections::HashMap;
 use std::fs::File;
 use std::path::Path;
 use tree_hash::TreeHash;
@@ -449,10 +450,8 @@ mod tests {
 }
 
 /// YAML config file as defined by the spec.
-///
-/// Spec v0.12.3
 #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
-#[serde(rename_all = "UPPERCASE", deny_unknown_fields)]
+#[serde(rename_all = "UPPERCASE")]
 pub struct YamlConfig {
     pub config_name: String,
     // ChainSpec
@@ -576,6 +575,10 @@ pub struct YamlConfig {
     #[serde(with = "serde_utils::quoted_u64")]
     deposit_network_id: u64,
     deposit_contract_address: Address,
+
+    // Extra fields (could be from a future hard-fork that we don't yet know).
+    #[serde(flatten)]
+    pub extra_fields: HashMap<String, String>,
 }
 
 impl Default for YamlConfig {
@@ -671,6 +674,8 @@ impl YamlConfig {
             deposit_chain_id: spec.deposit_chain_id,
             deposit_network_id: spec.deposit_network_id,
             deposit_contract_address: spec.deposit_contract_address,
+
+            extra_fields: HashMap::new(),
         }
     }
 
@@ -849,6 +854,31 @@ mod yaml_tests {
         assert_eq!(from, yamlconfig);
     }
 
+    #[test]
+    fn extra_fields_round_trip() {
+        let tmp_file = NamedTempFile::new().expect("failed to create temp file");
+        let writer = OpenOptions::new()
+            .read(false)
+            .write(true)
+            .open(tmp_file.as_ref())
+            .expect("error opening file");
+        let mainnet_spec = ChainSpec::mainnet();
+        let mut yamlconfig = YamlConfig::from_spec::<MainnetEthSpec>(&mainnet_spec);
+        let (k1, v1) = ("SAMPLE_HARDFORK_KEY1", "123456789");
+        let (k2, v2) = ("SAMPLE_HARDFORK_KEY2", "987654321");
+        yamlconfig.extra_fields.insert(k1.into(), v1.into());
+        yamlconfig.extra_fields.insert(k2.into(), v2.into());
+        serde_yaml::to_writer(writer, &yamlconfig).expect("failed to write or serialize");
+
+        let reader = OpenOptions::new()
+            .read(true)
+            .write(false)
+            .open(tmp_file.as_ref())
+            .expect("error while opening the file");
+        let from: YamlConfig = serde_yaml::from_reader(reader).expect("error while deserializing");
+        assert_eq!(from, yamlconfig);
+    }
+
     #[test]
     fn apply_to_spec() {
         let mut spec = ChainSpec::minimal();
@@ -19,35 +19,35 @@ where
     aggregate: Cow<'a, GenericAggregateSignature<Pub, AggPub, Sig, AggSig>>,
 }
 
-impl<'a, Pub, AggPub, Sig, AggSig> Into<WrappedSignature<'a, Pub, AggPub, Sig, AggSig>>
-    for &'a GenericSignature<Pub, Sig>
+impl<'a, Pub, AggPub, Sig, AggSig> From<&'a GenericSignature<Pub, Sig>>
+    for WrappedSignature<'a, Pub, AggPub, Sig, AggSig>
 where
     Pub: TPublicKey + Clone,
     AggPub: Clone,
     Sig: TSignature<Pub> + Clone,
     AggSig: TAggregateSignature<Pub, AggPub, Sig> + Clone,
 {
-    fn into(self) -> WrappedSignature<'a, Pub, AggPub, Sig, AggSig> {
+    fn from(sig: &'a GenericSignature<Pub, Sig>) -> Self {
         let mut aggregate: GenericAggregateSignature<Pub, AggPub, Sig, AggSig> =
             GenericAggregateSignature::infinity();
-        aggregate.add_assign(self);
+        aggregate.add_assign(sig);
         WrappedSignature {
             aggregate: Cow::Owned(aggregate),
         }
     }
 }
 
-impl<'a, Pub, AggPub, Sig, AggSig> Into<WrappedSignature<'a, Pub, AggPub, Sig, AggSig>>
-    for &'a GenericAggregateSignature<Pub, AggPub, Sig, AggSig>
+impl<'a, Pub, AggPub, Sig, AggSig> From<&'a GenericAggregateSignature<Pub, AggPub, Sig, AggSig>>
+    for WrappedSignature<'a, Pub, AggPub, Sig, AggSig>
 where
     Pub: TPublicKey + Clone,
     AggPub: Clone,
     Sig: Clone,
     AggSig: Clone,
 {
-    fn into(self) -> WrappedSignature<'a, Pub, AggPub, Sig, AggSig> {
+    fn from(aggregate: &'a GenericAggregateSignature<Pub, AggPub, Sig, AggSig>) -> Self {
         WrappedSignature {
-            aggregate: Cow::Borrowed(self),
+            aggregate: Cow::Borrowed(aggregate),
         }
     }
 }
@@ -1,6 +1,7 @@
 #![cfg(feature = "ef_tests")]
 
 use ef_tests::*;
+use std::collections::HashMap;
 use std::path::PathBuf;
 use types::*;
 
@@ -17,6 +18,11 @@ fn config_test<E: EthSpec + TypeName>() {
     let yaml_from_spec = YamlConfig::from_spec::<E>(&spec);
     assert_eq!(yaml_config.apply_to_chain_spec::<E>(&spec), Some(spec));
     assert_eq!(yaml_from_spec, yaml_config);
+    assert_eq!(
+        yaml_config.extra_fields,
+        HashMap::new(),
+        "not all config fields read"
+    );
 }
 
 #[test]
@@ -7,7 +7,7 @@ use state_processing::test_utils::BlockProcessingBuilder;
 use std::env;
 use std::fs::{self, File};
 use std::io::Write;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::process::exit;
 use types::MainnetEthSpec;
 use types::{BeaconState, ChainSpec, EthSpec, SignedBeaconBlock};
@@ -91,12 +91,12 @@ fn write_vectors_to_file(title: &str, vectors: &[TestVector]) -> Result<(), Stri
 }
 
 /// Write some SSZ object to file.
-fn write_to_ssz_file<T: Encode>(path: &PathBuf, item: &T) -> Result<(), String> {
+fn write_to_ssz_file<T: Encode>(path: &Path, item: &T) -> Result<(), String> {
     write_to_file(path, &item.as_ssz_bytes())
 }
 
 /// Write some bytes to file.
-fn write_to_file(path: &PathBuf, item: &[u8]) -> Result<(), String> {
+fn write_to_file(path: &Path, item: &[u8]) -> Result<(), String> {
     File::create(path)
         .map_err(|e| format!("Unable to create {:?}: {:?}", path, e))
         .and_then(|mut file| {
@@ -7,7 +7,7 @@ use crate::http_metrics::metrics::{inc_counter_vec, ENDPOINT_ERRORS, ENDPOINT_RE
 use environment::RuntimeContext;
 use eth2::BeaconNodeHttpClient;
 use futures::future;
-use slog::{error, info, warn, Logger};
+use slog::{debug, error, info, warn, Logger};
 use slot_clock::SlotClock;
 use std::fmt;
 use std::fmt::Debug;
@@ -236,6 +236,14 @@ impl<E: EthSpec> CandidateBeaconNode<E> {
                 CandidateError::Incompatible
             })?;
 
+        if !yaml_config.extra_fields.is_empty() {
+            debug!(
+                log,
+                "Beacon spec includes unknown fields";
+                "fields" => ?yaml_config.extra_fields
+            );
+        }
+
         if *spec == beacon_node_spec {
             Ok(())
         } else {
@@ -19,7 +19,7 @@ use slog::{debug, error, info, warn, Logger};
 use std::collections::{HashMap, HashSet};
 use std::fs::File;
 use std::io;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use types::{Graffiti, Keypair, PublicKey, PublicKeyBytes};
 
 use crate::key_cache;
@@ -101,20 +101,16 @@ impl InitializedValidator {
     }
 }
 
-fn open_keystore(path: &PathBuf) -> Result<Keystore, Error> {
+fn open_keystore(path: &Path) -> Result<Keystore, Error> {
     let keystore_file = File::open(path).map_err(Error::UnableToOpenVotingKeystore)?;
     Keystore::from_json_reader(keystore_file).map_err(Error::UnableToParseVotingKeystore)
 }
 
-fn get_lockfile_path(file_path: &PathBuf) -> Option<PathBuf> {
+fn get_lockfile_path(file_path: &Path) -> Option<PathBuf> {
     file_path
         .file_name()
         .and_then(|os_str| os_str.to_str())
-        .map(|filename| {
-            file_path
-                .clone()
-                .with_file_name(format!("{}.lock", filename))
-        })
+        .map(|filename| file_path.with_file_name(format!("{}.lock", filename)))
 }
 
 impl InitializedValidator {
@@ -238,7 +234,7 @@ impl InitializedValidator {
     /// Try to unlock `keystore` at `keystore_path` by prompting the user via `stdin`.
     fn unlock_keystore_via_stdin_password(
         keystore: &Keystore,
-        keystore_path: &PathBuf,
+        keystore_path: &Path,
     ) -> Result<(ZeroizeString, Keypair), Error> {
         eprintln!();
         eprintln!(