package client

import (
	"context"
	"errors"
	"io/ioutil"
	"strings"
	"testing"
	"time"

	ptypes "github.com/gogo/protobuf/types"
	"github.com/golang/mock/gomock"
	pbp2p "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	pb "github.com/prysmaticlabs/prysm/proto/beacon/rpc/v1"
	"github.com/prysmaticlabs/prysm/shared/params"
	"github.com/prysmaticlabs/prysm/shared/testutil"
	"github.com/prysmaticlabs/prysm/validator/internal"
	"github.com/sirupsen/logrus"
	logTest "github.com/sirupsen/logrus/hooks/test"
)

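// init enables debug-level logging but discards all log output so test runs stay quiet.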
func init() {
	logrus.SetLevel(logrus.DebugLevel)
	logrus.SetOutput(ioutil.Discard)
}

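// Assert at compile time that *validator satisfies the Validator interface.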
var _ = Validator(&validator{})

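// TestWaitForChainStart_SetsChainStartGenesisTime checks that a ChainStart response from the
// beacon node sets the validator's genesis time and starts its slot ticker.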
func TestWaitForChainStart_SetsChainStartGenesisTime(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockBeaconServiceClient(ctrl)

	v := validator{
		key:          validatorKey,
		beaconClient: client,
	}
	genesis := uint64(time.Unix(0, 0).Unix())
	clientStream := internal.NewMockBeaconService_WaitForChainStartClient(ctrl)
	client.EXPECT().WaitForChainStart(
		gomock.Any(),
		&ptypes.Empty{},
	).Return(clientStream, nil)
	clientStream.EXPECT().Recv().Return(
		&pb.ChainStartResponse{
			Started:     true,
			GenesisTime: genesis,
		},
		nil,
	)
	if err := v.WaitForChainStart(context.Background()); err != nil {
		t.Fatal(err)
	}
	if v.genesisTime != genesis {
		t.Errorf("Expected chain start time to equal %d, received %d", genesis, v.genesisTime)
	}
	if v.ticker == nil {
		t.Error("Expected ticker to be set, received nil")
	}
}

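// TestWaitForChainStart_ContextCanceled checks that WaitForChainStart returns a
// context-canceled error when its context is canceled.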
func TestWaitForChainStart_ContextCanceled(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockBeaconServiceClient(ctrl)

	v := validator{
		key:          validatorKey,
		beaconClient: client,
	}
	genesis := uint64(time.Unix(0, 0).Unix())
	clientStream := internal.NewMockBeaconService_WaitForChainStartClient(ctrl)
	client.EXPECT().WaitForChainStart(
		gomock.Any(),
		&ptypes.Empty{},
	).Return(clientStream, nil)
	clientStream.EXPECT().Recv().Return(
		&pb.ChainStartResponse{
			Started:     true,
			GenesisTime: genesis,
		},
		nil,
	)
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	err := v.WaitForChainStart(ctx)
	want := "context has been canceled"
	if !strings.Contains(err.Error(), want) {
		t.Errorf("Expected %v, received %v", want, err)
	}
}

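// TestWaitForChainStart_StreamSetupFails checks that an error opening the ChainStart
// stream surfaces as a "could not setup" error.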
func TestWaitForChainStart_StreamSetupFails(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockBeaconServiceClient(ctrl)

	v := validator{
		key:          validatorKey,
		beaconClient: client,
	}
	clientStream := internal.NewMockBeaconService_WaitForChainStartClient(ctrl)
	client.EXPECT().WaitForChainStart(
		gomock.Any(),
		&ptypes.Empty{},
	).Return(clientStream, errors.New("failed stream"))
	err := v.WaitForChainStart(context.Background())
	want := "could not setup beacon chain ChainStart streaming client"
	if !strings.Contains(err.Error(), want) {
		t.Errorf("Expected %v, received %v", want, err)
	}
}

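// TestWaitForChainStart_ReceiveErrorFromStream checks that an error from the stream's
// Recv call surfaces as a "could not receive" error.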
func TestWaitForChainStart_ReceiveErrorFromStream(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockBeaconServiceClient(ctrl)

	v := validator{
		key:          validatorKey,
		beaconClient: client,
	}
	clientStream := internal.NewMockBeaconService_WaitForChainStartClient(ctrl)
	client.EXPECT().WaitForChainStart(
		gomock.Any(),
		&ptypes.Empty{},
	).Return(clientStream, nil)
	clientStream.EXPECT().Recv().Return(
		nil,
		errors.New("fails"),
	)
	err := v.WaitForChainStart(context.Background())
	want := "could not receive ChainStart from stream"
	if !strings.Contains(err.Error(), want) {
		t.Errorf("Expected %v, received %v", want, err)
	}
}

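// TestWaitActivation_ContextCanceled checks that WaitForActivation returns a
// context-canceled error when its context is canceled.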
func TestWaitActivation_ContextCanceled(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockValidatorServiceClient(ctrl)

	v := validator{
		key:             validatorKey,
		validatorClient: client,
	}
	clientStream := internal.NewMockValidatorService_WaitForActivationClient(ctrl)
	client.EXPECT().WaitForActivation(
		gomock.Any(),
		&pb.ValidatorActivationRequest{
			Pubkey: v.key.PublicKey.Marshal(),
		},
	).Return(clientStream, nil)
	clientStream.EXPECT().Recv().Return(
		&pb.ValidatorActivationResponse{
			Validator: &pbp2p.Validator{
				ActivationEpoch: params.BeaconConfig().GenesisEpoch,
			},
		},
		nil,
	)
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	err := v.WaitForActivation(ctx)
	want := "context has been canceled"
	if !strings.Contains(err.Error(), want) {
		t.Errorf("Expected %v, received %v", want, err)
	}
}

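// TestWaitActivation_StreamSetupFails checks that an error opening the WaitForActivation
// stream surfaces as a "could not setup" error.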
func TestWaitActivation_StreamSetupFails(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockValidatorServiceClient(ctrl)

	v := validator{
		key:             validatorKey,
		validatorClient: client,
	}
	clientStream := internal.NewMockValidatorService_WaitForActivationClient(ctrl)
	client.EXPECT().WaitForActivation(
		gomock.Any(),
		&pb.ValidatorActivationRequest{
			Pubkey: v.key.PublicKey.Marshal(),
		},
	).Return(clientStream, errors.New("failed stream"))
	err := v.WaitForActivation(context.Background())
	want := "could not setup validator WaitForActivation streaming client"
	if !strings.Contains(err.Error(), want) {
		t.Errorf("Expected %v, received %v", want, err)
	}
}

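// TestWaitActivation_ReceiveErrorFromStream checks that an error from the activation
// stream's Recv call surfaces as a "could not receive" error.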
func TestWaitActivation_ReceiveErrorFromStream(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockValidatorServiceClient(ctrl)

	v := validator{
		key:             validatorKey,
		validatorClient: client,
	}
	clientStream := internal.NewMockValidatorService_WaitForActivationClient(ctrl)
	client.EXPECT().WaitForActivation(
		gomock.Any(),
		&pb.ValidatorActivationRequest{
			Pubkey: v.key.PublicKey.Marshal(),
		},
	).Return(clientStream, nil)
	clientStream.EXPECT().Recv().Return(
		nil,
		errors.New("fails"),
	)
	err := v.WaitForActivation(context.Background())
	want := "could not receive validator activation from stream"
	if !strings.Contains(err.Error(), want) {
		t.Errorf("Expected %v, received %v", want, err)
	}
}

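// TestWaitActivation_LogsActivationEpochOK checks that a successful activation response
// logs "Validator activated".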
func TestWaitActivation_LogsActivationEpochOK(t *testing.T) {
	hook := logTest.NewGlobal()
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockValidatorServiceClient(ctrl)

	v := validator{
		key:             validatorKey,
		validatorClient: client,
	}
	clientStream := internal.NewMockValidatorService_WaitForActivationClient(ctrl)
	client.EXPECT().WaitForActivation(
		gomock.Any(),
		&pb.ValidatorActivationRequest{
			Pubkey: v.key.PublicKey.Marshal(),
		},
	).Return(clientStream, nil)
	clientStream.EXPECT().Recv().Return(
		&pb.ValidatorActivationResponse{
			Validator: &pbp2p.Validator{
				ActivationEpoch: params.BeaconConfig().GenesisEpoch,
			},
		},
		nil,
	)
	if err := v.WaitForActivation(context.Background()); err != nil {
		t.Errorf("Could not wait for activation: %v", err)
	}
	testutil.AssertLogsContain(t, hook, "Validator activated")
}

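// TestUpdateAssignments_ReturnsError checks that an RPC failure is returned unchanged
// and that any previously cached assignment is cleared.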
func TestUpdateAssignments_ReturnsError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockValidatorServiceClient(ctrl)

	v := validator{
		key:             validatorKey,
		validatorClient: client,
		assignment: &pb.CommitteeAssignmentResponse{
			Assignment: []*pb.CommitteeAssignmentResponse_CommitteeAssignment{
				{
					Shard: 1,
				},
			},
		},
	}

	expected := errors.New("bad")

	client.EXPECT().CommitteeAssignment(
		gomock.Any(),
		gomock.Any(),
	).Return(nil, expected)

	if err := v.UpdateAssignments(context.Background(), params.BeaconConfig().SlotsPerEpoch); err != expected {
		t.Errorf("Bad error; want=%v got=%v", expected, err)
	}
	if v.assignment != nil {
		t.Error("Assignments should have been cleared on failure")
	}
}

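// TestUpdateAssignments_OK checks that a successful CommitteeAssignment response is
// cached on the validator with the expected slot, shard, and proposer flag.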
func TestUpdateAssignments_OK(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	client := internal.NewMockValidatorServiceClient(ctrl)

	slot := params.BeaconConfig().SlotsPerEpoch
	resp := &pb.CommitteeAssignmentResponse{
		Assignment: []*pb.CommitteeAssignmentResponse_CommitteeAssignment{
			{
				Slot:       params.BeaconConfig().SlotsPerEpoch,
				Shard:      100,
				Committee:  []uint64{0, 1, 2, 3},
				IsProposer: true,
			},
		},
	}
	v := validator{
		key:             validatorKey,
		validatorClient: client,
	}
	client.EXPECT().CommitteeAssignment(
		gomock.Any(),
		gomock.Any(),
	).Return(resp, nil)

	if err := v.UpdateAssignments(context.Background(), slot); err != nil {
		t.Fatalf("Could not update assignments: %v", err)
	}

	if v.assignment.Assignment[0].Slot != params.BeaconConfig().SlotsPerEpoch {
		t.Errorf("Unexpected validator assignments. want=%v got=%v", params.BeaconConfig().SlotsPerEpoch, v.assignment.Assignment[0].Slot)
	}
	if v.assignment.Assignment[0].Shard != resp.Assignment[0].Shard {
		t.Errorf("Unexpected validator assignments. want=%v got=%v", resp.Assignment[0].Shard, v.assignment.Assignment[0].Shard)
	}
	if !v.assignment.Assignment[0].IsProposer {
		t.Errorf("Unexpected validator assignments. want: proposer=true")
	}
}