prysm-pulse/beacon-chain/db/kafka/export_wrapper.go
Preston Van Loon 49a0d3caf0
Refactor dependencies, make Prysm "go gettable" (#6053)
* Fix a few deps to work with go.mod, check in generated files

* Update Gossipsub to 1.1 (#5998)

* update libs

* add new validators

* add new deps

* new set of deps

* tls

* further fix gossip update

* get everything to build

* clean up

* gaz

* fix build

* fix all tests

* add deps to images

* imports

Co-authored-by: rauljordan <raul@prysmaticlabs.com>

* Beacon chain builds with go build

* fix bazel

* fix dep

* lint

* Add github action for testing go

* on PR for any branch

* fix libp2p test failure

* Fix TestProcessBlock_PassesProcessingConditions by updating the proposer index in test

* Revert "Fix TestProcessBlock_PassesProcessingConditions by updating the proposer index in test"

This reverts commit 43676894ab01f03fe90a9b8ee3ecfbc2ec1ec4e4.

* Compute and set proposer index instead of hard code

* Add back go mod/sum, fix deps

* go build ./...

* Temporarily skip two tests

* Fix kafka confluent patch

* Fix kafka confluent patch

* fix kafka build

* fix kafka

* Add info in DEPENDENCIES. Added a stub link for Why Bazel? until https://github.com/prysmaticlabs/documentation/issues/138

* Update fuzz ssz files as well

* Update fuzz ssz files as well

* getting closer

* rollback rules_go and gazelle

* fix gogo protobuf

* install librdkafka-dev as part of github actions

* Update kafka to a recent version where librdkafka is not required for go modules

* clarify comment

* fix kafka build

* disable go tests

* comment

* Fix geth dependencies for end to end

* rename word

* lint

* fix docker

Co-authored-by: Nishant Das <nishdas93@gmail.com>
Co-authored-by: rauljordan <raul@prysmaticlabs.com>
Co-authored-by: terence tsao <terence@prysmaticlabs.com>
2020-05-31 14:44:34 +08:00

128 lines
3.6 KiB
Go

// Package kafka defines an implementation of Database interface
// which exports streaming data using Kafka for data analysis.
package kafka
import (
"bytes"
"context"
"github.com/golang/protobuf/jsonpb"
"github.com/golang/protobuf/proto"
eth "github.com/prysmaticlabs/ethereumapis/eth/v1alpha1"
"github.com/prysmaticlabs/go-ssz"
"github.com/prysmaticlabs/prysm/beacon-chain/db/iface"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
"github.com/prysmaticlabs/prysm/shared/traceutil"
"github.com/sirupsen/logrus"
"go.opencensus.io/trace"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
_ "gopkg.in/confluentinc/confluent-kafka-go.v1/kafka/librdkafka" // Required for c++ kafka library.
)
// Compile-time assertion that Exporter implements the full Database interface.
var _ = iface.Database(&Exporter{})

// log is the package-scoped logger, tagged so exporter output is identifiable.
var log = logrus.WithField("prefix", "exporter")

// marshaler serializes protobuf messages into JSON for kafka message values.
var marshaler = &jsonpb.Marshaler{}
// Exporter wraps a database interface and exports certain objects to kafka topics.
type Exporter struct {
	db iface.Database // underlying database; all Database calls are delegated here
	p *kafka.Producer // kafka producer used to publish exported objects
}
// Wrap the db with kafka exporter. If the feature flag is not enabled, this service does not wrap
// the database, but returns the underlying database pointer itself.
func Wrap(db iface.Database) (iface.Database, error) {
	// Read the feature config once instead of calling featureconfig.Get() twice.
	servers := featureconfig.Get().KafkaBootstrapServers
	if servers == "" {
		// Feature disabled: pass the underlying database through untouched.
		return db, nil
	}
	p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": servers})
	if err != nil {
		return nil, err
	}
	return &Exporter{db: db, p: p}, nil
}
// publish serializes msg as JSON and produces it onto the given kafka topic,
// keyed by the message's SSZ hash tree root. Any failure is annotated on the
// tracing span and returned to the caller.
func (e Exporter) publish(ctx context.Context, topic string, msg proto.Message) error {
	ctx, span := trace.StartSpan(ctx, "kafka.publish")
	defer span.End()

	var body bytes.Buffer
	if err := marshaler.Marshal(&body, msg); err != nil {
		traceutil.AnnotateError(span, err)
		return err
	}

	root, err := ssz.HashTreeRoot(msg)
	if err != nil {
		traceutil.AnnotateError(span, err)
		return err
	}

	kmsg := &kafka.Message{
		TopicPartition: kafka.TopicPartition{Topic: &topic},
		Value:          body.Bytes(),
		Key:            root[:],
	}
	if err := e.p.Produce(kmsg, nil); err != nil {
		traceutil.AnnotateError(span, err)
		return err
	}
	return nil
}
// Close closes kafka producer and underlying db.
func (e Exporter) Close() error {
	// Produce is asynchronous; without flushing first, messages still sitting
	// in the producer's internal queue would be silently dropped on Close.
	e.p.Flush(5000) // wait up to 5s for outstanding deliveries to complete
	e.p.Close()
	return e.db.Close()
}
// SaveAttestation publishes to the kafka topic for attestations.
func (e Exporter) SaveAttestation(ctx context.Context, att *eth.Attestation) error {
	// Export asynchronously so the caller never blocks on kafka delivery.
	go func() {
		err := e.publish(ctx, "beacon_attestation", att)
		if err != nil {
			log.WithError(err).Error("Failed to publish attestation")
		}
	}()
	// The wrapped database receives the write regardless of publish outcome.
	return e.db.SaveAttestation(ctx, att)
}
// SaveAttestations publishes to the kafka topic for beacon attestations.
func (e Exporter) SaveAttestations(ctx context.Context, atts []*eth.Attestation) error {
	// Export each attestation asynchronously; failures are logged, not returned.
	go func() {
		for i := range atts {
			err := e.publish(ctx, "beacon_attestation", atts[i])
			if err != nil {
				log.WithError(err).Error("Failed to publish attestation")
			}
		}
	}()
	// The wrapped database receives the write regardless of publish outcome.
	return e.db.SaveAttestations(ctx, atts)
}
// SaveBlock publishes to the kafka topic for beacon blocks.
func (e Exporter) SaveBlock(ctx context.Context, block *eth.SignedBeaconBlock) error {
	// Export asynchronously so the caller never blocks on kafka delivery.
	go func() {
		err := e.publish(ctx, "beacon_block", block)
		if err != nil {
			log.WithError(err).Error("Failed to publish block")
		}
	}()
	// The wrapped database receives the write regardless of publish outcome.
	return e.db.SaveBlock(ctx, block)
}
// SaveBlocks publishes to the kafka topic for beacon blocks.
func (e Exporter) SaveBlocks(ctx context.Context, blocks []*eth.SignedBeaconBlock) error {
	// Export each block asynchronously; failures are logged, not returned.
	go func() {
		for i := range blocks {
			err := e.publish(ctx, "beacon_block", blocks[i])
			if err != nil {
				log.WithError(err).Error("Failed to publish block")
			}
		}
	}()
	// The wrapped database receives the write regardless of publish outcome.
	return e.db.SaveBlocks(ctx, blocks)
}