prysm-pulse/beacon-chain/cache/hot_state_cache.go
terence tsao 7a27d89e68
Reduce memory usage for new-state-mgmt at init sync (#5778)
* Add `GetWithoutCopy`
* Add `StateByRootInitialSync`
* Use `StateByRootInitialSync` for initial syncing using new-state-mgmt
* Merge branch 'master' into new-state-init-sync-mem
* Skip if split slot is 0
* Merge branch 'new-state-init-sync-mem' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
* Make sure we invalidate cache
* Add test part 1
* Add tests part 2
* Update beacon-chain/cache/hot_state_cache.go

Co-authored-by: Victor Farazdagi <simple.square@gmail.com>
* Comment
* Merge branch 'new-state-init-sync-mem' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
* Merge branch 'master' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
* Don't need to run fork choice and save head during initial sync
* Invalidate cache at onBlockInitialSyncStateTransition
* Merge branch 'new-state-init-sync-mem' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
* Revert saveHeadNoDB changes
* Add back DeleteHotStateInCache
* Removed extra deletion
* Merge branch 'master' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
* Properly set split slot for tests
* Merge branch 'master' into new-state-init-sync-mem
* Merge branch 'master' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
* Merge branch 'new-state-init-sync-mem' of github.com:prysmaticlabs/prysm into new-state-init-sync-mem
2020-05-08 19:02:48 +00:00


package cache

import (
	lru "github.com/hashicorp/golang-lru"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	stateTrie "github.com/prysmaticlabs/prysm/beacon-chain/state"
)

var (
	// hotStateCacheSize defines the max number of hot states the cache can hold.
	hotStateCacheSize = 16

	// Metrics
	hotStateCacheHit = promauto.NewCounter(prometheus.CounterOpts{
		Name: "hot_state_cache_hit",
		Help: "The total number of cache hits on the hot state cache.",
	})
	hotStateCacheMiss = promauto.NewCounter(prometheus.CounterOpts{
		Name: "hot_state_cache_miss",
		Help: "The total number of cache misses on the hot state cache.",
	})
)

// HotStateCache is used to store the processed beacon state after the finalized checkpoint.
type HotStateCache struct {
	cache *lru.Cache
}

// NewHotStateCache initializes the map and underlying cache.
func NewHotStateCache() *HotStateCache {
	cache, err := lru.New(hotStateCacheSize)
	if err != nil {
		panic(err)
	}
	return &HotStateCache{
		cache: cache,
	}
}

// Get returns a cached response via input block root, if any.
// The response is copied by default.
func (c *HotStateCache) Get(root [32]byte) *stateTrie.BeaconState {
	item, exists := c.cache.Get(root)
	if exists && item != nil {
		hotStateCacheHit.Inc()
		return item.(*stateTrie.BeaconState).Copy()
	}
	hotStateCacheMiss.Inc()
	return nil
}

// GetWithoutCopy returns a non-copied cached response via input block root.
// The returned state is shared with the cache and must not be mutated by the caller.
func (c *HotStateCache) GetWithoutCopy(root [32]byte) *stateTrie.BeaconState {
	item, exists := c.cache.Get(root)
	if exists && item != nil {
		hotStateCacheHit.Inc()
		return item.(*stateTrie.BeaconState)
	}
	hotStateCacheMiss.Inc()
	return nil
}

// Put adds the state to the cache keyed by the input block root.
func (c *HotStateCache) Put(root [32]byte, state *stateTrie.BeaconState) {
	c.cache.Add(root, state)
}

// Has returns true if the key exists in the cache.
func (c *HotStateCache) Has(root [32]byte) bool {
	return c.cache.Contains(root)
}

// Delete removes the key from the cache, returning true if the key was present.
func (c *HotStateCache) Delete(root [32]byte) bool {
	return c.cache.Remove(root)
}
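
// The example below is an illustrative sketch and not part of the original file.
// It shows how a caller might prefer GetWithoutCopy on the initial-sync path to
// avoid copying a large BeaconState on every lookup, while using Get elsewhere so
// the returned state can be mutated safely. The function name and the initialSync
// flag are hypothetical.
func exampleHotStateLookup(c *HotStateCache, root [32]byte, initialSync bool) *stateTrie.BeaconState {
	if initialSync {
		// Initial sync treats the cached state as read-only, so the shared,
		// non-copied instance is returned to reduce memory usage.
		return c.GetWithoutCopy(root)
	}
	// Regular path: return a copy that the caller may modify freely.
	return c.Get(root)
}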