// composite_keys.go — helpers for building and parsing the composite database
// keys used by erigon-lib/kv/dbutils (block, storage, trie and log keys).
package dbutils
import (
	"encoding/binary"
	"errors"
	"fmt"

	libcommon "github.com/ledgerwatch/erigon-lib/common"
	"github.com/ledgerwatch/erigon-lib/common/length"
)
// NumberLength is the byte width of a big-endian encoded block number.
const NumberLength = 8

// EncodeBlockNumber encodes a block number as big endian uint64.
func EncodeBlockNumber(number uint64) []byte {
	enc := make([]byte, NumberLength)
	binary.BigEndian.PutUint64(enc, number)
	return enc
}

// ErrInvalidSize is returned when a big endian number has an unexpected byte length.
var ErrInvalidSize = errors.New("big endian number has an invalid size")

// DecodeBlockNumber decodes a big endian uint64 block number.
// It returns ErrInvalidSize (wrapped with the actual length) when the input
// is not exactly NumberLength bytes.
func DecodeBlockNumber(number []byte) (uint64, error) {
	if len(number) != NumberLength {
		return 0, fmt.Errorf("%w: %d", ErrInvalidSize, len(number))
	}
	return binary.BigEndian.Uint64(number), nil
}
// HeaderKey = num (uint64 big endian) + hash
func HeaderKey(number uint64, hash libcommon.Hash) []byte {
2022-11-20 03:58:20 +00:00
k := make([]byte, NumberLength+length.Hash)
2021-07-13 09:31:59 +00:00
binary.BigEndian.PutUint64(k, number)
copy(k[NumberLength:], hash[:])
return k
}
// BlockBodyKey = num (uint64 big endian) + hash
func BlockBodyKey(number uint64, hash libcommon.Hash) []byte {
2022-11-20 03:58:20 +00:00
k := make([]byte, NumberLength+length.Hash)
2021-07-13 09:31:59 +00:00
binary.BigEndian.PutUint64(k, number)
copy(k[NumberLength:], hash[:])
return k
}
// LogKey = blockN (uint64 big endian) + txId (uint32 big endian).
func LogKey(blockNumber uint64, txId uint32) []byte {
	key := make([]byte, 12) // 8 bytes block number + 4 bytes transaction index
	binary.BigEndian.PutUint64(key[:8], blockNumber)
	binary.BigEndian.PutUint32(key[8:], txId)
	return key
}
// BloomBitsKey = bit (uint16 big endian) + section (uint64 big endian) + hash.
func BloomBitsKey(bit uint, section uint64, hash libcommon.Hash) []byte {
	header := make([]byte, 10)
	binary.BigEndian.PutUint16(header, uint16(bit))
	binary.BigEndian.PutUint64(header[2:], section)
	return append(header, hash.Bytes()...)
}
2019-11-07 15:51:25 +00:00
// AddrHash + KeyHash
// Only for trie
func GenerateCompositeTrieKey(addressHash libcommon.Hash, seckey libcommon.Hash) []byte {
2022-11-20 03:58:20 +00:00
compositeKey := make([]byte, 0, length.Hash+length.Hash)
compositeKey = append(compositeKey, addressHash[:]...)
compositeKey = append(compositeKey, seckey[:]...)
return compositeKey
}
2019-11-07 15:51:25 +00:00
// AddrHash + incarnation + KeyHash
// For contract storage
func GenerateCompositeStorageKey(addressHash libcommon.Hash, incarnation uint64, seckey libcommon.Hash) []byte {
2022-11-20 03:58:20 +00:00
compositeKey := make([]byte, length.Hash+length.Incarnation+length.Hash)
copy(compositeKey, addressHash[:])
2022-11-20 03:58:20 +00:00
binary.BigEndian.PutUint64(compositeKey[length.Hash:], incarnation)
copy(compositeKey[length.Hash+length.Incarnation:], seckey[:])
return compositeKey
}
func ParseCompositeStorageKey(compositeKey []byte) (libcommon.Hash, uint64, libcommon.Hash) {
2022-11-20 03:58:20 +00:00
prefixLen := length.Hash + length.Incarnation
addrHash, inc := ParseStoragePrefix(compositeKey[:prefixLen])
var key libcommon.Hash
2022-11-20 03:58:20 +00:00
copy(key[:], compositeKey[prefixLen:prefixLen+length.Hash])
return addrHash, inc, key
}
// AddrHash + incarnation + KeyHash
// For contract storage (for plain state)
State cache switching writes to reads during commit (#1368) * State cache init * More code * Fix lint * More tests * More tests * More tests * Fix test * Transformations * remove writeQueue, before fixing the tests * Fix tests * Add more tests, incarnation to the code items * Fix lint * Fix lint * Remove shards prototype, add incarnation to the state reader code * Clean up and replace cache in call_traces stage * fix flaky test * Save changes * Readers to use addrHash, writes - addresses * Fix lint * Fix lint * More accurate tracking of size * Optimise for smaller write batches * Attempt to integrate state cache into Execution stage * cacheSize to default flags * Print correct cache sizes and batch sizes * cacheSize in the integration * Fix tests * Fix lint * Remove print * Fix exec stage * Fix test * Refresh sequence on write * No double increment * heap.Remove * Try to fix alignment * Refactoring, adding hashItems * More changes * Fix compile errors * Fix lint * Wrapping cached reader * Wrap writer into cached writer * Turn state cache off by default * Fix plain state writer * Fix for code/storage mixup * Fix tests * Fix clique test * Better fix for the tests * Add test and fix some more * Fix compile error| * More functions * Fixes * Fix for the tests * sepatate DeletedFlag and AbsentFlag * Minor fixes * Test refactoring * More changes * Fix some tests * More test fixes * More test fixes * Fix lint * Move blockchain_test to be able to use stagedsync * More fixes * Fixes and cleanup * Fix tests in turbo/stages * Fix lint * Fix lint * Intemediate * Fix tests * Intemediate * More fixes * Compilation fixes * More fixes * Fix compile errors * More test fixes * More fixes * More test fixes * Fix compile error * Fixes * Fix * Fix * More fixes * Fixes * More fixes and cleanup * Further fix * Check gas used and bloom with header Co-authored-by: Alexey Sharp <alexeysharp@Alexeys-iMac.local>
2020-12-08 09:44:29 +00:00
func PlainGenerateCompositeStorageKey(address []byte, incarnation uint64, key []byte) []byte {
2022-11-20 03:58:20 +00:00
compositeKey := make([]byte, length.Addr+length.Incarnation+length.Hash)
State cache switching writes to reads during commit (#1368) * State cache init * More code * Fix lint * More tests * More tests * More tests * Fix test * Transformations * remove writeQueue, before fixing the tests * Fix tests * Add more tests, incarnation to the code items * Fix lint * Fix lint * Remove shards prototype, add incarnation to the state reader code * Clean up and replace cache in call_traces stage * fix flaky test * Save changes * Readers to use addrHash, writes - addresses * Fix lint * Fix lint * More accurate tracking of size * Optimise for smaller write batches * Attempt to integrate state cache into Execution stage * cacheSize to default flags * Print correct cache sizes and batch sizes * cacheSize in the integration * Fix tests * Fix lint * Remove print * Fix exec stage * Fix test * Refresh sequence on write * No double increment * heap.Remove * Try to fix alignment * Refactoring, adding hashItems * More changes * Fix compile errors * Fix lint * Wrapping cached reader * Wrap writer into cached writer * Turn state cache off by default * Fix plain state writer * Fix for code/storage mixup * Fix tests * Fix clique test * Better fix for the tests * Add test and fix some more * Fix compile error| * More functions * Fixes * Fix for the tests * sepatate DeletedFlag and AbsentFlag * Minor fixes * Test refactoring * More changes * Fix some tests * More test fixes * More test fixes * Fix lint * Move blockchain_test to be able to use stagedsync * More fixes * Fixes and cleanup * Fix tests in turbo/stages * Fix lint * Fix lint * Intemediate * Fix tests * Intemediate * More fixes * Compilation fixes * More fixes * Fix compile errors * More test fixes * More fixes * More test fixes * Fix compile error * Fixes * Fix * Fix * More fixes * Fixes * More fixes and cleanup * Further fix * Check gas used and bloom with header Co-authored-by: Alexey Sharp <alexeysharp@Alexeys-iMac.local>
2020-12-08 09:44:29 +00:00
copy(compositeKey, address)
2022-11-20 03:58:20 +00:00
binary.BigEndian.PutUint64(compositeKey[length.Addr:], incarnation)
copy(compositeKey[length.Addr+length.Incarnation:], key)
return compositeKey
}
func PlainParseCompositeStorageKey(compositeKey []byte) (libcommon.Address, uint64, libcommon.Hash) {
2022-11-20 03:58:20 +00:00
prefixLen := length.Addr + length.Incarnation
addr, inc := PlainParseStoragePrefix(compositeKey[:prefixLen])
var key libcommon.Hash
2022-11-20 03:58:20 +00:00
copy(key[:], compositeKey[prefixLen:prefixLen+length.Hash])
return addr, inc, key
}
// AddrHash + incarnation + StorageHashPrefix
func GenerateCompositeStoragePrefix(addressHash []byte, incarnation uint64, storageHashPrefix []byte) []byte {
2022-11-20 03:58:20 +00:00
key := make([]byte, length.Hash+length.Incarnation+len(storageHashPrefix))
copy(key, addressHash)
2022-11-20 03:58:20 +00:00
binary.BigEndian.PutUint64(key[length.Hash:], incarnation)
copy(key[length.Hash+length.Incarnation:], storageHashPrefix)
return key
}
// address hash + incarnation prefix
func GenerateStoragePrefix(addressHash []byte, incarnation uint64) []byte {
2022-11-20 03:58:20 +00:00
prefix := make([]byte, length.Hash+NumberLength)
copy(prefix, addressHash)
2022-11-20 03:58:20 +00:00
binary.BigEndian.PutUint64(prefix[length.Hash:], incarnation)
return prefix
}
// address hash + incarnation prefix (for plain state)
func PlainGenerateStoragePrefix(address []byte, incarnation uint64) []byte {
2022-11-20 03:58:20 +00:00
prefix := make([]byte, length.Addr+NumberLength)
copy(prefix, address)
2022-11-20 03:58:20 +00:00
binary.BigEndian.PutUint64(prefix[length.Addr:], incarnation)
return prefix
}
func PlainParseStoragePrefix(prefix []byte) (libcommon.Address, uint64) {
var addr libcommon.Address
2022-11-20 03:58:20 +00:00
copy(addr[:], prefix[:length.Addr])
inc := binary.BigEndian.Uint64(prefix[length.Addr : length.Addr+length.Incarnation])
return addr, inc
}
func ParseStoragePrefix(prefix []byte) (libcommon.Hash, uint64) {
var addrHash libcommon.Hash
2022-11-20 03:58:20 +00:00
copy(addrHash[:], prefix[:length.Hash])
inc := binary.BigEndian.Uint64(prefix[length.Hash : length.Hash+length.Incarnation])
return addrHash, inc
}
2019-11-07 15:51:25 +00:00
// Key + blockNum
func CompositeKeySuffix(key []byte, timestamp uint64) (composite, encodedTS []byte) {
2022-12-19 08:38:54 +00:00
encodedTS = encodeTimestamp(timestamp)
2019-11-07 15:51:25 +00:00
composite = make([]byte, len(key)+len(encodedTS))
copy(composite, key)
2019-11-07 15:51:25 +00:00
copy(composite[len(key):], encodedTS)
return composite, encodedTS
}
// encodeTimestamp has the property: if a < b, then Encoding(a) < Encoding(b) lexicographically.
// The 3 most significant bits of the first byte store the encoded byte count (1..8);
// the remaining bits hold the big-endian value. Values needing more than 61 bits
// cannot be represented and yield a nil slice.
func encodeTimestamp(timestamp uint64) []byte {
	// Find the smallest byte count whose 5+8*(n-1)-bit payload fits the value.
	bytecount := 1
	limit := uint64(32)
	for bytecount <= 8 && timestamp >= limit {
		bytecount++
		limit <<= 8
	}
	if bytecount > 8 {
		return nil // value too large to encode
	}
	suffix := make([]byte, bytecount)
	v := timestamp
	for i := bytecount - 1; i > 0; i-- {
		suffix[i] = byte(v)
		v >>= 8
	}
	suffix[0] = byte(v) | byte(bytecount)<<5
	return suffix
}