From fb4e83e6a39e4f8e22d0729d4bcda3912e4f4141 Mon Sep 17 00:00:00 2001 From: MichelMajdalani Date: Sun, 21 Aug 2022 19:06:37 -0700 Subject: [PATCH 01/11] WIP: Test failing because returning invalid input for valid input --- go.mod | 1 + go.sum | 2 + lib/block_view.go | 24 ++ lib/block_view_flush.go | 50 +++ lib/block_view_reaction.go | 294 +++++++++++++ lib/block_view_reaction_test.go | 726 ++++++++++++++++++++++++++++++++ lib/block_view_types.go | 69 ++- lib/blockchain.go | 54 +++ lib/db_utils.go | 220 +++++++++- lib/errors.go | 4 + lib/mempool.go | 28 ++ lib/network.go | 97 ++++- lib/network_test.go | 58 +++ lib/notifier.go | 15 + lib/postgres.go | 100 +++++ 15 files changed, 1736 insertions(+), 6 deletions(-) create mode 100644 lib/block_view_reaction.go create mode 100644 lib/block_view_reaction_test.go diff --git a/go.mod b/go.mod index d409be68c..50690f694 100644 --- a/go.mod +++ b/go.mod @@ -55,6 +55,7 @@ require ( golang.org/x/mod v0.4.2 // indirect golang.org/x/net v0.0.0-20210614182718-04defd469f4e // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect + golang.org/x/text v0.3.7 // indirect golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 // indirect golang.org/x/tools v0.1.0 // indirect gopkg.in/DataDog/dd-trace-go.v1 v1.29.0 diff --git a/go.sum b/go.sum index 7b814c22c..d65c9ef92 100644 --- a/go.sum +++ b/go.sum @@ -665,6 +665,8 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time 
v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 h1:Hir2P/De0WpUhtrKGGjvSb2YxUgyZ7EFOSLIcSSpiwE= diff --git a/lib/block_view.go b/lib/block_view.go index 20eb61b9f..158f4c2c2 100644 --- a/lib/block_view.go +++ b/lib/block_view.go @@ -64,6 +64,9 @@ type UtxoView struct { // Like data LikeKeyToLikeEntry map[LikeKey]*LikeEntry + // React data + ReactionKeyToReactionEntry map[ReactionKey]*ReactionEntry + // Repost data RepostKeyToRepostEntry map[RepostKey]*RepostEntry @@ -145,6 +148,9 @@ func (bav *UtxoView) _ResetViewMappingsAfterFlush() { // Like data bav.LikeKeyToLikeEntry = make(map[LikeKey]*LikeEntry) + // React data + bav.ReactionKeyToReactionEntry = make(map[ReactionKey]*ReactionEntry) + // Repost data bav.RepostKeyToRepostEntry = make(map[RepostKey]*RepostEntry) @@ -281,6 +287,16 @@ func (bav *UtxoView) CopyUtxoView() (*UtxoView, error) { newView.LikeKeyToLikeEntry[likeKey] = &newLikeEntry } + // Copy the react data + newView.ReactionKeyToReactionEntry = make(map[ReactionKey]*ReactionEntry, len(bav.ReactionKeyToReactionEntry)) + for reactKey, reactEntry := range bav.ReactionKeyToReactionEntry { + if reactEntry == nil { + continue + } + newReactEntry := *reactEntry + newView.ReactionKeyToReactionEntry[reactKey] = &newReactEntry + } + // Copy the repost data newView.RepostKeyToRepostEntry = make(map[RepostKey]*RepostEntry, len(bav.RepostKeyToRepostEntry)) for repostKey, repostEntry := range bav.RepostKeyToRepostEntry { @@ -947,6 +963,10 @@ func (bav *UtxoView) DisconnectTransaction(currentTxn *MsgDeSoTxn, txnHash *Bloc return bav._disconnectLike( OperationTypeLike, currentTxn, txnHash, utxoOpsForTxn, blockHeight) + } else if currentTxn.TxnMeta.GetTxnType() == TxnTypeReact { + return bav._disconnectReact( + OperationTypeReact, currentTxn, txnHash, utxoOpsForTxn, blockHeight) + } else if currentTxn.TxnMeta.GetTxnType() == TxnTypeCreatorCoin { return 
bav._disconnectCreatorCoin( OperationTypeCreatorCoin, currentTxn, txnHash, utxoOpsForTxn, blockHeight) @@ -2257,6 +2277,10 @@ func (bav *UtxoView) _connectTransaction(txn *MsgDeSoTxn, txHash *BlockHash, totalInput, totalOutput, utxoOpsForTxn, err = bav._connectLike(txn, txHash, blockHeight, verifySignatures) + } else if txn.TxnMeta.GetTxnType() == TxnTypeReact { + totalInput, totalOutput, utxoOpsForTxn, err = + bav._connectReact(txn, txHash, blockHeight, verifySignatures) + } else if txn.TxnMeta.GetTxnType() == TxnTypeCreatorCoin { totalInput, totalOutput, utxoOpsForTxn, err = bav._connectCreatorCoin( diff --git a/lib/block_view_flush.go b/lib/block_view_flush.go index adf1bfcc4..b1de080b8 100644 --- a/lib/block_view_flush.go +++ b/lib/block_view_flush.go @@ -66,6 +66,9 @@ func (bav *UtxoView) FlushToDbWithTxn(txn *badger.Txn, blockHeight uint64) error if err := bav._flushLikeEntriesToDbWithTxn(txn); err != nil { return err } + if err := bav._flushReactEntriesToDbWithTxn(txn); err != nil { + return err + } if err := bav._flushFollowEntriesToDbWithTxn(txn); err != nil { return err } @@ -411,6 +414,53 @@ func (bav *UtxoView) _flushLikeEntriesToDbWithTxn(txn *badger.Txn) error { return nil } +func (bav *UtxoView) _flushReactEntriesToDbWithTxn(txn *badger.Txn) error { + + // Go through all the entries in the ReactionKeyToReactionEntry map. + for reactKeyIter, reactEntry := range bav.ReactionKeyToReactionEntry { + // Make a copy of the iterator since we make references to it below. + reactKey := reactKeyIter + + // Sanity-check that the ReactKey computed from the ReactEntry is + // equal to the ReactKey that maps to that entry. 
+ reactKeyInEntry := MakeReactionKey(reactEntry.ReactorPubKey, *reactEntry.ReactedPostHash, reactEntry.ReactEmoji) + if reactKeyInEntry != reactKey { + return fmt.Errorf("_flushReactEntriesToDbWithTxn: ReactEntry has "+ + "ReactKey: %v, which doesn't match the ReactKeyToReactEntry map key %v", + &reactKeyInEntry, &reactKey) + } + + // Delete the existing mappings in the db for this ReactKey. They will be re-added + // if the corresponding entry in memory has isDeleted=false. + if err := DbDeleteReactMappingsWithTxn( + txn, reactKey.ReactorPubKey[:], reactKey.ReactedPostHash, reactKey.ReactEmoji); err != nil { + + return errors.Wrapf( + err, "_flushReactEntriesToDbWithTxn: Problem deleting mappings "+ + "for LikeKey: %v: ", &reactKey) + } + } + + // Go through all the entries in the LikeKeyToLikeEntry map. + for _, reactEntry := range bav.ReactionKeyToReactionEntry { + + if reactEntry.isDeleted { + // If the LikeEntry has isDeleted=true then there's nothing to do because + // we already deleted the entry above. + } else { + // If the LikeEntry has (isDeleted = false) then we put the corresponding + // mappings for it into the db. + if err := DbPutReactMappingsWithTxn( + txn, reactEntry.ReactorPubKey, *reactEntry.ReactedPostHash, reactEntry.ReactEmoji); err != nil { + + return err + } + } + } + + return nil +} + func (bav *UtxoView) _flushFollowEntriesToDbWithTxn(txn *badger.Txn) error { // Go through all the entries in the FollowKeyToFollowEntry map. diff --git a/lib/block_view_reaction.go b/lib/block_view_reaction.go new file mode 100644 index 000000000..7bc67d877 --- /dev/null +++ b/lib/block_view_reaction.go @@ -0,0 +1,294 @@ +package lib + +import ( + "fmt" + "github.com/btcsuite/btcd/btcec" + "github.com/golang/glog" + "github.com/pkg/errors" + "reflect" +) + +func (bav *UtxoView) _getReactionEntryForReactionKey(reactionKey *ReactionKey) *ReactionEntry { + // If an entry exists in the in-memory map, return the value of that mapping. 
+ mapValue, existsMapValue := bav.ReactionKeyToReactionEntry[*reactionKey] + if existsMapValue { + return mapValue + } + + // If we get here it means no value exists in our in-memory map. In this case, + // defer to the db. If a mapping exists in the db, return it. If not, return + // nil. Either way, save the value to the in-memory view mapping got later. + reactionExists := false + if bav.Postgres != nil { + reactionExists = bav.Postgres.GetReaction(reactionKey.ReactorPubKey[:], &reactionKey.ReactedPostHash, reactionKey.ReactEmoji) != nil + } else { + reactionExists = DbGetReactorPubKeyToPostHashMapping(bav.Handle, reactionKey.ReactorPubKey[:], reactionKey.ReactedPostHash, reactionKey.ReactEmoji) != nil + } + + if reactionExists { + reactionEntry := ReactionEntry{ + ReactorPubKey: reactionKey.ReactorPubKey[:], + ReactedPostHash: &reactionKey.ReactedPostHash, + ReactEmoji: reactionKey.ReactEmoji, + } + bav._setReactionEntryMappings(&reactionEntry) + return &reactionEntry + } + + return nil +} + +func (bav *UtxoView) _setReactionEntryMappings(reactionEntry *ReactionEntry) { + // This function shouldn't be called with nil. + if reactionEntry == nil { + glog.Errorf("_setReactionEntryMappings: Called with nil ReactionEntry; " + + "this should never happen.") + return + } + + reactionKey := MakeReactionKey(reactionEntry.ReactorPubKey, *reactionEntry.ReactedPostHash, reactionEntry.ReactEmoji) + bav.ReactionKeyToReactionEntry[reactionKey] = reactionEntry +} + +func (bav *UtxoView) _deleteReactionEntryMappings(reactionEntry *ReactionEntry) { + + // Create a tombstone entry. + tombstoneReactionEntry := *reactionEntry + tombstoneReactionEntry.isDeleted = true + + // Set the mappings to point to the tombstone entry. + bav._setReactionEntryMappings(&tombstoneReactionEntry) +} + +func (bav *UtxoView) GetReactionByReader(readerPK []byte, postHash *BlockHash, reactEmoji rune) bool { + // Get react state. 
+ reactionKey := MakeReactionKey(readerPK, *postHash, reactEmoji) + reactionEntry := bav._getReactionEntryForReactionKey(&reactionKey) + return reactionEntry != nil && !reactionEntry.isDeleted +} + +//TODO and only update the view if the key constructed from the entry does not exist in the view yet. Otherwise, we risk updating entries in the view +// that haven't been flushed. +func (bav *UtxoView) GetReactorsForPostHash(postHash *BlockHash, reactionEmoji rune) (_ReactorPubKeys [][]byte, _err error) { + adapter := bav.GetDbAdapter() + + if adapter.postgresDb != nil { + reactions := adapter.postgresDb.GetReactionsForPost(postHash) + for _, reaction := range reactions { + bav._setReactionEntryMappings(reaction.NewReactionEntry()) + } + } else { + handle := adapter.badgerDb + dbPrefix := append([]byte{}, Prefixes.PrefixPostHashToReactorPubKey...) + dbPrefix = append(dbPrefix, postHash[:]...) + keysFound, _ := EnumerateKeysForPrefix(handle, dbPrefix) + + // Iterate over all the db keys & values and load them into the view. + expectedKeyLength := 1 + HashSizeBytes + btcec.PubKeyBytesLenCompressed + for _, key := range keysFound { + // Sanity check that this is a reasonable key. + if len(key) != expectedKeyLength { + return nil, fmt.Errorf("UtxoView.GetReactuibsForPostHash: Invalid key length found: %d", len(key)) + } + + reactorPubKey := key[1+HashSizeBytes:] + reactKey := MakeReactionKey(reactorPubKey, *postHash, reactionEmoji) + bav._getReactionEntryForReactionKey(&reactKey) + } + } + + // Iterate over the view and create the final list to return. 
+ var reactorPubKeys [][]byte + for _, reactionEntry := range bav.ReactionKeyToReactionEntry { + if !reactionEntry.isDeleted && reflect.DeepEqual(reactionEntry.ReactedPostHash[:], postHash[:]) { + reactorPubKeys = append(reactorPubKeys, reactionEntry.ReactorPubKey) + } + } + + return reactorPubKeys, nil +} + +func (bav *UtxoView) _connectReact( + txn *MsgDeSoTxn, txHash *BlockHash, blockHeight uint32, verifySignatures bool) ( + _totalInput uint64, _totalOutput uint64, _utxoOps []*UtxoOperation, _err error) { + + // Check that the transaction has the right TxnType. + if txn.TxnMeta.GetTxnType() != TxnTypeReact { + return 0, 0, nil, fmt.Errorf("_connectReact: called with bad TxnType %s", + txn.TxnMeta.GetTxnType().String()) + } + txMeta := txn.TxnMeta.(*ReactMetadata) + + // Connect basic txn to get the total input and the total output without + // considering the transaction metadata. + totalInput, totalOutput, utxoOpsForTxn, err := bav._connectBasicTransfer( + txn, txHash, blockHeight, verifySignatures) + if err != nil { + return 0, 0, nil, errors.Wrapf(err, "_connectReact: ") + } + + if verifySignatures { + // _connectBasicTransfer has already checked that the transaction is + // signed by the top-level public key, which we take to be the sender's + // public key so there is no need to verify anything further. + } + + // At this point the inputs and outputs have been processed. Now we need to handle + // the metadata. + + // There are two main checks that need to be done before allowing a reaction: + // - Check that the post exists + // - Check that the person hasn't already reacted with the same emoji + + // Check that the post to react actually exists. 
+ existingPostEntry := bav.GetPostEntryForPostHash(txMeta.PostHash) + if existingPostEntry == nil || existingPostEntry.isDeleted { + return 0, 0, nil, errors.Wrapf( + RuleErrorCannotReactNonexistentPost, + "_connectReact: Post hash: %v", txMeta.PostHash) + } + + // At this point the code diverges and considers the react flows differently + // since the presence of an existing react entry has a different effect in either case. + + reactionKey := MakeReactionKey(txn.PublicKey, *txMeta.PostHash, txMeta.EmojiReaction) + existingReactEntry := bav._getReactionEntryForReactionKey(&reactionKey) + // We don't need to make a copy of the post entry because all we're modifying is the emoji counts, + // which isn't stored in any of our mappings. But we make a copy here just because it's a little bit + // more foolproof. + updatedPostEntry := *existingPostEntry + + if txMeta.IsRemove { + // Ensure that there *is* an existing emoji entry to delete. + if existingReactEntry == nil || existingReactEntry.isDeleted { + return 0, 0, nil, errors.Wrapf( + RuleErrorCannotRemoveReactionWithoutAnExistingReaction, + "_connectReact: React key: %v", &reactionKey) + } + + // Now that we know there is a react entry, we delete it and decrement the emoji count. + bav._deleteReactionEntryMappings(existingReactEntry) + updatedPostEntry.EmojiCount[txMeta.EmojiReaction] -= 1 + } else { + // Ensure that there *is not* an existing react entry. + if existingReactEntry != nil && !existingReactEntry.isDeleted { + return 0, 0, nil, errors.Wrapf( + RuleErrorReactEntryAlreadyExists, + "_connectReact: Like key: %v", &reactionKey) + } + + // Now that we know there is no pre-existing reactentry, we can create one and + // increment the react s on the react d post. 
+ reactEntry := &ReactionEntry{ + ReactorPubKey: txn.PublicKey, + ReactedPostHash: txMeta.PostHash, + ReactEmoji: txMeta.EmojiReaction, + } + bav._setReactionEntryMappings(reactEntry) + if updatedPostEntry.EmojiCount == nil { + updatedPostEntry.EmojiCount = make(map[rune]uint64) + } + updatedPostEntry.EmojiCount[txMeta.EmojiReaction] += 1 + } + + // Set the updated post entry so it has the new emoji count. + bav._setPostEntryMappings(&updatedPostEntry) + + // Add an operation to the list at the end indicating we've added a follow. + utxoOpsForTxn = append(utxoOpsForTxn, &UtxoOperation{ + Type: OperationTypeReact, + PrevReactEntry: existingReactEntry, + PrevEmojiCount: existingPostEntry.EmojiCount, + }) + + return totalInput, totalOutput, utxoOpsForTxn, nil +} + +func (bav *UtxoView) _disconnectReact( + operationType OperationType, currentTxn *MsgDeSoTxn, txnHash *BlockHash, + utxoOpsForTxn []*UtxoOperation, blockHeight uint32) error { + + // Verify that the last operation is a Reaction operation + if len(utxoOpsForTxn) == 0 { + return fmt.Errorf("_disconnectReact: utxoOperations are missing") + } + operationIndex := len(utxoOpsForTxn) - 1 + if utxoOpsForTxn[operationIndex].Type != OperationTypeReact { + return fmt.Errorf("_disconnectReact: Trying to revert "+ + "OperationTypeReact but found type %v", + utxoOpsForTxn[operationIndex].Type) + } + + // Now we know the txMeta is a React + txMeta := currentTxn.TxnMeta.(*ReactMetadata) + + // Before we do anything, let's get the post so we can adjust the emoji map counter later. + reactedPostEntry := bav.GetPostEntryForPostHash(txMeta.PostHash) + if reactedPostEntry == nil { + return fmt.Errorf("_disconnectReact: Error getting post: %v", txMeta.PostHash) + } + + // Here we diverge and consider the react and unreact cases separately. + if txMeta.IsRemove { + // If this is an remove we just need to add back the previous react entry and react + // react count. We do some sanity checks first though to be extra safe. 
+ + prevReactEntry := utxoOpsForTxn[operationIndex].PrevReactEntry + // Sanity check: verify that the user on the reactEntry matches the transaction sender. + if !reflect.DeepEqual(prevReactEntry.ReactorPubKey, currentTxn.PublicKey) { + return fmt.Errorf("_disconnectReact: User public key on "+ + "ReactionEntry was %s but the PublicKey on the txn was %s", + PkToStringBoth(prevReactEntry.ReactorPubKey), + PkToStringBoth(currentTxn.PublicKey)) + } + + // Sanity check: verify that the post hash on the prevReactEntry matches the transaction's. + if !reflect.DeepEqual(prevReactEntry.ReactedPostHash, txMeta.PostHash) { + return fmt.Errorf("_disconnectLike: Liked post hash on "+ + "ReactionEntry was %s but the ReactedPostHash on the txn was %s", + prevReactEntry.ReactedPostHash, txMeta.PostHash) + } + + // Set the react entry and react count to their previous state. + bav._setReactionEntryMappings(prevReactEntry) + reactedPostEntry.EmojiCount = utxoOpsForTxn[operationIndex].PrevEmojiCount + bav._setPostEntryMappings(reactedPostEntry) + } else { + // If this is a normal "react," we do some sanity checks and then delete the entry. + + // Get the ReactionEntry. If we don't find it or isDeleted=true, that's an error. + reactKey := MakeReactionKey(currentTxn.PublicKey, *txMeta.PostHash, txMeta.EmojiReaction) + reactEntry := bav._getReactionEntryForReactionKey(&reactKey) + if reactEntry == nil || reactEntry.isDeleted { + return fmt.Errorf("_disconnectReact: ReactionEntry for "+ + "reactKey %v was found to be nil or isDeleted not set appropriately: %v", + &reactKey, reactEntry) + } + + // Sanity check: verify that the user on the reactEntry matches the transaction sender. 
+ if !reflect.DeepEqual(reactEntry.ReactorPubKey, currentTxn.PublicKey) { + return fmt.Errorf("_disconnectReact: User public key on "+ + "ReactionEntry was %s but the PublicKey on the txn was %s", + PkToStringBoth(reactEntry.ReactorPubKey), + PkToStringBoth(currentTxn.PublicKey)) + } + + // Sanity check: verify that the post hash on the reactEntry matches the transaction's. + if !reflect.DeepEqual(reactEntry.ReactedPostHash, txMeta.PostHash) { + return fmt.Errorf("_disconnectReact: Reacted post hash on "+ + "ReactionEntry was %s but the ReactedPostHash on the txn was %s", + reactEntry.ReactedPostHash, txMeta.PostHash) + } + + // Now that we're confident the FollowEntry lines up with the transaction we're + // rolling back, delete the mappings and set the reaction counter to its previous value. + bav._deleteReactionEntryMappings(reactEntry) + reactedPostEntry.EmojiCount = utxoOpsForTxn[operationIndex].PrevEmojiCount + bav._setPostEntryMappings(reactedPostEntry) + } + + // Now revert the basic transfer with the remaining operations. Cut off + // the Like operation at the end since we just reverted it. 
+ return bav._disconnectBasicTransfer( + currentTxn, txnHash, utxoOpsForTxn[:operationIndex], blockHeight) +} diff --git a/lib/block_view_reaction_test.go b/lib/block_view_reaction_test.go new file mode 100644 index 000000000..47b6ce669 --- /dev/null +++ b/lib/block_view_reaction_test.go @@ -0,0 +1,726 @@ +package lib + +import ( + "fmt" + "github.com/stretchr/testify/require" + "golang.org/x/text/unicode/norm" + "testing" +) + +var ( + HappyReaction = rune(norm.NFC.String(string('😊'))[0]) + SadReaction = rune(norm.NFC.String(string('😥'))[0]) + AngryReaction = rune(norm.NFC.String(string('😠'))[0]) + SurprisedReaction = rune(norm.NFC.String(string('😮'))[0]) +) + +func _doReactTxn(testMeta *TestMeta, feeRateNanosPerKB uint64, senderPkBase58Check string, + postHash BlockHash, senderPrivBase58Check string, isRemove bool, emojiReaction rune) ( + _utxoOps []*UtxoOperation, _txn *MsgDeSoTxn, _height uint32, _err error) { + + require := require.New(testMeta.t) + + senderPkBytes, _, err := Base58CheckDecode(senderPkBase58Check) + require.NoError(err) + + utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(err) + + txn, totalInputMake, changeAmountMake, feesMake, err := testMeta.chain.CreateReactTxn( + senderPkBytes, postHash, isRemove, emojiReaction, feeRateNanosPerKB, nil, []*DeSoOutput{}) + if err != nil { + return nil, nil, 0, err + } + + require.Equal(totalInputMake, changeAmountMake+feesMake) + + // Sign the transaction now that its inputs are set up. + _signTxn(testMeta.t, txn, senderPrivBase58Check) + + txHash := txn.Hash() + // Always use height+1 for validation since it's assumed the transaction will + // get mined into the next block. 
+ blockHeight := testMeta.chain.blockTip().Height + 1 + utxoOps, totalInput, totalOutput, fees, err := + utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, true, /*verifySignature*/ + false /*ignoreUtxos*/) + // ConnectTransaction should treat the amount locked as contributing to the + // output. + if err != nil { + return nil, nil, 0, err + } + require.Equal(totalInput, totalOutput+fees) + require.Equal(totalInput, totalInputMake) + + // We should have one SPEND UtxoOperation for each input, one ADD operation + // for each output, and one OperationTypeReact operation at the end. + require.Equal(len(txn.TxInputs)+len(txn.TxOutputs)+1, len(utxoOps)) + for ii := 0; ii < len(txn.TxInputs); ii++ { + require.Equal(OperationTypeSpendUtxo, utxoOps[ii].Type) + } + require.Equal(OperationTypeReact, utxoOps[len(utxoOps)-1].Type) + + require.NoError(utxoView.FlushToDb(0)) + + return utxoOps, txn, blockHeight, nil +} + +func TestReactTxns(t *testing.T) { + // Test constants + const feeRateNanosPerKb = uint64(101) + var err error + + //Initialize test chain and miner + chain, params, db := NewLowDifficultyBlockchain() + mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) + + // Mine a few blocks to give the senderPkString some money. + for ii := 0; ii < 20; ii++ { + _, err := miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) + require.NoError(t, err) + } + + // We build the testMeta obj after mining blocks so that we save the correct block height. 
+ testMeta := &TestMeta{ + t: t, + chain: chain, + params: params, + db: db, + mempool: mempool, + miner: miner, + savedHeight: chain.blockTip().Height + 1, + } + + // Helpers + type User struct { + Pub string + Priv string + PkBytes []byte + PublicKey *PublicKey + Pkid *PKID + } + + //TODO Use this correctly + //deso := User{ + // PublicKey: &ZeroPublicKey, + // Pkid: &ZeroPKID, + //} + + m0 := User{ + Pub: m0Pub, + Priv: m0Priv, + PkBytes: m0PkBytes, + PublicKey: NewPublicKey(m0PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m0PkBytes).PKID, + } + + m1 := User{ + Pub: m1Pub, + Priv: m1Priv, + PkBytes: m1PkBytes, + PublicKey: NewPublicKey(m1PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m1PkBytes).PKID, + } + + m2 := User{ + Pub: m2Pub, + Priv: m2Priv, + PkBytes: m2PkBytes, + PublicKey: NewPublicKey(m2PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m2PkBytes).PKID, + } + + m3 := User{ + Pub: m3Pub, + Priv: m3Priv, + PkBytes: m3PkBytes, + PublicKey: NewPublicKey(m3PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m3PkBytes).PKID, + } + + // Setup some convenience functions for the test. + var txnOps [][]*UtxoOperation + var txns []*MsgDeSoTxn + var expectedSenderBalances []uint64 + var expectedRecipientBalances []uint64 + + // We take the block tip to be the blockchain height rather than the + // header chain height. + savedHeight := chain.blockTip().Height + 1 + + // Fund all the keys. 
+ for ii := 0; ii < 5; ii++ { + _registerOrTransferWithTestMeta(testMeta, "m0", senderPkString, m0.Pub, senderPrivString, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1", senderPkString, m1.Pub, senderPrivString, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m2", senderPkString, m2.Pub, senderPrivString, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m3", senderPkString, m3.Pub, senderPrivString, 7e6) + } + + //TODO Fix this + //params.ExtraRegtestParamUpdaterKeys[MakePkMapKey(paramUpdaterPkBytes)] = true + // + //_updateGlobalParamsEntryWithTestMeta( + // testMeta, feeRateNanosPerKB, paramUpdaterPub, + // paramUpdaterPriv, -1, int64(feeRateNanosPerKb), -1, -1, -1, + //) + + doReactTxn := func( + senderPkBase58Check string, postHash BlockHash, + senderPrivBase58Check string, isRemove bool, emojiReaction rune, feeRateNanosPerKB uint64) { + + expectedSenderBalances = append( + expectedSenderBalances, _getBalance(t, chain, nil, senderPkString)) + expectedRecipientBalances = append( + expectedRecipientBalances, _getBalance(t, chain, nil, recipientPkString)) + + currentOps, currentTxn, _, err := _doReactTxn( + testMeta, feeRateNanosPerKB, senderPkBase58Check, + postHash, senderPrivBase58Check, isRemove, emojiReaction) + require.NoError(t, err) + + txnOps = append(txnOps, currentOps) + txns = append(txns, currentTxn) + } + + submitPost := func( + feeRateNanosPerKB uint64, updaterPkBase58Check string, + updaterPrivBase58Check string, + postHashToModify []byte, + parentStakeID []byte, + bodyObj *DeSoBodySchema, + repostedPostHash []byte, + tstampNanos uint64, + isHidden bool) { + + expectedSenderBalances = append( + expectedSenderBalances, _getBalance(t, chain, nil, senderPkString)) + expectedRecipientBalances = append( + expectedRecipientBalances, _getBalance(t, chain, nil, recipientPkString)) + + currentOps, currentTxn, _, err := _submitPost( + t, chain, db, params, feeRateNanosPerKB, + updaterPkBase58Check, + updaterPrivBase58Check, + postHashToModify, + 
parentStakeID, + bodyObj, + repostedPostHash, + tstampNanos, + isHidden) + + require.NoError(t, err) + + txnOps = append(txnOps, currentOps) + txns = append(txns, currentTxn) + } + + fakePostHash := BlockHash{ + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, + 0x30, 0x31, + } + // Attempting "m0 -> fakePostHash" should fail since the post doesn't exist. + _, _, _, err = _doReactTxn( + testMeta, 10 /*feeRateNanosPerKB*/, m0Pub, + fakePostHash, m0Priv, false /*isRemove*/, HappyReaction) + require.Error(t, err) + require.Contains(t, err.Error(), RuleErrorCannotReactNonexistentPost) + + // p1 + submitPost( + 10, /*feeRateNanosPerKB*/ + m0Pub, /*updaterPkBase58Check*/ + m0Priv, /*updaterPrivBase58Check*/ + []byte{}, /*postHashToModify*/ + []byte{}, /*parentStakeID*/ + &DeSoBodySchema{Body: "m0 post body 1 no profile"}, /*body*/ + []byte{}, + 1602947011*1e9, /*tstampNanos*/ + false /*isHidden*/) + post1Txn := txns[len(txns)-1] + post1Hash := *post1Txn.Hash() + + // p2 + { + submitPost( + 10, /*feeRateNanosPerKB*/ + m0Pub, /*updaterPkBase58Check*/ + m0Priv, /*updaterPrivBase58Check*/ + []byte{}, /*postHashToModify*/ + []byte{}, /*parentStakeID*/ + &DeSoBodySchema{Body: "m0 post body 2 no profile"}, /*body*/ + []byte{}, + 1502947012*1e9, /*tstampNanos*/ + false /*isHidden*/) + } + post2Txn := txns[len(txns)-1] + post2Hash := *post2Txn.Hash() + + // p3 + { + submitPost( + 10, /*feeRateNanosPerKB*/ + m1Pub, /*updaterPkBase58Check*/ + m1Priv, /*updaterPrivBase58Check*/ + []byte{}, /*postHashToModify*/ + []byte{}, /*parentStakeID*/ + &DeSoBodySchema{Body: "m1 post body 1 no profile"}, /*body*/ + []byte{}, + 1502947013*1e9, /*tstampNanos*/ + false /*isHidden*/) + } + post3Txn := txns[len(txns)-1] + post3Hash := *post3Txn.Hash() + + // m0 -> p1 (happy) + doReactTxn(m0Pub, post1Hash, m0Priv, false /*isRemove*/, HappyReaction, 10 
/*feeRateNanosPerKB*/) + + // Duplicating "m0 -> p1" should fail. + _, _, _, err = _doReactTxn( + testMeta, 10 /*feeRateNanosPerKB*/, m0Pub, + post1Hash, m0Priv, false /*isRemove*/, HappyReaction) + require.Error(t, err) + require.Contains(t, err.Error(), RuleErrorReactEntryAlreadyExists) + + // m2 -> p1 (happy) + doReactTxn(m2Pub, post1Hash, m2Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m3 -> p1 (surprised) + doReactTxn(m3Pub, post1Hash, m3Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m3 -> p2 (sad) + doReactTxn(m3Pub, post2Hash, m3Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m1 -> p2 (angry) + doReactTxn(m1Pub, post2Hash, m1Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m2 -> p3 (surprised) + doReactTxn(m2Pub, post3Hash, m2Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + reactingP1 := [][]byte{ + _strToPk(t, m0Pub), + _strToPk(t, m2Pub), + _strToPk(t, m3Pub), + } + + reactingP2 := [][]byte{ + _strToPk(t, m1Pub), + _strToPk(t, m3Pub), + } + + reactingP3 := [][]byte{ + _strToPk(t, m2Pub), + } + + // Verify pks reacting p1 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post1Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP1), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP1, reactingPks[ii]) + } + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(len(reactingP1)), post1.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p2 and check reactcount. 
+ { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post2Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP2), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP2, reactingPks[ii]) + } + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(len(reactingP2)), post2.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p3 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post3Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP3), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP3, reactingPks[ii]) + } + post3 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post3Hash) + require.Equal(t, uint64(len(reactingP3)), post3.EmojiCount[HappyReaction]) + } + + m0Reacts := []BlockHash{ + post1Hash, + } + + m1Reacts := []BlockHash{ + post2Hash, + } + + m2Reacts := []BlockHash{ + post1Hash, + post3Hash, + } + + m3Reacts := []BlockHash{ + post1Hash, + post2Hash, + } + + // Verify m0's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m0Pub)) + require.NoError(t, err) + require.Equal(t, len(m0Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m0Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m1's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m1Pub)) + require.NoError(t, err) + require.Equal(t, len(m1Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m1Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m2's reactions. 
+ { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m2Pub)) + require.NoError(t, err) + require.Equal(t, len(m2Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m2Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m3's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m3Pub)) + require.NoError(t, err) + require.Equal(t, len(m3Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m3Reacts, *reactedPostHashes[ii]) + } + } + + // Try an removing a reaction. + // + // m0 -> p1 (unfollow, happy) + doReactTxn(m0Pub, post1Hash, m0Priv, true /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m3 -> p2 (unfollow, happy) + doReactTxn(m3Pub, post2Hash, m3Priv, true /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // Duplicating "m0 -> p1" (unfollow) should fail. + _, _, _, err = _doReactTxn( + testMeta, 10 /*feeRateNanosPerKB*/, m0Pub, + post1Hash, m0Priv, true /*isRemove*/, HappyReaction) + require.Error(t, err) + require.Contains(t, err.Error(), RuleErrorCannotRemoveReactionWithoutAnExistingReaction) + + reactingP1 = [][]byte{ + _strToPk(t, m2Pub), + _strToPk(t, m3Pub), + } + + reactingP2 = [][]byte{ + _strToPk(t, m1Pub), + } + + // Verify pks reacting p1 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post1Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP1), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP1, reactingPks[ii]) + } + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(len(reactingP1)), post1.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p2 and check reactcount. 
+ { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post2Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP2), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP2, reactingPks[ii]) + } + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(len(reactingP2)), post2.EmojiCount[HappyReaction]) + } + + m3Reacts = []BlockHash{ + post1Hash, + } + + // Verify m0 has no reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m0Pub)) + require.NoError(t, err) + require.Equal(t, 0, len(reactedPostHashes)) + } + + // Verify m3's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m3Pub)) + require.NoError(t, err) + require.Equal(t, len(m3Reacts), len(reactedPostHashes)) + for i := 0; i < len(reactedPostHashes); i++ { + require.Contains(t, m3Reacts, *reactedPostHashes[i]) + } + } + + // =================================================================================== + // Finish it off with some transactions + // =================================================================================== + _registerOrTransferWithTestMeta(testMeta, "m0", senderPkString, m0.Pub, senderPrivString, 42e6) + _registerOrTransferWithTestMeta(testMeta, "m1", senderPkString, m1.Pub, senderPrivString, 42e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + 
_registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + + // Roll back all of the above using the utxoOps from each. + for ii := 0; ii < len(txnOps); ii++ { + backwardIter := len(txnOps) - 1 - ii + currentOps := txnOps[backwardIter] + currentTxn := txns[backwardIter] + fmt.Printf( + "Disconnecting transaction with type %v index %d (going backwards)\n", + currentTxn.TxnMeta.GetTxnType(), backwardIter) + + utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(t, err) + + currentHash := currentTxn.Hash() + err = utxoView.DisconnectTransaction(currentTxn, currentHash, currentOps, savedHeight) + require.NoError(t, err) + + require.NoError(t, utxoView.FlushToDb(0)) + + // After disconnecting, the balances should be restored to what they + // were before this transaction was applied. + require.Equal(t, + int64(expectedSenderBalances[backwardIter]), + int64(_getBalance(t, chain, nil, senderPkString))) + require.Equal(t, + expectedRecipientBalances[backwardIter], + _getBalance(t, chain, nil, recipientPkString)) + + // Here we check the reactcounts after all the reactentries have been disconnected. 
+ if backwardIter == 19 { + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(0), post1.EmojiCount[HappyReaction]) + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(0), post2.EmojiCount[HappyReaction]) + post3 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post3Hash) + require.Equal(t, uint64(0), post3.EmojiCount[HappyReaction]) + } + } + + _executeAllTestRollbackAndFlush(testMeta) + + // Apply all the transactions to a mempool object and make sure we don't get any + // errors. Verify the balances align as we go. + for ii, tx := range txns { + // See comment above on this transaction. + fmt.Printf("Adding txn %d of type %v to mempool\n", ii, tx.TxnMeta.GetTxnType()) + + require.Equal(t, expectedSenderBalances[ii], _getBalance(t, chain, mempool, senderPkString)) + require.Equal(t, expectedRecipientBalances[ii], _getBalance(t, chain, mempool, recipientPkString)) + + _, err := mempool.ProcessTransaction(tx, false, false, 0, true) + require.NoError(t, err, "Problem adding transaction %d to mempool: %v", ii, tx) + } + + // Apply all the transactions to a view and flush the view to the db. + utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(t, err) + for ii, txn := range txns { + fmt.Printf("Adding txn %v of type %v to UtxoView\n", ii, txn.TxnMeta.GetTxnType()) + + // Always use height+1 for validation since it's assumed the transaction will + // get mined into the next block. + txHash := txn.Hash() + blockHeight := chain.blockTip().Height + 1 + _, _, _, _, err := + utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, true /*verifySignature*/, false /*ignoreUtxos*/) + require.NoError(t, err) + } + // Flush the utxoView after having added all the transactions. 
+ require.NoError(t, utxoView.FlushToDb(0)) + + testConnectedState := func() { + reactingP1 = [][]byte{ + _strToPk(t, m2Pub), + _strToPk(t, m3Pub), + } + + reactingP2 = [][]byte{ + _strToPk(t, m1Pub), + } + + reactingP3 := [][]byte{ + _strToPk(t, m2Pub), + } + + // Verify pks reacting p1 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post1Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP1), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP1, reactingPks[ii]) + } + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(len(reactingP1)), post1.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p2 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post2Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP2), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP2, reactingPks[ii]) + } + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(len(reactingP2)), post2.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p3 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post3Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP3), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP3, reactingPks[ii]) + } + post3 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post3Hash) + require.Equal(t, uint64(len(reactingP3)), post3.EmojiCount[HappyReaction]) + } + + m1Reacts := []BlockHash{ + post2Hash, + } + + m2Reacts := []BlockHash{ + post1Hash, + post3Hash, + } + + m3Reacts = []BlockHash{ + post1Hash, + } + + // Verify m0 has no reactions. 
+ { + followPks, err := DbGetPostHashesYouReact(db, _strToPk(t, m0Pub)) + require.NoError(t, err) + require.Equal(t, 0, len(followPks)) + } + + // Verify m1's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m1Pub)) + require.NoError(t, err) + require.Equal(t, len(m1Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m1Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m2's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m2Pub)) + require.NoError(t, err) + require.Equal(t, len(m2Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m2Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m3's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m3Pub)) + require.NoError(t, err) + require.Equal(t, len(m3Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m3Reacts, *reactedPostHashes[ii]) + } + } + } + testConnectedState() + + // Disconnect the transactions from a single view in the same way as above + // i.e. without flushing each time. 
+ utxoView2, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(t, err) + for ii := 0; ii < len(txnOps); ii++ { + backwardIter := len(txnOps) - 1 - ii + fmt.Printf("Disconnecting transaction with index %d (going backwards)\n", backwardIter) + currentOps := txnOps[backwardIter] + currentTxn := txns[backwardIter] + + currentHash := currentTxn.Hash() + err = utxoView2.DisconnectTransaction(currentTxn, currentHash, currentOps, savedHeight) + require.NoError(t, err) + } + require.NoError(t, utxoView2.FlushToDb(0)) + require.Equal(t, expectedSenderBalances[0], _getBalance(t, chain, nil, senderPkString)) + require.Equal(t, expectedRecipientBalances[0], _getBalance(t, chain, nil, recipientPkString)) + + _executeAllTestRollbackAndFlush(testMeta) + + // All the txns should be in the mempool already so mining a block should put + // all those transactions in it. + block, err := miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) + require.NoError(t, err) + // Add one for the block reward. Now we have a meaty block. + require.Equal(t, len(txnOps)+1, len(block.Txns)) + // Estimate the transaction fees of the tip block in various ways. + { + // Threshold above what's in the block should return the default fee at all times. + require.Equal(t, int64(0), int64(chain.EstimateDefaultFeeRateNanosPerKB(.1, 0))) + require.Equal(t, int64(7), int64(chain.EstimateDefaultFeeRateNanosPerKB(.1, 7))) + // Threshold below what's in the block should return the max of the median + // and the minfee. This means with a low minfee the value returned should be + // higher. And with a high minfee the value returned should be equal to the + // fee. 
+ require.Equal(t, int64(7), int64(chain.EstimateDefaultFeeRateNanosPerKB(0, 7))) + require.Equal(t, int64(4), int64(chain.EstimateDefaultFeeRateNanosPerKB(0, 0))) + require.Equal(t, int64(7), int64(chain.EstimateDefaultFeeRateNanosPerKB(.01, 7))) + require.Equal(t, int64(4), int64(chain.EstimateDefaultFeeRateNanosPerKB(.01, 1))) + } + + testConnectedState() + + _executeAllTestRollbackAndFlush(testMeta) +} + +// func TestReactTxns +// - one successful happy, sad, angry, confused +// - one failure (invalid character?, not amongst the other characters) + +// func _createReactTxn +// func _connectReactTxn +// func _doReactTxnWithTestMeta +// func _doReactRxnErrorToBeDefined +// func Eq +// func ToEntry +// func TestFlushingReactTxn diff --git a/lib/block_view_types.go b/lib/block_view_types.go index 4700f9129..96f5179d0 100644 --- a/lib/block_view_types.go +++ b/lib/block_view_types.go @@ -79,6 +79,7 @@ const ( EncoderTypeMessagingGroupMember EncoderTypeForbiddenPubKeyEntry EncoderTypeLikeEntry + EncoderTypeReactEntry EncoderTypeNFTEntry EncoderTypeNFTBidEntry EncoderTypeNFTBidEntryBundle @@ -118,6 +119,7 @@ const ( EncoderTypeUpdateProfileTxindexMetadata EncoderTypeSubmitPostTxindexMetadata EncoderTypeLikeTxindexMetadata + EncoderTypeReactTxindexMetadata EncoderTypeFollowTxindexMetadata EncoderTypePrivateMessageTxindexMetadata EncoderTypeSwapIdentityTxindexMetadata @@ -225,6 +227,8 @@ func (encoderType EncoderType) New() DeSoEncoder { return &SubmitPostTxindexMetadata{} case EncoderTypeLikeTxindexMetadata: return &LikeTxindexMetadata{} + case EncoderTypeReactTxindexMetadata: + return &ReactTxindexMetadata{} case EncoderTypeFollowTxindexMetadata: return &FollowTxindexMetadata{} case EncoderTypePrivateMessageTxindexMetadata: @@ -517,8 +521,9 @@ const ( OperationTypeDAOCoinTransfer OperationType = 26 OperationTypeSpendingLimitAccounting OperationType = 27 OperationTypeDAOCoinLimitOrder OperationType = 28 + OperationTypeReact OperationType = 29 - // NEXT_TAG = 29 + // 
NEXT_TAG = 30 ) func (op OperationType) String() string { @@ -688,6 +693,10 @@ type UtxoOperation struct { PrevLikeEntry *LikeEntry PrevLikeCount uint64 + // Save the previous emoji reactions + PrevReactEntry *ReactionEntry + PrevEmojiCount map[rune]uint64 + // For disconnecting diamonds. PrevDiamondEntry *DiamondEntry @@ -2263,6 +2272,61 @@ func (likeEntry *LikeEntry) GetEncoderType() EncoderType { return EncoderTypeLikeEntry } +func MakeReactionKey(userPk []byte, ReactPostHash BlockHash, ReactEmoji rune) ReactionKey { + return ReactionKey{ + // Avoid using the pointer so that it is easier to compare Reaction key structs + ReactorPubKey: *NewPublicKey(userPk), + ReactedPostHash: ReactPostHash, + ReactEmoji: ReactEmoji, + } +} + +type ReactionKey struct { + ReactorPubKey PublicKey + ReactedPostHash BlockHash + ReactEmoji rune +} + +type ReactionEntry struct { + ReactorPubKey []byte + ReactedPostHash *BlockHash + ReactEmoji rune + // Whether this entry is deleted in the view + isDeleted bool +} + +func (reactEntry *ReactionEntry) RawEncodeWithoutMetadata(blockHeight uint64, skipMetadata ...bool) []byte { + var data []byte + + data = append(data, EncodeByteArray(reactEntry.ReactorPubKey)...) + data = append(data, EncodeToBytes(blockHeight, reactEntry.ReactedPostHash, skipMetadata...)...) 
+ return data +} + +func (reactEntry *ReactionEntry) RawDecodeWithoutMetadata(blockHeight uint64, rr *bytes.Reader) error { + var err error + + reactEntry.ReactorPubKey, err = DecodeByteArray(rr) + if err != nil { + return errors.Wrapf(err, "ReactionEntry.Decode: problem reading ReactorPubKey") + } + reactedPostHash := &BlockHash{} + if exist, err := DecodeFromBytes(reactedPostHash, rr); exist && err == nil { + reactEntry.ReactedPostHash = reactedPostHash + } else if err != nil { + return errors.Wrapf(err, "ReactionEntry.Decode: problem reading ReactedPostHash") + } + return nil +} + +func (reactEntry *ReactionEntry) GetVersionByte(blockHeight uint64) byte { + return 0 +} + +func (reactEntry *ReactionEntry) GetEncoderType() EncoderType { + return EncoderTypeReactEntry +} + func MakeNFTKey(nftPostHash *BlockHash, serialNumber uint64) NFTKey { return NFTKey{ NFTPostHash: *nftPostHash, @@ -2992,6 +3056,9 @@ type PostEntry struct { // Counter of users that have liked this post. LikeCount uint64 + // Counter of emoji reactions that this post has. + EmojiCount map[rune]uint64 + // Counter of users that have reposted this post. RepostCount uint64 diff --git a/lib/blockchain.go b/lib/blockchain.go index 9a894e499..74e45202c 100644 --- a/lib/blockchain.go +++ b/lib/blockchain.go @@ -6,6 +6,7 @@ import ( "encoding/hex" "fmt" "github.com/holiman/uint256" + "golang.org/x/text/unicode/norm" "math" "math/big" "reflect" @@ -3054,6 +3055,59 @@ func (bc *Blockchain) CreateLikeTxn( return txn, totalInput, changeAmount, fees, nil } +func (bc *Blockchain) CreateReactTxn( + userPublicKey []byte, postHash BlockHash, isRemove bool, emojiReaction rune, + minFeeRateNanosPerKB uint64, mempool *DeSoMempool, additionalOutputs []*DeSoOutput) ( + _txn *MsgDeSoTxn, _totalInput uint64, _changeAmount uint64, _fees uint64, + _err error) { + + //TODO Where would be the best place to place the validation function? 
+	// At the moment, only support happy, sad, angry and surprised
+	AcceptedReactions := [4]rune{'😊', '😥', '😠', '😮'}
+	// Validate emoji reaction
+	var isValidEmoji = func(emoji rune) bool {
+		for _, acceptedReaction := range AcceptedReactions {
+			if emoji == acceptedReaction {
+				return true
+			}
+		}
+		return false
+	}
+
+	// Take the first RUNE of the normalized string, not the first byte: emoji are multi-byte in UTF-8.
+	normalizedReaction := norm.NFC.String(string(emojiReaction))
+	if !isValidEmoji([]rune(normalizedReaction)[0]) {
+		return nil, 0, 0, 0, errors.New("CreateReactTxn: Invalid emoji input: ")
+	}
+
+	// A React transaction doesn't need any inputs or outputs (except additionalOutputs provided).
+	txn := &MsgDeSoTxn{
+		PublicKey: userPublicKey,
+		TxnMeta: &ReactMetadata{
+			PostHash:      &postHash,
+			EmojiReaction: []rune(normalizedReaction)[0],
+			IsRemove:      isRemove,
+		},
+		TxOutputs: additionalOutputs,
+		// We wait to compute the signature until we've added all the
+		// inputs and change.
+	}
+
+	totalInput, spendAmount, changeAmount, fees, err :=
+		bc.AddInputsAndChangeToTransaction(txn, minFeeRateNanosPerKB, mempool)
+	if err != nil {
+		return nil, 0, 0, 0, errors.Wrapf(
+			err, "CreateReactTxn: Problem adding inputs: ")
+	}
+
+	// Sanity-check that the spendAmount is zero.
+ if err = amountEqualsAdditionalOutputs(spendAmount, additionalOutputs); err != nil { + return nil, 0, 0, 0, fmt.Errorf("CreateReactTxn: %v", err) + } + + return txn, totalInput, changeAmount, fees, nil +} + func (bc *Blockchain) CreateFollowTxn( senderPublicKey []byte, followedPublicKey []byte, isUnfollow bool, minFeeRateNanosPerKB uint64, mempool *DeSoMempool, additionalOutputs []*DeSoOutput) ( diff --git a/lib/db_utils.go b/lib/db_utils.go index 1cbdeb075..7f1d63993 100644 --- a/lib/db_utils.go +++ b/lib/db_utils.go @@ -174,6 +174,12 @@ type DBPrefixes struct { PrefixLikerPubKeyToLikedPostHash []byte `prefix_id:"[30]" is_state:"true"` PrefixLikedPostHashToLikerPubKey []byte `prefix_id:"[31]" is_state:"true"` + // Prefixes for reactions: + // -> <> + // -> <> + PrefixReactorPubKeyToPostHash []byte `prefix_id:"[63]" is_state:"true"` + PrefixPostHashToReactorPubKey []byte `prefix_id:"[64]" is_state:"true"` + // Prefixes for creator coin fields: // -> // -> @@ -322,7 +328,7 @@ type DBPrefixes struct { PrefixDAOCoinLimitOrder []byte `prefix_id:"[60]" is_state:"true"` PrefixDAOCoinLimitOrderByTransactorPKID []byte `prefix_id:"[61]" is_state:"true"` PrefixDAOCoinLimitOrderByOrderID []byte `prefix_id:"[62]" is_state:"true"` - // NEXT_TAG: 63 + // NEXT_TAG: 65 } // StatePrefixToDeSoEncoder maps each state prefix to a DeSoEncoder type that is stored under that prefix. 
@@ -479,8 +485,13 @@ func StatePrefixToDeSoEncoder(prefix []byte) (_isEncoder bool, _encoder DeSoEnco } else if bytes.Equal(prefix, Prefixes.PrefixDAOCoinLimitOrderByOrderID) { // prefix_id:"[62]" return true, &DAOCoinLimitOrderEntry{} + } else if bytes.Equal(prefix, Prefixes.PrefixReactorPubKeyToPostHash) { + // prefix_id:"[63]" + return false, nil + } else if bytes.Equal(prefix, Prefixes.PrefixPostHashToReactorPubKey) { + // prefix_id:"[64]" + return false, nil } - return true, nil } @@ -2032,6 +2043,164 @@ func DbGetLikerPubKeysLikingAPostHash(handle *badger.DB, likedPostHash BlockHash return userPubKeys, nil } +// ------------------------------------------------------------------------------------- +// React mapping functions +// -> <> +// -> <> +// ------------------------------------------------------------------------------------- +// +//TODO these will probably need to change slightly to accommodate multiple reactions with different emojis per post +func _dbKeyForReactorPubKeyToPostHashMapping( + userPubKey []byte, postHash BlockHash, reactionEmoji rune) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixReactorPubKeyToPostHash...) + key := append(prefixCopy, userPubKey...) + key = append(key, postHash[:]...) + key = append(key, []byte(string(reactionEmoji))...) + return key +} + +func _dbKeyForPostHashToReactorPubKeyMapping( + postHash BlockHash, userPubKey []byte) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixPostHashToReactorPubKey...) + key := append(prefixCopy, postHash[:]...) + key = append(key, userPubKey...) + return key +} + +func _dbSeekPrefixForPostHashesYouReact(yourPubKey []byte) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixReactorPubKeyToPostHash...) 
+	return append(prefixCopy, yourPubKey...)
+}
+
+func _dbSeekPrefixForReactorPubKeysReactingToPostHash(likedPostHash BlockHash) []byte {
+	// Make a copy to avoid multiple calls to this function re-using the same slice.
+	prefixCopy := append([]byte{}, Prefixes.PrefixPostHashToReactorPubKey...)
+	return append(prefixCopy, likedPostHash[:]...)
+}
+
+// Note that this adds a mapping for the user *and* the reacted post.
+func DbPutReactMappingsWithTxn(
+	txn *badger.Txn, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) error {
+
+	if len(userPubKey) != btcec.PubKeyBytesLenCompressed {
+		return fmt.Errorf("DbPutReactMappingsWithTxn: User public key "+
+			"length %d != %d", len(userPubKey), btcec.PubKeyBytesLenCompressed)
+	}
+
+	if err := txn.Set(_dbKeyForReactorPubKeyToPostHashMapping(userPubKey, likedPostHash, reactionEmoji), []byte{}); err != nil {
+		return errors.Wrapf(err, "DbPutReactMappingsWithTxn: Problem adding user to reacted post mapping: ")
+	}
+
+	if err := txn.Set(_dbKeyForPostHashToReactorPubKeyMapping(likedPostHash, userPubKey), []byte{}); err != nil {
+		return errors.Wrapf(err, "DbPutReactMappingsWithTxn: Problem adding reacted post to user mapping: ")
+	}
+
+	return nil
+}
+
+func DbPutReactMappings(
+	handle *badger.DB, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) error {
+
+	return handle.Update(func(txn *badger.Txn) error {
+		return DbPutReactMappingsWithTxn(txn, userPubKey, likedPostHash, reactionEmoji)
+	})
+}
+
+func DbGetReactorPubKeyToPostHashMappingWithTxn(
+	txn *badger.Txn, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) []byte {
+
+	key := _dbKeyForReactorPubKeyToPostHashMapping(userPubKey, likedPostHash, reactionEmoji)
+	_, err := txn.Get(key)
+	if err != nil {
+		return nil
+	}
+
+	// Typically we return a DB entry here but we don't store anything for react mappings.
+	// We use this function instead of one returning true / false for feature consistency.
+ return []byte{} +} + +func DbGetReactorPubKeyToPostHashMapping( + db *badger.DB, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) []byte { + var ret []byte + db.View(func(txn *badger.Txn) error { + ret = DbGetReactorPubKeyToPostHashMappingWithTxn(txn, userPubKey, likedPostHash, reactionEmoji) + return nil + }) + return ret +} + +// Note this deletes the like for the user *and* the liked post since a mapping +// should exist for each. +func DbDeleteReactMappingsWithTxn( + txn *badger.Txn, userPubKey []byte, postHash BlockHash, reactionEmoji rune) error { + + // First check that a mapping exists. If one doesn't exist then there's nothing to do. + existingMapping := DbGetReactorPubKeyToPostHashMappingWithTxn(txn, userPubKey, postHash, reactionEmoji) + if existingMapping == nil { + return nil + } + + // When a message exists, delete the mapping for the sender and receiver. + if err := txn.Delete( + _dbKeyForReactorPubKeyToPostHashMapping(userPubKey, postHash, reactionEmoji)); err != nil { + return errors.Wrapf(err, "DbDeleteLikeMappingsWithTxn: Deleting "+ + "userPubKey %s and postHash %s failed", + PkToStringBoth(userPubKey), postHash) + } + if err := txn.Delete( + _dbKeyForPostHashToReactorPubKeyMapping(postHash, userPubKey)); err != nil { + return errors.Wrapf(err, "DbDeleteLikeMappingsWithTxn: Deleting "+ + "postHash %s and userPubKey %s failed", + PkToStringBoth(postHash[:]), PkToStringBoth(userPubKey)) + } + + return nil +} + +func DbDeleteReactMappings( + handle *badger.DB, userPubKey []byte, postHash BlockHash, reactionEmoji rune) error { + return handle.Update(func(txn *badger.Txn) error { + return DbDeleteReactMappingsWithTxn(txn, userPubKey, postHash, reactionEmoji) + }) +} + +func DbGetPostHashesYouReact(handle *badger.DB, yourPublicKey []byte) ( + _postHashes []*BlockHash, _err error) { + + prefix := _dbSeekPrefixForPostHashesYouReact(yourPublicKey) + keysFound, _ := _enumerateKeysForPrefix(handle, prefix) + + var postHashesYouReact 
[]*BlockHash + for _, keyBytes := range keysFound { + // We must slice off the first byte and userPubKey to get the postHash. + postHash := &BlockHash{} + copy(postHash[:], keyBytes[1+btcec.PubKeyBytesLenCompressed:]) + postHashesYouReact = append(postHashesYouReact, postHash) + } + + return postHashesYouReact, nil +} + +func DbGetReactorPubKeysReactingToPostHash(handle *badger.DB, postHash BlockHash) ( + _pubKeys [][]byte, _err error) { + + prefix := _dbSeekPrefixForReactorPubKeysReactingToPostHash(postHash) + keysFound, _ := _enumerateKeysForPrefix(handle, prefix) + + var userPubKeys [][]byte + for _, keyBytes := range keysFound { + // We must slice off the first byte and postHash to get the userPubKey. + userPubKey := keyBytes[1+HashSizeBytes:] + userPubKeys = append(userPubKeys, userPubKey) + } + + return userPubKeys, nil +} + // ------------------------------------------------------------------------------------- // Reposts mapping functions // -> <> @@ -4844,6 +5013,52 @@ func (txnMeta *LikeTxindexMetadata) GetEncoderType() EncoderType { return EncoderTypeLikeTxindexMetadata } +type ReactTxindexMetadata struct { + // ReactorPublicKeyBase58Check = TransactorPublicKeyBase58Check + IsRemove bool + EmojiReaction rune + + PostHashHex string + // PosterPublicKeyBase58Check in AffectedPublicKeys +} + +func (txnMeta *ReactTxindexMetadata) RawEncodeWithoutMetadata(blockHeight uint64, skipMetadata ...bool) []byte { + var data []byte + + data = append(data, BoolToByte(txnMeta.IsRemove)) + data = append(data, []byte(string(txnMeta.EmojiReaction))...) + data = append(data, EncodeByteArray([]byte(txnMeta.PostHashHex))...) 
+ return data +} + +func (txnMeta *ReactTxindexMetadata) RawDecodeWithoutMetadata(blockHeight uint64, rr *bytes.Reader) error { + var err error + + txnMeta.IsRemove, err = ReadBoolByte(rr) + if err != nil { + return errors.Wrapf(err, "ReactTxindexMetadata.Decode: Empty IsRemove") + } + txnMeta.EmojiReaction, _, err = rr.ReadRune() + if err != nil { + return errors.Wrapf(err, "ReactTxindexMetadata.Decode: Empty EmojiReaction") + } + postHashHexBytes, err := DecodeByteArray(rr) + if err != nil { + return errors.Wrapf(err, "ReactTxindexMetadata.Decode: problem reading PostHashHex") + } + txnMeta.PostHashHex = string(postHashHexBytes) + + return nil +} + +func (txnMeta *ReactTxindexMetadata) GetVersionByte(blockHeight uint64) byte { + return 0 +} + +func (txnMeta *ReactTxindexMetadata) GetEncoderType() EncoderType { + return EncoderTypeReactTxindexMetadata +} + type FollowTxindexMetadata struct { // FollowerPublicKeyBase58Check = TransactorPublicKeyBase58Check // FollowedPublicKeyBase58Check in AffectedPublicKeys @@ -5353,6 +5568,7 @@ type TransactionMetadata struct { UpdateProfileTxindexMetadata *UpdateProfileTxindexMetadata `json:",omitempty"` SubmitPostTxindexMetadata *SubmitPostTxindexMetadata `json:",omitempty"` LikeTxindexMetadata *LikeTxindexMetadata `json:",omitempty"` + ReactTxindexMetadata *ReactTxindexMetadata `json:",omitempty"` FollowTxindexMetadata *FollowTxindexMetadata `json:",omitempty"` PrivateMessageTxindexMetadata *PrivateMessageTxindexMetadata `json:",omitempty"` SwapIdentityTxindexMetadata *SwapIdentityTxindexMetadata `json:",omitempty"` diff --git a/lib/errors.go b/lib/errors.go index 67eef8a21..e91b16e7a 100644 --- a/lib/errors.go +++ b/lib/errors.go @@ -112,6 +112,10 @@ const ( RuleErrorCannotLikeNonexistentPost RuleError = "RuleErrorCannotLikeNonexistentPost" RuleErrorCannotUnlikeWithoutAnExistingLike RuleError = "RuleErrorCannotUnlikeWithoutAnExistingLike" + RuleErrorReactEntryAlreadyExists RuleError = "RuleErrorReactEntryAlreadyExists" + 
RuleErrorCannotReactNonexistentPost RuleError = "RuleErrorCannotReactNonexistentPost" + RuleErrorCannotRemoveReactionWithoutAnExistingReaction RuleError = "RuleErrorCannotRemoveReactionWithoutAnExistingReaction" + RuleErrorProfileUsernameTooShort RuleError = "RuleErrorProfileUsernameTooShort" RuleErrorProfileDescriptionTooShort RuleError = "RuleErrorProfileDescriptionTooShort" RuleErrorProfileUsernameTooLong RuleError = "RuleErrorProfileUsernameTooLong" diff --git a/lib/mempool.go b/lib/mempool.go index 597e76c0d..067c2246d 100644 --- a/lib/mempool.go +++ b/lib/mempool.go @@ -1387,6 +1387,34 @@ func ComputeTransactionMetadata(txn *MsgDeSoTxn, utxoView *UtxoView, blockHash * Metadata: "PosterPublicKeyBase58Check", }) } + case TxnTypeReact: + realTxMeta := txn.TxnMeta.(*ReactMetadata) + + // ReactorPublicKeyBase58Check = TransactorPublicKeyBase58Check + + txnMeta.ReactTxindexMetadata = &ReactTxindexMetadata{ + IsRemove: realTxMeta.IsRemove, + PostHashHex: hex.EncodeToString(realTxMeta.PostHash[:]), + EmojiReaction: realTxMeta.EmojiReaction, + } + + // Get the public key of the poster and set it as having been affected + // by this like. 
+ // + // PosterPublicKeyBase58Check in AffectedPublicKeys + postHash := &BlockHash{} + copy(postHash[:], realTxMeta.PostHash[:]) + postEntry := utxoView.GetPostEntryForPostHash(postHash) + if postEntry == nil { + glog.V(2).Infof( + "UpdateTxindex: Error creating ReactTxindexMetadata; "+ + "missing post for hash %v: %v", postHash, err) + } else { + txnMeta.AffectedPublicKeys = append(txnMeta.AffectedPublicKeys, &AffectedPublicKey{ + PublicKeyBase58Check: PkToString(postEntry.PosterPublicKey, utxoView.Params), + Metadata: "PosterPublicKeyBase58Check", + }) + } case TxnTypeFollow: realTxMeta := txn.TxnMeta.(*FollowMetadata) diff --git a/lib/network.go b/lib/network.go index f22186f2b..4ce3a7e39 100644 --- a/lib/network.go +++ b/lib/network.go @@ -8,6 +8,7 @@ import ( "encoding/hex" "encoding/json" "fmt" + "golang.org/x/text/unicode/norm" "io" "math" "net" @@ -223,8 +224,9 @@ const ( TxnTypeDAOCoin TxnType = 24 TxnTypeDAOCoinTransfer TxnType = 25 TxnTypeDAOCoinLimitOrder TxnType = 26 + TxnTypeReact TxnType = 27 - // NEXT_ID = 27 + // NEXT_ID = 28 ) type TxnString string @@ -256,6 +258,7 @@ const ( TxnStringDAOCoin TxnString = "DAO_COIN" TxnStringDAOCoinTransfer TxnString = "DAO_COIN_TRANSFER" TxnStringDAOCoinLimitOrder TxnString = "DAO_COIN_LIMIT_ORDER" + TxnStringReact TxnString = "REACT" TxnStringUndefined TxnString = "TXN_UNDEFINED" ) @@ -266,7 +269,7 @@ var ( TxnTypeCreatorCoin, TxnTypeSwapIdentity, TxnTypeUpdateGlobalParams, TxnTypeCreatorCoinTransfer, TxnTypeCreateNFT, TxnTypeUpdateNFT, TxnTypeAcceptNFTBid, TxnTypeNFTBid, TxnTypeNFTTransfer, TxnTypeAcceptNFTTransfer, TxnTypeBurnNFT, TxnTypeAuthorizeDerivedKey, TxnTypeMessagingGroup, - TxnTypeDAOCoin, TxnTypeDAOCoinTransfer, TxnTypeDAOCoinLimitOrder, + TxnTypeDAOCoin, TxnTypeDAOCoinTransfer, TxnTypeDAOCoinLimitOrder, TxnTypeReact, } AllTxnString = []TxnString{ TxnStringUnset, TxnStringBlockReward, TxnStringBasicTransfer, TxnStringBitcoinExchange, TxnStringPrivateMessage, @@ -274,7 +277,7 @@ var ( 
TxnStringCreatorCoin, TxnStringSwapIdentity, TxnStringUpdateGlobalParams, TxnStringCreatorCoinTransfer, TxnStringCreateNFT, TxnStringUpdateNFT, TxnStringAcceptNFTBid, TxnStringNFTBid, TxnStringNFTTransfer, TxnStringAcceptNFTTransfer, TxnStringBurnNFT, TxnStringAuthorizeDerivedKey, TxnStringMessagingGroup, - TxnStringDAOCoin, TxnStringDAOCoinTransfer, TxnStringDAOCoinLimitOrder, + TxnStringDAOCoin, TxnStringDAOCoinTransfer, TxnStringDAOCoinLimitOrder, TxnStringReact, } ) @@ -340,6 +343,8 @@ func (txnType TxnType) GetTxnString() TxnString { return TxnStringDAOCoinTransfer case TxnTypeDAOCoinLimitOrder: return TxnStringDAOCoinLimitOrder + case TxnTypeReact: + return TxnStringReact default: return TxnStringUndefined } @@ -399,6 +404,8 @@ func GetTxnTypeFromString(txnString TxnString) TxnType { return TxnTypeDAOCoinTransfer case TxnStringDAOCoinLimitOrder: return TxnTypeDAOCoinLimitOrder + case TxnStringReact: + return TxnTypeReact default: // TxnTypeUnset means we couldn't find a matching txn type return TxnTypeUnset @@ -466,6 +473,8 @@ func NewTxnMetadata(txType TxnType) (DeSoTxnMetadata, error) { return (&DAOCoinTransferMetadata{}).New(), nil case TxnTypeDAOCoinLimitOrder: return (&DAOCoinLimitOrderMetadata{}).New(), nil + case TxnTypeReact: + return (&ReactMetadata{}).New(), nil default: return nil, fmt.Errorf("NewTxnMetadata: Unrecognized TxnType: %v; make sure you add the new type of transaction to NewTxnMetadata", txType) } @@ -3386,6 +3395,88 @@ func (txnData *LikeMetadata) New() DeSoTxnMetadata { return &LikeMetadata{} } +// ================================================================== +// ReactMetadata +// +// A reaction is an interaction where a user on the platform reacts to a post. +// ================================================================== + +type ReactMetadata struct { + // The user reacting is assumed to be the originator of the + // top-level transaction. + + // The post hash to react to. 
+ PostHash *BlockHash + + // Set to true when a user is requesting to "remove" a reaction. + IsRemove bool + + // The Unicode for the emoji reaction. + EmojiReaction rune +} + +func (txnData *ReactMetadata) GetTxnType() TxnType { + return TxnTypeReact +} + +func (txnData *ReactMetadata) ToBytes(preSignature bool) ([]byte, error) { + // Validate the metadata before encoding it. + // + + var data []byte + + // Add PostHash + // + // We know the post hash is set and has the expected length, so we don't need + // to encode the length here. + data = append(data, txnData.PostHash[:]...) + + // Add IsRemove + data = append(data, BoolToByte(txnData.IsRemove)) + + // Add EmojiReaction. + // It is possible for a single character to be encoded with different code point sequences. + // By normalizing the Unicode (NFC), we ensure that a character will have a unique code point sequence. + data = append(data, norm.NFC.Bytes([]byte(string(txnData.EmojiReaction)))...) + + return data, nil +} + +func (txnData *ReactMetadata) FromBytes(data []byte) error { + ret := ReactMetadata{} + rr := bytes.NewReader(data) + + // PostHash + ret.PostHash = &BlockHash{} + _, err := io.ReadFull(rr, ret.PostHash[:]) + if err != nil { + return fmt.Errorf( + "ReactMetadata.FromBytes: Error reading PostHash: %v", err) + } + + // IsRemove + ret.IsRemove, err = ReadBoolByte(rr) + if err != nil { + return errors.Wrapf(err, "ReactMetadata.FromBytes: Problem reading IsRemove") + } + + // Emoji reaction + reaction, _, err := rr.ReadRune() + if err != nil { + return fmt.Errorf( + "ReactMetadata.FromBytes: Error reading EmojiReaction: %v", err) + } + + ret.EmojiReaction = reaction + *txnData = ret + + return nil +} + +func (txnData *ReactMetadata) New() DeSoTxnMetadata { + return &ReactMetadata{} +} + // ================================================================== // FollowMetadata // diff --git a/lib/network_test.go b/lib/network_test.go index a5653705e..0c5bf3b71 100644 --- a/lib/network_test.go +++ 
b/lib/network_test.go @@ -732,6 +732,64 @@ func TestSerializeUnlike(t *testing.T) { require.Equal(txMeta, testMeta) } +func TestSerializeNoReaction(t *testing.T) { + require := require.New(t) + + txMeta := &ReactMetadata{PostHash: &postHashForTesting1} + + data, err := txMeta.ToBytes(false) + require.NoError(err) + + testMeta, err := NewTxnMetadata(TxnTypeReact) + require.NoError(err) + err = testMeta.FromBytes(data) + require.NoError(err) + require.Equal(txMeta, testMeta) +} + +func TestSerializeRemoveReaction(t *testing.T) { + require := require.New(t) + + txMeta := &ReactMetadata{ + PostHash: &postHashForTesting1, + IsRemove: true, + } + + data, err := txMeta.ToBytes(false) + require.NoError(err) + + testMeta, err := NewTxnMetadata(TxnTypeReact) + require.NoError(err) + err = testMeta.FromBytes(data) + require.NoError(err) + require.Equal(txMeta, testMeta) +} + +func TestSerializeReactions(t *testing.T) { + ValidReactions := []rune{'😊', '😥', '😠', '😮'} + for _, r := range ValidReactions { + _testSerializeSingleReaction(t, r) + } +} + +func _testSerializeSingleReaction(t *testing.T, emoji rune) { + require := require.New(t) + + txMeta := &ReactMetadata{ + PostHash: &postHashForTesting1, + EmojiReaction: emoji, + } + + data, err := txMeta.ToBytes(false) + require.NoError(err) + + testMeta, err := NewTxnMetadata(TxnTypeReact) + require.NoError(err) + err = testMeta.FromBytes(data) + require.NoError(err) + require.Equal(txMeta, testMeta) +} + func TestSerializeFollow(t *testing.T) { assert := assert.New(t) require := require.New(t) diff --git a/lib/notifier.go b/lib/notifier.go index de5959dfb..4c93437c8 100644 --- a/lib/notifier.go +++ b/lib/notifier.go @@ -46,6 +46,7 @@ func (notifier *Notifier) Update() error { var transactions []*PGTransaction err = notifier.db.Model(&transactions).Where("block_hash = ?", block.Hash). Relation("Outputs").Relation("PGMetadataLike").Relation("PGMetadataFollow"). + Relation("PGMetadataReact"). 
Relation("PGMetadataCreatorCoin").Relation("PGMetadataCreatorCoinTransfer"). Relation("PGMetadataSubmitPost").Select() // TODO: Add NFTs @@ -97,6 +98,20 @@ func (notifier *Notifier) Update() error { Timestamp: block.Timestamp, }) } + } else if transaction.Type == TxnTypeReact { + postHash := transaction.MetadataReact.PostHash + post := DBGetPostEntryByPostHash(notifier.badger, nil, postHash) + if post != nil { + notifications = append(notifications, &PGNotification{ + TransactionHash: transaction.Hash, + Mined: true, + ToUser: post.PosterPublicKey, + FromUser: transaction.PublicKey, + Type: NotificationReact, + PostHash: postHash, + Timestamp: block.Timestamp, + }) + } } else if transaction.Type == TxnTypeFollow { if !transaction.MetadataFollow.IsUnfollow { notifications = append(notifications, &PGNotification{ diff --git a/lib/postgres.go b/lib/postgres.go index d8d2ccc43..8e56dbdb5 100644 --- a/lib/postgres.go +++ b/lib/postgres.go @@ -136,6 +136,7 @@ type PGTransaction struct { MetadataUpdateProfile *PGMetadataUpdateProfile `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataFollow *PGMetadataFollow `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataLike *PGMetadataLike `pg:"rel:belongs-to,join_fk:transaction_hash"` + MetadataReact *PGMetadataReact `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataCreatorCoin *PGMetadataCreatorCoin `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataCreatorCoinTransfer *PGMetadataCreatorCoinTransfer `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataSwapIdentity *PGMetadataSwapIdentity `pg:"rel:belongs-to,join_fk:transaction_hash"` @@ -260,6 +261,16 @@ type PGMetadataLike struct { IsUnlike bool `pg:",use_zero"` } +// PGMetadataReact represents ReactMetadata +type PGMetadataReact struct { + tableName struct{} `pg:"pg_metadata_reactions"` + + TransactionHash *BlockHash `pg:",pk,type:bytea"` + PostHash *BlockHash `pg:",type:bytea"` + IsRemove bool `pg:",use_zero"` + EmojiReaction rune `pg:",type:integer"` +} + // 
PGMetadataCreatorCoin represents CreatorCoinMetadataa type PGMetadataCreatorCoin struct { tableName struct{} `pg:"pg_metadata_creator_coins"` @@ -455,6 +466,7 @@ const ( NotificationUnknown NotificationType = iota NotificationSendDESO NotificationLike + NotificationReact NotificationFollow NotificationCoinPurchase NotificationCoinTransfer @@ -598,6 +610,22 @@ func (like *PGLike) NewLikeEntry() *LikeEntry { } } +type PGReact struct { + tableName struct{} `pg:"pg_react"` + + ReactorPublicKey []byte `pg:",pk,type:bytea"` + ReactorPostHash *BlockHash `pg:",pk,type:bytea"` + ReactionEmoji rune `pg:",pk,type:bytea"` +} + +func (react *PGReact) NewReactionEntry() *ReactionEntry { + return &ReactionEntry{ + ReactorPubKey: react.ReactorPublicKey, + ReactedPostHash: react.ReactorPostHash, + ReactEmoji: react.ReactionEmoji, + } +} + type PGFollow struct { tableName struct{} `pg:"pg_follows"` @@ -1560,6 +1588,9 @@ func (postgres *Postgres) FlushView(view *UtxoView) error { if err := postgres.flushLikes(tx, view); err != nil { return err } + if err := postgres.flushReacts(tx, view); err != nil { + return err + } if err := postgres.flushFollows(tx, view); err != nil { return err } @@ -1788,6 +1819,44 @@ func (postgres *Postgres) flushLikes(tx *pg.Tx, view *UtxoView) error { return nil } +func (postgres *Postgres) flushReacts(tx *pg.Tx, view *UtxoView) error { + var insertReacts []*PGReact + var deleteReacts []*PGReact + for _, reactionEntry := range view.ReactionKeyToReactionEntry { + if reactionEntry == nil { + continue + } + + react := &PGReact{ + ReactorPublicKey: reactionEntry.ReactorPubKey, + ReactorPostHash: reactionEntry.ReactedPostHash, + ReactionEmoji: reactionEntry.ReactEmoji, + } + + if reactionEntry.isDeleted { + deleteReacts = append(deleteReacts, react) + } else { + insertReacts = append(insertReacts, react) + } + } + + if len(insertReacts) > 0 { + _, err := tx.Model(&insertReacts).WherePK().OnConflict("DO NOTHING").Returning("NULL").Insert() + if err != nil { + 
return err + } + } + + if len(deleteReacts) > 0 { + _, err := tx.Model(&deleteReacts).Returning("NULL").Delete() + if err != nil { + return err + } + } + + return nil +} + func (postgres *Postgres) flushFollows(tx *pg.Tx, view *UtxoView) error { var insertFollows []*PGFollow var deleteFollows []*PGFollow @@ -2442,6 +2511,37 @@ func (postgres *Postgres) GetLikesForPost(postHash *BlockHash) []*PGLike { return likes } +// +// Reacts +// +func (postgres *Postgres) GetReaction(reactorPublicKey []byte, reactedPostHash *BlockHash, reactionEmoji rune) *PGReact { + react := PGReact{ + ReactorPublicKey: reactorPublicKey, + ReactorPostHash: reactedPostHash, + ReactionEmoji: reactionEmoji, + } + err := postgres.db.Model(&react).WherePK().First() + if err != nil { + return nil + } + return &react +} +func (postgres *Postgres) GetReacts(reacts []*PGReact) []*PGReact { + err := postgres.db.Model(&reacts).WherePK().Select() + if err != nil { + return nil + } + return reacts +} +func (postgres *Postgres) GetReactionsForPost(postHash *BlockHash) []*PGReact { + var reacts []*PGReact + err := postgres.db.Model(&reacts).Where("reactor_post_hash = ?", postHash).Select() + if err != nil { + return nil + } + return reacts +} + // // Follows // From 6979acbb7151a1b66b1f2bc4c69e3362f6a20d48 Mon Sep 17 00:00:00 2001 From: lazynina Date: Sat, 27 Aug 2022 13:36:51 -0400 Subject: [PATCH 02/11] [stable] Release 2.2.6 From 4ba4a3af270c3ff384d5ce0a77d8bc6d4ef303dd Mon Sep 17 00:00:00 2001 From: diamondhands Date: Sat, 27 Aug 2022 11:41:32 -0700 Subject: [PATCH 03/11] Fix IsNodeArchival flag to include SyncTypeBlockSync This was causing nodes to reject other nodes as sync peers when they have --sync-type=blocksync but --hypersync=false even though these nodes are valid sync peers. 
--- lib/server.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/server.go b/lib/server.go index 7182afd26..456fc048a 100644 --- a/lib/server.go +++ b/lib/server.go @@ -377,11 +377,11 @@ func NewServer( if err != nil { panic(err) } + } - // We only set archival mode true if we're a hypersync node. - if IsNodeArchival(_syncType) { - archivalMode = true - } + // We only set archival mode true if we're a hypersync node. + if IsNodeArchival(_syncType) { + archivalMode = true } // Create an empty Server object here so we can pass a reference to it to the From fda9a0f4c8e70f3e81f101b7b92c8cee72dbc931 Mon Sep 17 00:00:00 2001 From: Lazy Nina <81658138+lazynina@users.noreply.github.com> Date: Fri, 9 Sep 2022 17:13:34 -0400 Subject: [PATCH 04/11] Add support for a Get Single Derived Key endpoint (#393) * Add support for a Get Single Derived Key endpoint * Make _getDerivedKeyMappingForOwner public --- lib/block_view.go | 6 +++--- lib/block_view_derived_key.go | 6 +++--- lib/block_view_derived_key_test.go | 4 ++-- lib/block_view_profile.go | 14 +++----------- lib/block_view_types.go | 4 ++++ lib/db_adapter.go | 12 ++++++++++++ lib/postgres.go | 3 +++ lib/server.go | 26 +++++++++++++------------- 8 files changed, 43 insertions(+), 32 deletions(-) diff --git a/lib/block_view.go b/lib/block_view.go index 20eb61b9f..4785794d2 100644 --- a/lib/block_view.go +++ b/lib/block_view.go @@ -686,7 +686,7 @@ func (bav *UtxoView) _disconnectBasicTransfer(currentTxn *MsgDeSoTxn, txnHash *B PkToString(derivedPkBytes, bav.Params), err) } - derivedKeyEntry := bav._getDerivedKeyMappingForOwner(currentTxn.PublicKey, derivedPkBytes) + derivedKeyEntry := bav.GetDerivedKeyMappingForOwner(currentTxn.PublicKey, derivedPkBytes) if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { return fmt.Errorf("_disconnectBasicTransfer: could not find derived key entry") } @@ -1149,7 +1149,7 @@ func (bav *UtxoView) _verifySignature(txn *MsgDeSoTxn, blockHeight uint32) (_der } 
else { // Look for a derived key entry in UtxoView and DB, check to make sure it exists // and is not isDeleted. - derivedKeyEntry := bav._getDerivedKeyMappingForOwner(ownerPkBytes, derivedPkBytes) + derivedKeyEntry := bav.GetDerivedKeyMappingForOwner(ownerPkBytes, derivedPkBytes) if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { return nil, errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, "Derived key mapping for owner not found: Owner: %v, "+ @@ -1498,7 +1498,7 @@ func (bav *UtxoView) _checkDerivedKeySpendingLimit( _utxoOpsForTxn []*UtxoOperation, _err error) { // Get the derived key entry - prevDerivedKeyEntry := bav._getDerivedKeyMappingForOwner(txn.PublicKey, derivedPkBytes) + prevDerivedKeyEntry := bav.GetDerivedKeyMappingForOwner(txn.PublicKey, derivedPkBytes) if prevDerivedKeyEntry == nil || prevDerivedKeyEntry.isDeleted { return utxoOpsForTxn, fmt.Errorf("_checkDerivedKeySpendingLimit: No derived key entry found") } diff --git a/lib/block_view_derived_key.go b/lib/block_view_derived_key.go index ab5540b1e..7aba46d91 100644 --- a/lib/block_view_derived_key.go +++ b/lib/block_view_derived_key.go @@ -105,7 +105,7 @@ func (bav *UtxoView) _connectAuthorizeDerivedKey( } // Get current (previous) derived key entry. We might revert to it later so we copy it. - prevDerivedKeyEntry := bav._getDerivedKeyMappingForOwner(ownerPublicKey, derivedPublicKey) + prevDerivedKeyEntry := bav.GetDerivedKeyMappingForOwner(ownerPublicKey, derivedPublicKey) // Authorize transactions can be signed by both owner and derived keys. 
However, this // poses a risk in a situation where a malicious derived key, which has previously been @@ -279,7 +279,7 @@ func (bav *UtxoView) _connectAuthorizeDerivedKey( // If we're past the derived key spending limit block height, we actually need to fetch the derived key // entry again since the basic transfer reduced the txn count on the derived key txn if blockHeight >= bav.Params.ForkHeights.DerivedKeySetSpendingLimitsBlockHeight { - derivedKeyEntry = *bav._getDerivedKeyMappingForOwner(ownerPublicKey, derivedPublicKey) + derivedKeyEntry = *bav.GetDerivedKeyMappingForOwner(ownerPublicKey, derivedPublicKey) } // Earlier we've set a temporary derived key entry that had OperationType set to Valid. @@ -344,7 +344,7 @@ func (bav *UtxoView) _disconnectAuthorizeDerivedKey( derivedPublicKey = txMeta.DerivedPublicKey // Get the derived key entry. If it's nil or is deleted then we have an error. - derivedKeyEntry := bav._getDerivedKeyMappingForOwner(ownerPublicKey, derivedPublicKey) + derivedKeyEntry := bav.GetDerivedKeyMappingForOwner(ownerPublicKey, derivedPublicKey) if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { return fmt.Errorf("_disconnectAuthorizeDerivedKey: DerivedKeyEntry for "+ "public key %v, derived key %v was found to be nil or deleted: %v", diff --git a/lib/block_view_derived_key_test.go b/lib/block_view_derived_key_test.go index e50f938d3..765fbfb44 100644 --- a/lib/block_view_derived_key_test.go +++ b/lib/block_view_derived_key_test.go @@ -732,7 +732,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { } else { utxoView, err := mempool.GetAugmentedUniversalView() require.NoError(err) - derivedKeyEntry := utxoView._getDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) + derivedKeyEntry := utxoView.GetDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. // To pass the tests, we initialize it to a default struct. 
if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { @@ -1637,7 +1637,7 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { } else { utxoView, err := mempool.GetAugmentedUniversalView() require.NoError(err) - derivedKeyEntry := utxoView._getDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) + derivedKeyEntry := utxoView.GetDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. // To pass the tests, we initialize it to a default struct. if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { diff --git a/lib/block_view_profile.go b/lib/block_view_profile.go index 20549c6f3..8d67550e8 100644 --- a/lib/block_view_profile.go +++ b/lib/block_view_profile.go @@ -335,8 +335,8 @@ func (bav *UtxoView) _deleteProfileEntryMappings(profileEntry *ProfileEntry) { bav._setProfileEntryMappings(&tombstoneProfileEntry) } -// _getDerivedKeyMappingForOwner fetches the derived key mapping from the utxoView -func (bav *UtxoView) _getDerivedKeyMappingForOwner(ownerPublicKey []byte, derivedPublicKey []byte) *DerivedKeyEntry { +// GetDerivedKeyMappingForOwner fetches the derived key mapping from the utxoView +func (bav *UtxoView) GetDerivedKeyMappingForOwner(ownerPublicKey []byte, derivedPublicKey []byte) *DerivedKeyEntry { // Check if the entry exists in utxoView. ownerPk := NewPublicKey(ownerPublicKey) derivedPk := NewPublicKey(derivedPublicKey) @@ -347,15 +347,7 @@ func (bav *UtxoView) _getDerivedKeyMappingForOwner(ownerPublicKey []byte, derive } // Check if the entry exists in the DB. 
- if bav.Postgres != nil { - if entryPG := bav.Postgres.GetDerivedKey(ownerPk, derivedPk); entryPG != nil { - entry = entryPG.NewDerivedKeyEntry() - } else { - entry = nil - } - } else { - entry = DBGetOwnerToDerivedKeyMapping(bav.Handle, bav.Snapshot, *ownerPk, *derivedPk) - } + entry = bav.GetDbAdapter().GetOwnerToDerivedKeyMapping(*ownerPk, *derivedPk) // If an entry exists, update the UtxoView map. if entry != nil { diff --git a/lib/block_view_types.go b/lib/block_view_types.go index 4700f9129..c2606d37e 100644 --- a/lib/block_view_types.go +++ b/lib/block_view_types.go @@ -2576,6 +2576,10 @@ type DerivedKeyEntry struct { isDeleted bool } +func (key *DerivedKeyEntry) IsDeleted() bool { + return key.isDeleted +} + func (key *DerivedKeyEntry) RawEncodeWithoutMetadata(blockHeight uint64, skipMetadata ...bool) []byte { var data []byte diff --git a/lib/db_adapter.go b/lib/db_adapter.go index b0dd74fe6..82f5e0461 100644 --- a/lib/db_adapter.go +++ b/lib/db_adapter.go @@ -38,6 +38,18 @@ func (adapter *DbAdapter) GetBalanceEntry(holder *PKID, creator *PKID, isDAOCoin return DbGetBalanceEntry(adapter.badgerDb, adapter.snapshot, holder, creator, isDAOCoin) } +// +// Derived keys +// + +func (adapter *DbAdapter) GetOwnerToDerivedKeyMapping(ownerPublicKey PublicKey, derivedPublicKey PublicKey) *DerivedKeyEntry { + if adapter.postgresDb != nil { + return adapter.postgresDb.GetDerivedKey(&ownerPublicKey, &derivedPublicKey).NewDerivedKeyEntry() + } + + return DBGetOwnerToDerivedKeyMapping(adapter.badgerDb, adapter.snapshot, ownerPublicKey, derivedPublicKey) +} + // // DAO coin limit order // diff --git a/lib/postgres.go b/lib/postgres.go index d8d2ccc43..427b68511 100644 --- a/lib/postgres.go +++ b/lib/postgres.go @@ -888,6 +888,9 @@ type PGDerivedKey struct { } func (key *PGDerivedKey) NewDerivedKeyEntry() *DerivedKeyEntry { + if key == nil { + return nil + } var tsl *TransactionSpendingLimit if len(key.TransactionSpendingLimitTracker) > 0 { tsl = &TransactionSpendingLimit{} 
diff --git a/lib/server.go b/lib/server.go index 456fc048a..48d13a2eb 100644 --- a/lib/server.go +++ b/lib/server.go @@ -307,26 +307,26 @@ func ValidateHyperSyncFlags(isHypersync bool, syncType NodeSyncType) { // NewServer initializes all of the internal data structures. Right now this basically // looks as follows: -// - ConnectionManager starts and keeps track of peers. -// - When messages are received from peers, they get forwarded on a channel to -// the Server to handle them. In that sense the ConnectionManager is basically -// just acting as a router. -// - When the Server receives a message from a peer, it can do any of the following: -// * Take no action. -// * Use the Blockchain data structure to validate the transaction or update the +// - ConnectionManager starts and keeps track of peers. +// - When messages are received from peers, they get forwarded on a channel to +// the Server to handle them. In that sense the ConnectionManager is basically +// just acting as a router. +// - When the Server receives a message from a peer, it can do any of the following: +// - Take no action. +// - Use the Blockchain data structure to validate the transaction or update the // Blockchain data structure. -// * Send a new message. This can be a message directed back to that actually sent this +// - Send a new message. This can be a message directed back to that actually sent this // message or it can be a message to another peer for whatever reason. When a message // is sent in this way it can also have a deadline on it that the peer needs to // respond by or else it will be disconnected. -// * Disconnect the peer. In this case the ConnectionManager gets notified about the +// - Disconnect the peer. In this case the ConnectionManager gets notified about the // disconnection and may opt to replace the now-disconnected peer with a new peer. // This happens for example when an outbound peer is disconnected in order to // maintain TargetOutboundPeers. 
-// - The server could also receive a control message that a peer has been disconnected. -// This can be useful to the server if, for example, it was expecting a response from -// a particular peer, which could be the case in initial block download where a single -// sync peer is used. +// - The server could also receive a control message that a peer has been disconnected. +// This can be useful to the server if, for example, it was expecting a response from +// a particular peer, which could be the case in initial block download where a single +// sync peer is used. // // TODO: Refactor all these arguments into a config object or something. func NewServer( From ca2041877e097b4bbadd29469c3764b72f447b34 Mon Sep 17 00:00:00 2001 From: Piotr Nojszewski <29924594+AeonSw4n@users.noreply.github.com> Date: Mon, 12 Sep 2022 07:56:49 -0700 Subject: [PATCH 05/11] Recoverable signatures, Metamask access bytes, unlimited derived keys (#379) * Test metamask spending limits encoding * Add public key recovery for derived keys * test for global spending limit * Remove the test * Test refactor * Refactor signature verification * Postgres testing framework; unlimited derived keys; postgres migration; some tests * Add comments on tests; finish testing signatures * connect logic for unlimited derived keys * Finish testing * gofmt * Fix indentation * Make verify ETH signature public * Fix nilptr; improve testing functions * Review round updates; better comments and simplifications * Review round * review round 1 * Refactor * review round updates * Update go version to 1.18 * Finish review round; hard-hardcore unlimited spending limit test * Update ci.yml go version * Reset block height * Refactor signature logic to abstract away btcec's weird CompactSignature * Fix tiny error-handling bug * Add --force-checksum flag and test * Public key recovery in signature verification (#380) * Add public key recovery for derived keys * Review round * P/k group muting (#394) * added MuteList to 
MessagingGroupEntry struct * added TxnType validation in _connectMessagingGroup() * added MuteList to MessagingGroupEntry (for txns adding new members) * added muting and unmuting mechanism to _connectMessagingGroup() * added MuteList to RawEncodeWithoutMetadata * added MuteList to RawDecodeWithoutMetadata * moved MuteList to end of MessagingGroupEntry for backwards compatibility * added clarifying comment for MuteList * corrected typo * added MuteList to memberGroupEntry HACK * changed iii to ii * added RuleErrorMessagingMemberMuted * major muting code added (needs cleanup) * cleanup comments * fixed for loop error * deleted unused inline func * added TODO for making MuteList retrieval more efficient * fixed test typo * commented out MuteList from hacked memberGroupEntry for now * go.mod random change * fixed bug * fixed all pre-testing bugs * FIXED ALL BUGS AND ADDED TESTS * cleaned up comments * 33rd waking hour and counting... * added helpful comment * fixed unmuting bug * added unmuting tests and all successful * code cleanup * added MessagingGroupOperationMute and MessagingGroupOperationUnmute constants * replaced more constants * replaced more constants * fixed deepEqual to compare byte slices and NOT PublicKeys * fixed deepEqual to compare byte slices and NOT PublicKeys AGAIN * added gated condition to have sender and recipient in ExtraData * added comment * removed code from _disconnectMessagingGroup * added blockheight gating for messages muting * fixed existingEntry.MuteList deep copy bug * added encoder migration for DeSoV3MessagesMutingMigration * fixed HUGE testnet bug and migration bug * fixed muting code positioning * fixed deep copy bug * fixed extradata operationtype bug * fixed redundant if condition * made constant for MessagingGroupOperationType * moved contains() * throwing errors when muting already muted member or unmuting already unmuted member * made concise * removed comment * added super helpful comment * temporarily changed migration 
version to pass tests * FIXED MAJOR ENCODE DECODE BUG * added hacked entry optimization; fixed txn.PublicKey bug * removed comment * changed optimization comment * added prefix deprecation and replacement code * added more Deprecation code * refactored db_utils funcs and created new OptimizedMessagingGroupEntry using better prefix key structure * fixed refactoring bug; added more tests for muting while blockheight below threshold * fixed new prefix name * fixed 2 nits * cleaned up 'contains' code * added test; fixed deep equal bug * added additional unmute test * fixed deep equal nit * fixed problematic loop; added test; added RuleError * added code for groupowner not allowed to mute/unmute herself * fixed conditional dup; added extra data merging * deduplicated utxoOpsForTxn * changed comment * fixed comment grammar * added enlightening comments * added groupowner sender to ganggang in tests * [stable] Release 2.2.6 * Fix IsNodeArchival flag to include SyncTypeBlockSync This was causing nodes to reject other nodes as sync peers when they have --sync-type=blocksync but --hypersync=false even though these nodes are valid sync peers. * Simplify connect logic; start making hacked member prefix more user-friendly * Testing * More thorough testing * Temporary fix for newly-added state prefix * Another fix * fix encoding * One more pass * small rename * another pass * Fix txindex and gofmt * Rename fork height * Nina review round * Fix nil utxoview fetch Co-authored-by: Keshav Maheshwari Co-authored-by: lazynina Co-authored-by: diamondhands * nits * Remove default key signature verification after the block height * Resolve merge conflicts with main (#397) * diamondhands nitpicks * Refactor V3 changes (#398) * Refactor message updates * Rename migration * gofmt... 
* filter out deleted entries * Remove sig validation for default key registration (#399) * Remove sig validation for default key registration * Comment out test that requires errors for bad sig for default key * use fork height to remove check on default key registration validation * Remove rogue new line * Fix isDeleted * Add comment * Add txn type check Co-authored-by: Lazy Nina <81658138+lazynina@users.noreply.github.com> * Fix forkheight change bug * Re-format args * more encoder migration fixes * Ignore migration block height when value is MaxUint32 Not doing this was causing an error when setting the fork height * Simplify setting IsUnlimited on spending limit object when connecting authorize derived key txn Co-authored-by: diamondhands Co-authored-by: Keshav Maheshwari Co-authored-by: lazynina Co-authored-by: Lazy Nina <81658138+lazynina@users.noreply.github.com> --- .github/workflows/ci.yml | 4 +- cmd/config.go | 10 + cmd/node.go | 2 +- cmd/run.go | 2 + go.mod | 94 +- go.sum | 32 +- integration_testing/blocksync_test.go | 2 +- integration_testing/connection_bridge.go | 2 +- integration_testing/hypersync_test.go | 2 +- integration_testing/migrations_test.go | 2 +- integration_testing/mining_test.go | 2 +- integration_testing/rollback_test.go | 2 +- integration_testing/tools.go | 2 +- integration_testing/txindex_test.go | 2 +- lib/block_view.go | 165 +- lib/block_view_bitcoin.go | 2 +- lib/block_view_derived_key.go | 134 +- lib/block_view_derived_key_test.go | 1412 ++++++++++++----- lib/block_view_flush.go | 2 +- lib/block_view_message.go | 27 +- lib/block_view_profile.go | 20 +- lib/block_view_profile_test.go | 41 +- lib/block_view_test.go | 409 ++++- lib/block_view_types.go | 14 +- lib/block_view_types_test.go | 10 + lib/blockchain.go | 2 + lib/blockchain_test.go | 37 +- lib/constants.go | 33 +- lib/db_adapter.go | 8 + lib/db_utils.go | 60 +- lib/errors.go | 26 +- lib/network.go | 415 ++++- lib/network_test.go | 283 ++++ lib/postgres.go | 49 +- 
lib/server.go | 42 +- lib/snapshot.go | 19 +- lib/txindex.go | 11 + lib/utils.go | 39 + ...e_signatures_and_unlimited_derived_keys.go | 50 + 39 files changed, 2766 insertions(+), 704 deletions(-) create mode 100644 migrate/20220711054349_recoverable_signatures_and_unlimited_derived_keys.go diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 99de7b280..d6f00a88c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: - name: Install Go uses: actions/setup-go@v2 with: - go-version: "1.16" + go-version: "1.18" - name: Checkout branch uses: actions/checkout@v3 @@ -52,7 +52,7 @@ jobs: - name: Install Go uses: actions/setup-go@v2 with: - go-version: "1.16" + go-version: "1.18" - name: Checkout branch uses: actions/checkout@v3 diff --git a/cmd/config.go b/cmd/config.go index 6a351fe32..64b2e8c00 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -35,6 +35,7 @@ type Config struct { // Snapshot HyperSync bool + ForceChecksum bool SyncType lib.NodeSyncType MaxSyncBlockHeight uint32 SnapshotBlockHeightPeriod uint64 @@ -95,6 +96,7 @@ func LoadConfig() *Config { config.Regtest = viper.GetBool("regtest") config.PostgresURI = viper.GetString("postgres-uri") config.HyperSync = viper.GetBool("hypersync") + config.ForceChecksum = viper.GetBool("force-checksum") config.SyncType = lib.NodeSyncType(viper.GetString("sync-type")) config.MaxSyncBlockHeight = viper.GetUint32("max-sync-block-height") config.SnapshotBlockHeightPeriod = viper.GetUint64("snapshot-block-height-period") @@ -162,6 +164,14 @@ func (config *Config) Print() { glog.Infof("HyperSync: ON") } + if config.ForceChecksum { + glog.Infof("ForceChecksum: ON") + } else { + glog.V(0).Infof(lib.CLog(lib.Red, "ForceChecksum: OFF - This could "+ + "allow a peer to trick you into downloading bad hypersync state. 
Be sure you're "+ + "connecting to a trustworthy sync peer.")) + } + if config.SnapshotBlockHeightPeriod > 0 { glog.Infof("SnapshotBlockHeightPeriod: %v", config.SnapshotBlockHeightPeriod) } diff --git a/cmd/node.go b/cmd/node.go index f7333474b..354496ccb 100644 --- a/cmd/node.go +++ b/cmd/node.go @@ -225,7 +225,7 @@ func (node *Node) Start(exitChannels ...*chan struct{}) { node.Config.TrustedBlockProducerStartHeight, eventManager, node.nodeMessageChan, - ) + node.Config.ForceChecksum) if err != nil { if shouldRestart { glog.Infof(lib.CLog(lib.Red, fmt.Sprintf("Start: Got en error while starting server and shouldRestart "+ diff --git a/cmd/run.go b/cmd/run.go index 0159f3aa5..57a03c8a3 100644 --- a/cmd/run.go +++ b/cmd/run.go @@ -61,6 +61,8 @@ func SetupRunFlags(cmd *cobra.Command) { "Max sync block height") // Hyper Sync cmd.PersistentFlags().Bool("hypersync", true, "Use hyper sync protocol for faster block syncing") + cmd.PersistentFlags().Bool("force-checksum", true, "When true, the node will panic if the "+ + "local state checksum differs from the network checksum reported by its peers.") // Snapshot cmd.PersistentFlags().Uint64("snapshot-block-height-period", 1000, "Set the snapshot epoch period. 
Snapshots are taken at block heights divisible by the period.") // Archival mode diff --git a/go.mod b/go.mod index d409be68c..f2235e8a6 100644 --- a/go.mod +++ b/go.mod @@ -1,48 +1,32 @@ module github.com/deso-protocol/core -go 1.14 +go 1.18 require ( github.com/DataDog/datadog-go v4.5.0+incompatible - github.com/DataDog/zstd v1.4.8 // indirect - github.com/Microsoft/go-winio v0.4.16 // indirect github.com/NVIDIA/sortedmap v0.0.0-20210902154213-c8c741ed94c5 - github.com/brianvoe/gofakeit v3.18.0+incompatible // indirect + github.com/brianvoe/gofakeit v3.18.0+incompatible github.com/btcsuite/btcd v0.21.0-beta github.com/btcsuite/btcutil v1.0.2 - github.com/bwesterb/go-ristretto v1.2.0 github.com/bxcodec/faker v2.0.1+incompatible github.com/cloudflare/circl v1.1.0 github.com/davecgh/go-spew v1.1.1 + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 github.com/decred/dcrd/lru v1.1.1 github.com/deso-protocol/go-deadlock v1.0.0 github.com/deso-protocol/go-merkle-tree v1.0.0 github.com/dgraph-io/badger/v3 v3.2103.0 - github.com/dgraph-io/ristretto v0.1.0 github.com/ethereum/go-ethereum v1.9.25 - github.com/fatih/color v1.13.0 // indirect + github.com/fatih/color v1.13.0 github.com/gernest/mention v2.0.0+incompatible github.com/go-pg/pg/v10 v10.10.0 - github.com/gobuffalo/packr v1.30.1 github.com/golang/glog v1.0.0 - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/golang/snappy v0.0.3 // indirect - github.com/google/flatbuffers v2.0.0+incompatible // indirect - github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5 // indirect - github.com/google/uuid v1.2.0 // indirect github.com/holiman/uint256 v1.1.1 - github.com/kr/text v0.2.0 // indirect github.com/mitchellh/go-homedir v1.1.0 github.com/oleiade/lane v1.0.1 - github.com/onsi/ginkgo v1.15.0 // indirect - github.com/onsi/gomega v1.10.5 // indirect - github.com/pelletier/go-toml v1.7.0 // indirect - github.com/philhofer/fwd 
v1.1.1 // indirect github.com/pkg/errors v0.9.1 github.com/pmezard/go-difflib v1.0.0 github.com/robinjoseph08/go-pg-migrations/v3 v3.0.0 - github.com/sasha-s/go-deadlock v0.3.1 github.com/shibukawa/configdir v0.0.0-20170330084843-e180dbdc8da0 github.com/spf13/cobra v1.1.3 github.com/spf13/pflag v1.0.5 @@ -50,14 +34,78 @@ require ( github.com/stretchr/testify v1.7.0 github.com/tyler-smith/go-bip39 v1.0.2 github.com/unrolled/secure v1.0.8 - go.opencensus.io v0.23.0 // indirect golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c + gopkg.in/DataDog/dd-trace-go.v1 v1.29.0 +) + +require ( + github.com/DataDog/zstd v1.4.8 // indirect + github.com/Microsoft/go-winio v0.4.16 // indirect + github.com/NVIDIA/cstruct v0.0.0-20210817223100-441a06a021c8 // indirect + github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect + github.com/bwesterb/go-ristretto v1.2.0 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.1.1 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect + github.com/dgraph-io/ristretto v0.1.0 // indirect + github.com/dustin/go-humanize v1.0.0 // indirect + github.com/fsnotify/fsnotify v1.4.9 // indirect + github.com/git-chglog/git-chglog v0.0.0-20200414013904-db796966b373 // indirect + github.com/go-pg/zerochecker v0.2.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/snappy v0.0.3 // indirect + github.com/google/flatbuffers v2.0.0+incompatible // indirect + github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5 // indirect + github.com/google/uuid v1.2.0 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/imdario/mergo v0.3.8 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + 
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/magiconair/properties v1.8.1 // indirect + github.com/mattn/go-colorable v0.1.9 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mattn/goveralls v0.0.6 // indirect + github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b // indirect + github.com/mitchellh/mapstructure v1.1.2 // indirect + github.com/onsi/ginkgo v1.15.0 // indirect + github.com/onsi/gomega v1.10.5 // indirect + github.com/pelletier/go-toml v1.7.0 // indirect + github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 // indirect + github.com/philhofer/fwd v1.1.1 // indirect + github.com/russross/blackfriday/v2 v2.0.1 // indirect + github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect + github.com/spf13/afero v1.1.2 // indirect + github.com/spf13/cast v1.3.0 // indirect + github.com/spf13/jwalterweatherman v1.0.0 // indirect + github.com/subosito/gotenv v1.2.0 // indirect + github.com/tinylib/msgp v1.1.2 // indirect + github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect + github.com/tsuyoshiwada/go-gitcmd v0.0.0-20180205145712-5f1f5f9475df // indirect + github.com/urfave/cli v1.22.1 // indirect + github.com/vmihailenco/bufpool v0.1.11 // indirect + github.com/vmihailenco/msgpack/v5 v5.3.1 // indirect + github.com/vmihailenco/tagparser v0.1.2 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + go.opencensus.io v0.23.0 // indirect golang.org/x/mod v0.4.2 // indirect golang.org/x/net v0.0.0-20210614182718-04defd469f4e // indirect - golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c + golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac // indirect + golang.org/x/text v0.3.6 // indirect golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 // indirect golang.org/x/tools v0.1.0 // indirect - gopkg.in/DataDog/dd-trace-go.v1 v1.29.0 
+ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + google.golang.org/protobuf v1.26.0 // indirect + gopkg.in/AlecAivazis/survey.v1 v1.8.7 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect + gopkg.in/ini.v1 v1.51.0 // indirect + gopkg.in/kyokomi/emoji.v1 v1.5.1 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect + mellium.im/sasl v0.2.1 // indirect ) diff --git a/go.sum b/go.sum index 7b814c22c..0a34a3666 100644 --- a/go.sum +++ b/go.sum @@ -105,7 +105,6 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= @@ -116,6 +115,10 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ= +github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= +github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= +github.com/decred/dcrd/dcrec/secp256k1/v4 
v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= github.com/decred/dcrd/lru v1.0.0/go.mod h1:mxKOwFd7lFjN2GZYsiz/ecgqR6kkYAl+0pz0tEMk218= github.com/decred/dcrd/lru v1.1.1 h1:kWFDaW0OWx6AD6Ki342c+JPmHbiVdE6rK81pT3fuo/Y= github.com/decred/dcrd/lru v1.1.1/go.mod h1:mxKOwFd7lFjN2GZYsiz/ecgqR6kkYAl+0pz0tEMk218= @@ -146,7 +149,6 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/ethereum/go-ethereum v1.9.25 h1:mMiw/zOOtCLdGLWfcekua0qPrJTe7FVIiHJ4IKNTfR0= github.com/ethereum/go-ethereum v1.9.25/go.mod h1:vMkFiYLHI4tgPw4k2j4MHKoovchFE8plZ0M9VMk4/oM= github.com/fatih/color v1.3.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= @@ -172,14 +174,6 @@ github.com/go-pg/zerochecker v0.2.0 h1:pp7f72c3DobMWOb2ErtZsnrPaSvHd2W4o9//8HtF4 github.com/go-pg/zerochecker v0.2.0/go.mod h1:NJZ4wKL0NmTtz0GKCoJ8kym6Xn/EQzXRl2OnAe7MmDo= github.com/go-sourcemap/sourcemap v2.1.2+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gobuffalo/envy v1.7.0 h1:GlXgaiBkmrYMHco6t4j7SacKO4XUjvh5pwXh0f4uxXU= -github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/logger v1.0.0/go.mod h1:2zbswyIUa45I+c+FLXuWl9zSWEiVuthsk8ze5s8JvPs= -github.com/gobuffalo/packd v0.3.0 h1:eMwymTkA1uXsqxS0Tpoop3Lc0u3kTfiMBE6nKtQU4g4= -github.com/gobuffalo/packd v0.3.0/go.mod h1:zC7QkmNkYVGKPw4tHpBQ+ml7W/3tIebgeo1b36chA3Q= -github.com/gobuffalo/packr v1.30.1 
h1:hu1fuVR3fXEZR7rXNW3h8rqSML8EVAf6KNm0NKO/wKg= -github.com/gobuffalo/packr v1.30.1/go.mod h1:ljMyFO2EcrnzsHsN99cvbq055Y9OhRrIaviy289eRuk= -github.com/gobuffalo/packr/v2 v2.5.1/go.mod h1:8f9c96ITobJlPzI44jj+4tHnEKNt0xXWSVlXRN9X1Iw= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= @@ -292,8 +286,6 @@ github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJS github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= @@ -303,7 +295,6 @@ github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfV github.com/julienschmidt/httprouter v1.1.1-0.20170430222011-975b5c4c7c21/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/karalabe/usb v0.0.0-20190919080040-51dc0efba356/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU= -github.com/karrick/godirwalk v1.10.12/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= 
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= @@ -311,7 +302,6 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= @@ -327,7 +317,6 @@ github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzR github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.0/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -335,7 +324,6 @@ github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HN github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= github.com/mattn/go-isatty 
v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.5-0.20180830101745-3fb116b82035/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= @@ -419,18 +407,13 @@ github.com/rjeczalik/notify v0.9.1/go.mod h1:rKwnCoCGeuQnwBtTSPL9Dad03Vh2n40ePRr github.com/robinjoseph08/go-pg-migrations/v3 v3.0.0 h1:0/H63lDsoNYVn5YmP6VLDEnnKkoVYiHx7udTWCK4BUI= github.com/robinjoseph08/go-pg-migrations/v3 v3.0.0/go.mod h1:nOkSFfwwDUBFnDDQqMRC2p4PDE7GZb/KSVqILVB3bmw= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= -github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.3.0 h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rs/cors v0.0.0-20160617231935-a62a804a8a00/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/xhandler v0.0.0-20160618193221-ed27b6fd6521/go.mod h1:RvLn4FgxWubrpZHtQLnOf6EwhN2hEMusxZOhcW9H3UQ= -github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sasha-s/go-deadlock v0.3.1 
h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0= -github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/shibukawa/configdir v0.0.0-20170330084843-e180dbdc8da0 h1:Xuk8ma/ibJ1fOy4Ee11vHhUFHQNpHhrBneOCNHVXS5w= github.com/shibukawa/configdir v0.0.0-20170330084843-e180dbdc8da0/go.mod h1:7AwjWCpdPhkSmNAgUv5C7EJ4AbmjEB3r047r3DXWu3Y= @@ -439,7 +422,6 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5I github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= @@ -534,7 +516,6 @@ golang.org/x/crypto v0.0.0-20190123085648-057139ce5d2b/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190909091759-094676da4a83/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto 
v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -627,10 +608,8 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190515120540-06a5c4944438/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -651,13 +630,11 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -682,7 +659,6 @@ golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190624180213-70d37148ca0c/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= diff --git a/integration_testing/blocksync_test.go b/integration_testing/blocksync_test.go index c05fe427a..af1bc3637 100644 --- a/integration_testing/blocksync_test.go +++ b/integration_testing/blocksync_test.go @@ -1,4 +1,4 @@ -package testing +package 
integration_testing import ( "fmt" diff --git a/integration_testing/connection_bridge.go b/integration_testing/connection_bridge.go index 2360293c8..0cf247af3 100644 --- a/integration_testing/connection_bridge.go +++ b/integration_testing/connection_bridge.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "fmt" diff --git a/integration_testing/hypersync_test.go b/integration_testing/hypersync_test.go index acf8685f1..fc0b9bd87 100644 --- a/integration_testing/hypersync_test.go +++ b/integration_testing/hypersync_test.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "fmt" diff --git a/integration_testing/migrations_test.go b/integration_testing/migrations_test.go index 67e1a641b..b0a692b52 100644 --- a/integration_testing/migrations_test.go +++ b/integration_testing/migrations_test.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "fmt" diff --git a/integration_testing/mining_test.go b/integration_testing/mining_test.go index 13732d687..49a23333c 100644 --- a/integration_testing/mining_test.go +++ b/integration_testing/mining_test.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "github.com/deso-protocol/core/cmd" diff --git a/integration_testing/rollback_test.go b/integration_testing/rollback_test.go index 6d13d71ca..154a392c4 100644 --- a/integration_testing/rollback_test.go +++ b/integration_testing/rollback_test.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "github.com/deso-protocol/core/cmd" diff --git a/integration_testing/tools.go b/integration_testing/tools.go index 5d770039f..b393c0e44 100644 --- a/integration_testing/tools.go +++ b/integration_testing/tools.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "encoding/hex" diff --git a/integration_testing/txindex_test.go b/integration_testing/txindex_test.go index a5fb16a27..b01f7d3b2 100644 --- a/integration_testing/txindex_test.go +++ 
b/integration_testing/txindex_test.go @@ -1,4 +1,4 @@ -package testing +package integration_testing import ( "fmt" diff --git a/lib/block_view.go b/lib/block_view.go index 4785794d2..147840688 100644 --- a/lib/block_view.go +++ b/lib/block_view.go @@ -676,9 +676,10 @@ func (bav *UtxoView) _disconnectBasicTransfer(currentTxn *MsgDeSoTxn, txnHash *B if len(utxoOpsForTxn) > 0 && utxoOpsForTxn[operationIndex].Type == OperationTypeSpendingLimitAccounting { currentOperation := utxoOpsForTxn[operationIndex] // Get the current derived key entry - derivedPkBytes, isDerived := IsDerivedSignature(currentTxn) - if !isDerived { - return fmt.Errorf("_disconnectBasicTransfer: Found Spending Limit Accounting op with non-derived key signature") + derivedPkBytes, isDerived, err := IsDerivedSignature(currentTxn) + if !isDerived || err != nil { + return fmt.Errorf("_disconnectBasicTransfer: Found Spending Limit Accounting op with non-derived "+ + "key signature or got an error %v", err) } if err := IsByteArrayValidPublicKey(derivedPkBytes); err != nil { return fmt.Errorf( @@ -1114,6 +1115,9 @@ func _isEntryImmatureBlockReward(utxoEntry *UtxoEntry, blockHeight uint32, param } func (bav *UtxoView) _verifySignature(txn *MsgDeSoTxn, blockHeight uint32) (_derivedPkBytes []byte, _err error) { + if txn.Signature.Sign == nil { + return nil, fmt.Errorf("_verifySignature: Transaction signature is empty") + } // Compute a hash of the transaction. txBytes, err := txn.ToBytes(true /*preSignature*/) if err != nil { @@ -1122,13 +1126,20 @@ func (bav *UtxoView) _verifySignature(txn *MsgDeSoTxn, blockHeight uint32) (_der txHash := Sha256DoubleHash(txBytes) // Look for the derived key in transaction ExtraData and validate it. For transactions - // signed using a derived key, the derived public key is passed to ExtraData. + // signed using a derived key, the derived public key is passed in ExtraData. 
Alternatively, + // if the signature uses DeSo-DER encoding, meaning we can recover the derived public key from + // the signature. var derivedPk *btcec.PublicKey - derivedPkBytes, isDerived := IsDerivedSignature(txn) + derivedPkBytes, isDerived, err := IsDerivedSignature(txn) + if err != nil { + return nil, errors.Wrapf(err, "_verifySignature: Something went wrong while checking for "+ + "derived key signature") + } + // If we got a derived key then try parsing it. if isDerived { derivedPk, err = btcec.ParsePubKey(derivedPkBytes, btcec.S256()) if err != nil { - return nil, RuleErrorDerivedKeyInvalidExtraData + return nil, fmt.Errorf("%v %v", RuleErrorDerivedKeyInvalidExtraData, RuleErrorDerivedKeyInvalidRecoveryId) } } @@ -1139,49 +1150,21 @@ func (bav *UtxoView) _verifySignature(txn *MsgDeSoTxn, blockHeight uint32) (_der return nil, errors.Wrapf(err, "_verifySignature: Problem parsing owner public key: ") } - // If no derived key is present in ExtraData, we check if transaction was signed by the owner. - // If derived key is present in ExtraData, we check if transaction was signed by the derived key. + // If no derived key was used, we check if transaction was signed by the owner. + // If derived key *was* used, we check if transaction was signed by the derived key. if derivedPk == nil { // Verify that the transaction is signed by the specified key. if txn.Signature.Verify(txHash[:], ownerPk) { return nil, nil } } else { - // Look for a derived key entry in UtxoView and DB, check to make sure it exists - // and is not isDeleted. 
- derivedKeyEntry := bav.GetDerivedKeyMappingForOwner(ownerPkBytes, derivedPkBytes) - if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { - return nil, errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, - "Derived key mapping for owner not found: Owner: %v, "+ - "Derived key: %v", PkToStringMainnet(ownerPkBytes), - PkToStringMainnet(derivedPkBytes)) - } - - // Sanity-check that transaction public keys line up with looked-up derivedKeyEntry public keys. - if !reflect.DeepEqual(ownerPkBytes, derivedKeyEntry.OwnerPublicKey[:]) || - !reflect.DeepEqual(derivedPkBytes, derivedKeyEntry.DerivedPublicKey[:]) { - return nil, errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, "DB entry (OwnerPubKey, "+ - "DerivedPubKey) = (%v, %v) does not match keys used to "+ - "look up the entry: (%v, %v). This should never happen.", - PkToStringMainnet(derivedKeyEntry.OwnerPublicKey[:]), - PkToStringMainnet(derivedKeyEntry.DerivedPublicKey[:]), - PkToStringMainnet(ownerPkBytes), - PkToStringMainnet(derivedPkBytes)) - } - - // At this point, we know the derivedKeyEntry that we have is matching. - // We check if the derived key hasn't been de-authorized or hasn't expired. - if derivedKeyEntry.OperationType != AuthorizeDerivedKeyOperationValid || - derivedKeyEntry.ExpirationBlock <= uint64(blockHeight) { - return nil, errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, "Derived key EITHER "+ - "deactivated or block height expired. Deactivation status: %v, "+ - "Expiration block height: %v, Current block height: %v", - derivedKeyEntry.OperationType, - derivedKeyEntry.ExpirationBlock, - blockHeight) - } - - // All checks passed so we try to verify the signature. + // Look for a derived key entry in UtxoView and DB, check to make sure it exists and is not isDeleted. + if err := bav.ValidateDerivedKey(ownerPkBytes, derivedPkBytes, uint64(blockHeight)); err != nil { + return nil, err + } + + // All checks passed so we try to verify the signature. 
This step can be avoided for DeSo-DER signatures + // but we run it redundantly just in case. if txn.Signature.Verify(txHash[:], derivedPk) { return derivedPk.SerializeCompressed(), nil } @@ -1192,12 +1175,75 @@ func (bav *UtxoView) _verifySignature(txn *MsgDeSoTxn, blockHeight uint32) (_der return nil, RuleErrorInvalidTransactionSignature } -func IsDerivedSignature(txn *MsgDeSoTxn) (_derivedPkBytes []byte, _isDerived bool) { - if txn.ExtraData == nil { - return nil, false +// ValidateDerivedKey checks if a derived key is authorized and valid. +func (bav *UtxoView) ValidateDerivedKey(ownerPkBytes []byte, derivedPkBytes []byte, blockHeight uint64) error { + derivedKeyEntry := bav.GetDerivedKeyMappingForOwner(ownerPkBytes, derivedPkBytes) + if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { + return errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, "Derived key mapping for owner not found: Owner: %v, "+ + "Derived key: %v", PkToStringBoth(ownerPkBytes), PkToStringBoth(derivedPkBytes)) + } + + // Sanity-check that transaction public keys line up with looked-up derivedKeyEntry public keys. + if !reflect.DeepEqual(ownerPkBytes, derivedKeyEntry.OwnerPublicKey[:]) || + !reflect.DeepEqual(derivedPkBytes, derivedKeyEntry.DerivedPublicKey[:]) { + return errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, "DB entry (OwnerPubKey, DerivedPubKey) = (%v, %v) does not "+ + "match keys used to look up the entry: (%v, %v). This should never happen.", + PkToStringBoth(derivedKeyEntry.OwnerPublicKey[:]), PkToStringBoth(derivedKeyEntry.DerivedPublicKey[:]), + PkToStringBoth(ownerPkBytes), PkToStringBoth(derivedPkBytes)) + } + + // At this point, we know the derivedKeyEntry that we have is matching. + // We check if the derived key hasn't been de-authorized or hasn't expired. 
+ if derivedKeyEntry.OperationType != AuthorizeDerivedKeyOperationValid || + derivedKeyEntry.ExpirationBlock <= blockHeight { + return errors.Wrapf(RuleErrorDerivedKeyNotAuthorized, "Derived key EITHER deactivated or block height expired. "+ + "Deactivation status: %v, Expiration block height: %v, Current block height: %v", + derivedKeyEntry.OperationType, derivedKeyEntry.ExpirationBlock, blockHeight) + } + + // If we get to this point, we got a valid derived key. + return nil +} + +// IsDerivedSignature checks if a transaction was signed using a derived key. If so, it will recover the derived key used +// to sign the transaction. There are two possible ways to serialize transaction's ECDSA signature for a derived key. +// Either to use the DER encoding and place the derived public key in transaction's ExtraData, or to use DeSo-DER signature +// encoding and pass a special recovery ID into the signature's bytes. However, both encodings can't be used at the same time. +func IsDerivedSignature(txn *MsgDeSoTxn) (_derivedPkBytes []byte, _isDerived bool, _err error) { + // If transaction contains ExtraData, then check if the DerivedPublicKey was passed along. + if txn.ExtraData != nil { + derivedPkBytes, isDerived := txn.ExtraData[DerivedPublicKey] + // Make sure both encodings aren't used concurrently. + if isDerived && txn.Signature.IsRecoverable { + return nil, false, errors.Wrapf(RuleErrorDerivedKeyHasBothExtraDataAndRecoveryId, + "IsDerivedSignature: transaction signed with a derived key can either store public key in "+ + "ExtraData or use the DeSo-DER recoverable signature encoding but not BOTH") + } + if isDerived { + return derivedPkBytes, isDerived, nil + } + } + + // If transaction doesn't contain a derived key in ExtraData, then check if it contains the recovery ID. + if txn.Signature.IsRecoverable { + // Assemble the transaction hash; we need it in order to recover the public key. 
+ txBytes, err := txn.ToBytes(true /*preSignature*/) + if err != nil { + return nil, false, errors.Wrapf(err, "IsDerivedSignature: Problem "+ + "serializing txn without signature: ") + } + txHash := Sha256DoubleHash(txBytes)[:] + + // Recover the public key from the signature. + derivedPublicKey, err := txn.Signature.RecoverPublicKey(txHash) + if err != nil { + return nil, false, errors.Wrapf(err, "IsDerivedSignature: Problem recovering "+ + "public key from signature") + } + return derivedPublicKey.SerializeCompressed(), true, nil } - derivedPkBytes, isDerived := txn.ExtraData[DerivedPublicKey] - return derivedPkBytes, isDerived + return nil, false, nil + } func (bav *UtxoView) _connectBasicTransfer( @@ -1467,7 +1513,7 @@ func (bav *UtxoView) _connectBasicTransfer( // also not allowed to have any inputs because they by construction cannot authorize // the spending of any inputs. if txn.TxnMeta.GetTxnType() == TxnTypeBlockReward { - if len(txn.PublicKey) != 0 || txn.Signature != nil { + if len(txn.PublicKey) != 0 || txn.Signature.Sign != nil { return 0, 0, nil, RuleErrorBlockRewardTxnNotAllowedToHaveSignature } } else { @@ -1478,8 +1524,11 @@ func (bav *UtxoView) _connectBasicTransfer( } if blockHeight >= bav.Params.ForkHeights.DerivedKeyTrackSpendingLimitsBlockHeight { - if derivedPkBytes, isDerivedSig := IsDerivedSignature(txn); isDerivedSig { - var err error + if derivedPkBytes, isDerivedSig, err := IsDerivedSignature(txn); isDerivedSig { + if err != nil { + return 0, 0, nil, errors.Wrapf(err, "_connectBasicTransfer: "+ + "It looks like this transaction was signed with a derived key, but the signature is malformed: ") + } // Now we check the transaction limits on the derived key utxoOpsForTxn, err = bav._checkDerivedKeySpendingLimit(txn, derivedPkBytes, totalInput, utxoOpsForTxn) if err != nil { @@ -1505,6 +1554,15 @@ func (bav *UtxoView) _checkDerivedKeySpendingLimit( // Create a copy of the prevDerivedKeyEntry so we can safely modify the new entry 
derivedKeyEntry := *prevDerivedKeyEntry.Copy() + // Make sure spending limit is not nil. + if derivedKeyEntry.TransactionSpendingLimitTracker == nil { + return utxoOpsForTxn, errors.Wrap(RuleErrorDerivedKeyNotAuthorized, + "_checkDerivedKeySpendingLimit: TransactionSpendingLimitTracker is nil") + } + // If the derived key is an unlimited key, we don't need to check spending limits whatsoever. + if derivedKeyEntry.TransactionSpendingLimitTracker.IsUnlimited { + return utxoOpsForTxn, nil + } // Spend amount is total inputs minus sum of AddUtxo type operations // going to transactor (i.e. change). @@ -1533,11 +1591,6 @@ func (bav *UtxoView) _checkDerivedKeySpendingLimit( } } - if derivedKeyEntry.TransactionSpendingLimitTracker == nil { - return utxoOpsForTxn, errors.Wrap(RuleErrorDerivedKeyNotAuthorized, - "_checkDerivedKeySpendingLimit: TransactionSpendingLimitTracker is nil") - } - // If the spend amount exceeds the Global DESO limit, this derived key is not authorized to spend this DESO. if spendAmount > derivedKeyEntry.TransactionSpendingLimitTracker.GlobalDESOLimit { return utxoOpsForTxn, errors.Wrapf(RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit, diff --git a/lib/block_view_bitcoin.go b/lib/block_view_bitcoin.go index 6fb57884e..428cb5a06 100644 --- a/lib/block_view_bitcoin.go +++ b/lib/block_view_bitcoin.go @@ -167,7 +167,7 @@ func (bav *UtxoView) _connectBitcoinExchange( if len(txn.PublicKey) != 0 { return 0, 0, nil, RuleErrorBitcoinExchangeShouldNotHavePublicKey } - if txn.Signature != nil { + if txn.Signature.Sign != nil { return 0, 0, nil, RuleErrorBitcoinExchangeShouldNotHaveSignature } diff --git a/lib/block_view_derived_key.go b/lib/block_view_derived_key.go index 7aba46d91..060e06b49 100644 --- a/lib/block_view_derived_key.go +++ b/lib/block_view_derived_key.go @@ -6,6 +6,7 @@ import ( "github.com/btcsuite/btcd/btcec" "github.com/pkg/errors" "reflect" + "strconv" ) // _verifyAccessSignature verifies if the accessSignature is correct. 
Valid @@ -49,12 +50,47 @@ func _verifyAccessSignatureWithTransactionSpendingLimit(ownerPublicKey []byte, d if len(transactionSpendingLimitBytes) == 0 { return fmt.Errorf("_verifyAccessSignatureWithTransactionSpendingLimit: Transaction Spending limit object is required") } + transactionSpendingLimit := &TransactionSpendingLimit{} + rr := bytes.NewReader(transactionSpendingLimitBytes) + // This error is fine because transaction should fail anyway if spending limit cannot be decoded. + if err := transactionSpendingLimit.FromBytes(blockHeight, rr); err != nil { + return errors.Wrapf(err, "Error decoding transaction spending limit from extra data") + } - // Compute a hash of derivedPublicKey+expirationBlock. + // Check if signature matches Access Bytes Encoding 1.0 + // Assemble standard access signature of derivedPublicKey || expirationBlock || transactionSpendingLimits expirationBlockBytes := EncodeUint64(expirationBlock) accessBytes := append(derivedPublicKey, expirationBlockBytes[:]...) accessBytes = append(accessBytes, transactionSpendingLimitBytes[:]...) - return _verifyBytesSignature(ownerPublicKey, accessBytes, accessSignature, uint32(blockHeight), params) + verifySignature := _verifyBytesSignature(ownerPublicKey, accessBytes, accessSignature, uint32(blockHeight), params) + if verifySignature == nil { + return nil + } + + // Check if signature matches Access Bytes Encoding 2.0 + // Assemble access bytes that use Metamask-compatible strings. + accessBytes = AssembleAccessBytesWithMetamaskStrings(derivedPublicKey, expirationBlock, transactionSpendingLimit, params) + verifySignatureNew := _verifyBytesSignature(ownerPublicKey, accessBytes, accessSignature, uint32(blockHeight), params) + if verifySignatureNew != nil { + return fmt.Errorf("Failed to verify signature under all possible encodings. Access Bytes Encoding 1.0 "+ + "Error: %v. 
Access Bytes Encoding 2.0 Error: %v", verifySignature, verifySignatureNew) + } + return nil +} + +// AssembleAccessBytesWithMetamaskStrings constructs Access Bytes Encoding 2.0. It encodes the derived key access bytes into a +// Metamask-compatible string. There are three components of a derived key that comprise the access bytes, it is the +// derived public key, expiration block, and transaction spending limit. We encode these three into a single string that +// is unique, displays nicely, and can be signed with MetaMask. This is intended to be an equivalent alternative to the +// standard Access Bytes Encoding 1.0. +func AssembleAccessBytesWithMetamaskStrings(derivedPublicKey []byte, expirationBlock uint64, + transactionSpendingLimit *TransactionSpendingLimit, params *DeSoParams) []byte { + + encodingString := "DECENTRALIZED SOCIAL\n\n" + encodingString += "Your derived public key: " + Base58CheckEncode(derivedPublicKey, false, params) + "\n\n" + encodingString += "The expiration block of your key: " + strconv.FormatUint(expirationBlock, 10) + "\n\n" + encodingString += transactionSpendingLimit.ToMetamaskString(params) + return []byte(encodingString) } func (bav *UtxoView) _connectAuthorizeDerivedKey( @@ -156,60 +192,77 @@ func (bav *UtxoView) _connectAuthorizeDerivedKey( if memoBytes, exists := txn.ExtraData[DerivedPublicKey]; exists { memo = memoBytes } - // If the transaction spending limit key exists, parse it and merge it into the existing transaction - // spending limit tracker exists := false + // If the transaction spending limit key exists, parse it and merge it into the existing transaction + // spending limit tracker. 
if transactionSpendingLimitBytes, exists = txn.ExtraData[TransactionSpendingLimitKey]; exists { transactionSpendingLimit = &TransactionSpendingLimit{} rr := bytes.NewReader(transactionSpendingLimitBytes) - if err := transactionSpendingLimit.FromBytes(rr); err != nil { + if err := transactionSpendingLimit.FromBytes(uint64(blockHeight), rr); err != nil { return 0, 0, nil, errors.Wrapf( err, "Error decoding transaction spending limit from extra data") } - // TODO: how can we serialize this in a way that we don't have to specify it everytime - // Always overwrite the global DESO limit... - newTransactionSpendingLimit.GlobalDESOLimit = transactionSpendingLimit.GlobalDESOLimit - // Iterate over transaction types and update the counts. Delete keys if the transaction count is zero. - for txnType, transactionCount := range transactionSpendingLimit.TransactionCountLimitMap { - if transactionCount == 0 { - delete(newTransactionSpendingLimit.TransactionCountLimitMap, txnType) - } else { - newTransactionSpendingLimit.TransactionCountLimitMap[txnType] = transactionCount - } + + isUnlimited, err := bav.CheckIfValidUnlimitedSpendingLimit(transactionSpendingLimit, blockHeight) + if err != nil { + return 0, 0, nil, errors.Wrapf(err, + "_connectAuthorizeDerivedKey: invalid unlimited spending limit") } - for ccLimitKey, transactionCount := range transactionSpendingLimit.CreatorCoinOperationLimitMap { - if transactionCount == 0 { - delete(newTransactionSpendingLimit.CreatorCoinOperationLimitMap, ccLimitKey) - } else { - newTransactionSpendingLimit.CreatorCoinOperationLimitMap[ccLimitKey] = transactionCount + + // A valid unlimited spending limit object only has the IsUnlimited field set. + newTransactionSpendingLimit.IsUnlimited = isUnlimited + if !newTransactionSpendingLimit.IsUnlimited { + + // TODO: how can we serialize this in a way that we don't have to specify it everytime + // Always overwrite the global DESO limit... 
+ newTransactionSpendingLimit.GlobalDESOLimit = transactionSpendingLimit.GlobalDESOLimit + // Iterate over transaction types and update the counts. Delete keys if the transaction count is zero. + for txnType, transactionCount := range transactionSpendingLimit.TransactionCountLimitMap { + if transactionCount == 0 { + delete(newTransactionSpendingLimit.TransactionCountLimitMap, txnType) + } else { + newTransactionSpendingLimit.TransactionCountLimitMap[txnType] = transactionCount + } } - } - for daoCoinLimitKey, transactionCount := range transactionSpendingLimit.DAOCoinOperationLimitMap { - if transactionCount == 0 { - delete(newTransactionSpendingLimit.DAOCoinOperationLimitMap, daoCoinLimitKey) - } else { - newTransactionSpendingLimit.DAOCoinOperationLimitMap[daoCoinLimitKey] = transactionCount + for ccLimitKey, transactionCount := range transactionSpendingLimit.CreatorCoinOperationLimitMap { + if transactionCount == 0 { + delete(newTransactionSpendingLimit.CreatorCoinOperationLimitMap, ccLimitKey) + } else { + newTransactionSpendingLimit.CreatorCoinOperationLimitMap[ccLimitKey] = transactionCount + } } - } - for nftLimitKey, transactionCount := range transactionSpendingLimit.NFTOperationLimitMap { - if transactionCount == 0 { - delete(newTransactionSpendingLimit.NFTOperationLimitMap, nftLimitKey) - } else { - newTransactionSpendingLimit.NFTOperationLimitMap[nftLimitKey] = transactionCount + for daoCoinLimitKey, transactionCount := range transactionSpendingLimit.DAOCoinOperationLimitMap { + if transactionCount == 0 { + delete(newTransactionSpendingLimit.DAOCoinOperationLimitMap, daoCoinLimitKey) + } else { + newTransactionSpendingLimit.DAOCoinOperationLimitMap[daoCoinLimitKey] = transactionCount + } } - } - for daoCoinLimitOrderLimitKey, transactionCount := range transactionSpendingLimit.DAOCoinLimitOrderLimitMap { - if transactionCount == 0 { - delete(newTransactionSpendingLimit.DAOCoinLimitOrderLimitMap, daoCoinLimitOrderLimitKey) - } else { - 
newTransactionSpendingLimit.DAOCoinLimitOrderLimitMap[daoCoinLimitOrderLimitKey] = transactionCount + for nftLimitKey, transactionCount := range transactionSpendingLimit.NFTOperationLimitMap { + if transactionCount == 0 { + delete(newTransactionSpendingLimit.NFTOperationLimitMap, nftLimitKey) + } else { + newTransactionSpendingLimit.NFTOperationLimitMap[nftLimitKey] = transactionCount + } + } + for daoCoinLimitOrderLimitKey, transactionCount := range transactionSpendingLimit.DAOCoinLimitOrderLimitMap { + if transactionCount == 0 { + delete(newTransactionSpendingLimit.DAOCoinLimitOrderLimitMap, daoCoinLimitOrderLimitKey) + } else { + newTransactionSpendingLimit.DAOCoinLimitOrderLimitMap[daoCoinLimitOrderLimitKey] = transactionCount + } } } } } // We skip verifying the access signature if the transaction is signed by the owner. - if _, isDerived := IsDerivedSignature(txn); isDerived { - if err := _verifyAccessSignatureWithTransactionSpendingLimit( + _, isDerived, err := IsDerivedSignature(txn) + if err != nil { + return 0, 0, nil, errors.Wrapf(err, "_connectAuthorizeDerivedKey: "+ + "It looks like this transaction was signed with a derived key, but the signature is malformed: ") + } + if isDerived { + if err = _verifyAccessSignatureWithTransactionSpendingLimit( ownerPublicKey, derivedPublicKey, txMeta.ExpirationBlock, @@ -217,6 +270,7 @@ func (bav *UtxoView) _connectAuthorizeDerivedKey( txMeta.AccessSignature, uint64(blockHeight), bav.Params); err != nil { + return 0, 0, nil, errors.Wrap( RuleErrorAuthorizeDerivedKeyAccessSignatureNotValid, err.Error()) } diff --git a/lib/block_view_derived_key_test.go b/lib/block_view_derived_key_test.go index 765fbfb44..fc3b5088f 100644 --- a/lib/block_view_derived_key_test.go +++ b/lib/block_view_derived_key_test.go @@ -1,6 +1,7 @@ package lib import ( + "bytes" "encoding/hex" "encoding/json" "fmt" @@ -9,6 +10,7 @@ import ( "github.com/holiman/uint256" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + 
"math/rand" "testing" "time" ) @@ -18,6 +20,93 @@ const ( BasicTransferAmount = "AMOUNT" ) +// We create this inline function for attempting a basic transfer. +// This helps us test that the DeSoChain recognizes a derived key. +func _derivedKeyBasicTransfer(t *testing.T, db *badger.DB, chain *Blockchain, params *DeSoParams, + senderPk []byte, recipientPk []byte, signerPriv string, utxoView *UtxoView, + mempool *DeSoMempool, isSignerSender bool) ([]*UtxoOperation, *MsgDeSoTxn, error) { + + require := require.New(t) + _ = require + + txn := &MsgDeSoTxn{ + // The inputs will be set below. + TxInputs: []*DeSoInput{}, + TxOutputs: []*DeSoOutput{ + { + PublicKey: recipientPk, + AmountNanos: 1, + }, + }, + PublicKey: senderPk, + TxnMeta: &BasicTransferMetadata{}, + ExtraData: make(map[string][]byte), + } + + totalInput, spendAmount, changeAmount, fees, err := + chain.AddInputsAndChangeToTransaction(txn, 10, mempool) + require.NoError(err) + require.Equal(totalInput, spendAmount+changeAmount+fees) + require.Greater(totalInput, uint64(0)) + + if isSignerSender { + // Sign the transaction with the provided derived key + _signTxn(t, txn, signerPriv) + } else { + // Sign the transaction with the provided derived key + _signTxnWithDerivedKey(t, txn, signerPriv) + } + + // Get utxoView if it doesn't exist + if mempool != nil { + utxoView, err = mempool.GetAugmentedUniversalView() + require.NoError(err) + } + if utxoView == nil { + utxoView, err = NewUtxoView(db, params, chain.postgres, chain.snapshot) + require.NoError(err) + } + + txHash := txn.Hash() + blockHeight := chain.blockTip().Height + 1 + utxoOps, _, _, _, err := + utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, + true /*verifySignature*/, false /*ignoreUtxos*/) + return utxoOps, txn, err +} + +// Verify that the balance and expiration block in the db match expectation. 
+func _derivedKeyVerifyTest(t *testing.T, db *badger.DB, chain *Blockchain, transactionSpendingLimit *TransactionSpendingLimit, + derivedPublicKey []byte, expirationBlockExpected uint64, balanceExpected uint64, + operationTypeExpected AuthorizeDerivedKeyOperationType, mempool *DeSoMempool) { + + require := require.New(t) + _ = require + + senderPkBytes, _, err := Base58CheckDecode(senderPkString) + require.NoError(err) + + // Verify that expiration block was persisted in the db or is in mempool utxoView + var derivedKeyEntry *DerivedKeyEntry + if mempool == nil { + derivedKeyEntry = chain.NewDbAdapter().GetOwnerToDerivedKeyMapping(*NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey)) + } else { + utxoView, err := mempool.GetAugmentedUniversalView() + require.NoError(err) + derivedKeyEntry = utxoView.GetDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) + } + // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. + // To pass the tests, we initialize it to a default struct. 
+ if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { + derivedKeyEntry = &DerivedKeyEntry{*NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey), 0, AuthorizeDerivedKeyOperationValid, nil, transactionSpendingLimit, nil, false} + } + require.Equal(derivedKeyEntry.ExpirationBlock, expirationBlockExpected) + require.Equal(derivedKeyEntry.OperationType, operationTypeExpected) + + // Verify that the balance of recipient is equal to expected balance + require.Equal(_getBalance(t, chain, mempool, recipientPkString), balanceExpected) +} + func _doTxn( testMeta *TestMeta, feeRateNanosPerKB uint64, @@ -26,7 +115,33 @@ func _doTxn( isDerivedTransactor bool, txnType TxnType, txnMeta DeSoTxnMetadata, - extraData map[string]interface{}) ( + extraData map[string]interface{}, + blockHeight uint64) ( + _utxoOps []*UtxoOperation, _txn *MsgDeSoTxn, _height uint32, _err error) { + + return _doTxnWithBlockHeight( + testMeta, + feeRateNanosPerKB, + TransactorPublicKeyBase58Check, + TransactorPrivKeyBase58Check, + isDerivedTransactor, + txnType, + txnMeta, + extraData, + blockHeight, + ) +} + +func _doTxnWithBlockHeight( + testMeta *TestMeta, + feeRateNanosPerKB uint64, + TransactorPublicKeyBase58Check string, + TransactorPrivKeyBase58Check string, + isDerivedTransactor bool, + txnType TxnType, + txnMeta DeSoTxnMetadata, + extraData map[string]interface{}, + encoderBlockHeight uint64) ( _utxoOps []*UtxoOperation, _txn *MsgDeSoTxn, _height uint32, _err error) { assert := assert.New(testMeta.t) require := require.New(testMeta.t) @@ -36,7 +151,7 @@ func _doTxn( transactorPublicKey, _, err := Base58CheckDecode(TransactorPublicKeyBase58Check) require.NoError(err) - utxoView, err := NewUtxoView(testMeta.db, testMeta.params, nil, testMeta.chain.snapshot) + utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) require.NoError(err) chain := testMeta.chain @@ -61,6 +176,7 @@ func _doTxn( feeRateNanosPerKB, nil, nil) + 
require.NoError(err) operationType = OperationTypeCreatorCoin case TxnTypeCreatorCoinTransfer: realTxMeta := txnMeta.(*CreatorCoinTransferMetadataa) @@ -73,6 +189,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeCreatorCoinTransfer case TxnTypeDAOCoin: realTxMeta := txnMeta.(*DAOCoinMetadata) @@ -83,6 +200,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeDAOCoin case TxnTypeDAOCoinTransfer: realTxMeta := txnMeta.(*DAOCoinTransferMetadata) @@ -93,6 +211,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeDAOCoinTransfer case TxnTypeUpdateNFT: realTxMeta := txnMeta.(*UpdateNFTMetadata) @@ -114,6 +233,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeUpdateNFT case TxnTypeCreateNFT: realTxMeta := txnMeta.(*CreateNFTMetadata) @@ -152,6 +272,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeCreateNFT case TxnTypeAcceptNFTBid: realTxMeta := txnMeta.(*AcceptNFTBidMetadata) @@ -166,6 +287,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeAcceptNFTBid case TxnTypeAcceptNFTTransfer: realTxMeta := txnMeta.(*AcceptNFTTransferMetadata) @@ -177,6 +299,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeAcceptNFTTransfer case TxnTypeNFTBid: realTxMeta := txnMeta.(*NFTBidMetadata) @@ -189,6 +312,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeNFTBid nftKey := MakeNFTKey(realTxMeta.NFTPostHash, realTxMeta.SerialNumber) nftEntry := utxoView.GetNFTEntryForNFTKey(&nftKey) @@ -207,6 +331,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeNFTTransfer case TxnTypeBurnNFT: realTxMeta := txnMeta.(*BurnNFTMetadata) @@ -218,6 +343,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeBurnNFT case TxnTypeAuthorizeDerivedKey: realTxMeta := txnMeta.(*AuthorizeDerivedKeyMetadata) @@ 
-233,7 +359,7 @@ func _doTxn( if realTxMeta.OperationType == AuthorizeDerivedKeyOperationNotValid { deleteKey = true } - transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes() + transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes(encoderBlockHeight) require.NoError(err) txn, totalInputMake, changeAmountMake, feesMake, err = chain.CreateAuthorizeDerivedKeyTxn( transactorPublicKey, @@ -249,6 +375,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeAuthorizeDerivedKey case TxnTypeUpdateProfile: realTxMeta := txnMeta.(*UpdateProfileMetadata) @@ -267,6 +394,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeUpdateProfile case TxnTypeSubmitPost: realTxMeta := txnMeta.(*SubmitPostMetadata) @@ -285,6 +413,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeSubmitPost case TxnTypeUpdateGlobalParams: getGlobalParamValFromExtraData := func(key string) int64 { @@ -310,6 +439,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeUpdateGlobalParams case TxnTypeBasicTransfer: @@ -337,6 +467,7 @@ func _doTxn( // depending on what the user requested. totalInputMake, _, changeAmountMake, feesMake, err = chain.AddInputsAndChangeToTransaction( txn, feeRateNanosPerKB, nil) + require.NoError(err) operationType = OperationTypeSpendUtxo case TxnTypeDAOCoinLimitOrder: realTxMeta := txnMeta.(*DAOCoinLimitOrderMetadata) @@ -347,6 +478,7 @@ func _doTxn( nil, nil, ) + require.NoError(err) operationType = OperationTypeDAOCoinLimitOrder default: return nil, nil, 0, fmt.Errorf("Unsupported Txn Type") @@ -376,7 +508,14 @@ func _doTxn( // TODO: generalize? utxoOpExpectation := len(txn.TxInputs) + len(txn.TxOutputs) + 1 if isDerivedTransactor && blockHeight >= testMeta.params.ForkHeights.DerivedKeyTrackSpendingLimitsBlockHeight { - utxoOpExpectation++ + // If we got an unlimited derived key, we will not have an additional spending limit utxoop. 
+ transactorPrivBytes, _, err := Base58CheckDecode(TransactorPrivKeyBase58Check) + _, transactorPub := btcec.PrivKeyFromBytes(btcec.S256(), transactorPrivBytes) + transactorPubBytes := transactorPub.SerializeCompressed() + require.NoError(err) + if !utxoView.GetDerivedKeyMappingForOwner(txn.PublicKey, transactorPubBytes).TransactionSpendingLimitTracker.IsUnlimited { + utxoOpExpectation++ + } } if txnType == TxnTypeBasicTransfer { utxoOpExpectation-- @@ -393,7 +532,7 @@ func _doTxn( require.Equal(operationType, utxoOps[len(utxoOps)-1].Type) } - require.NoError(utxoView.FlushToDb(0)) + require.NoError(utxoView.FlushToDb(encoderBlockHeight)) return utxoOps, txn, blockHeight, nil } @@ -406,18 +545,68 @@ func _doTxnWithTestMeta( IsDerivedTransactor bool, TxnType TxnType, TxnMeta DeSoTxnMetadata, - ExtraData map[string]interface{}) { + ExtraData map[string]interface{}, + encoderBlockHeight uint64) { + + _doTxnWithTestMetaWithBlockHeight( + testMeta, + feeRateNanosPerKB, + TransactorPublicKeyBase58Check, + TransactorPrivateKeyBase58Check, + IsDerivedTransactor, + TxnType, + TxnMeta, + ExtraData, + encoderBlockHeight, + ) +} + +func _doTxnWithTestMetaWithBlockHeight( + testMeta *TestMeta, + feeRateNanosPerKB uint64, + TransactorPublicKeyBase58Check string, + TransactorPrivateKeyBase58Check string, + IsDerivedTransactor bool, + TxnType TxnType, + TxnMeta DeSoTxnMetadata, + ExtraData map[string]interface{}, + encoderBlockHeight uint64) { testMeta.expectedSenderBalances = append(testMeta.expectedSenderBalances, _getBalance(testMeta.t, testMeta.chain, nil, TransactorPublicKeyBase58Check)) - currentOps, currentTxn, _, err := _doTxn(testMeta, + currentOps, currentTxn, _, err := _doTxnWithBlockHeight(testMeta, feeRateNanosPerKB, TransactorPublicKeyBase58Check, TransactorPrivateKeyBase58Check, IsDerivedTransactor, - TxnType, TxnMeta, ExtraData) - + TxnType, TxnMeta, ExtraData, encoderBlockHeight) require.NoError(testMeta.t, err) testMeta.txnOps = append(testMeta.txnOps, 
currentOps) testMeta.txns = append(testMeta.txns, currentTxn) } +func _doTxnWithTextMetaWithBlockHeightWithError( + testMeta *TestMeta, + feeRateNanosPerKB uint64, + TransactorPublicKeyBase58Check string, + TransactorPrivateKeyBase58Check string, + IsDerivedTransactor bool, + TxnType TxnType, + TxnMeta DeSoTxnMetadata, + ExtraData map[string]interface{}, + encoderBlockHeight uint64) error { + + initialBalance := _getBalance(testMeta.t, testMeta.chain, nil, TransactorPublicKeyBase58Check) + + currentOps, currentTxn, _, err := _doTxnWithBlockHeight(testMeta, + feeRateNanosPerKB, TransactorPublicKeyBase58Check, TransactorPrivateKeyBase58Check, IsDerivedTransactor, + TxnType, TxnMeta, ExtraData, encoderBlockHeight) + if err != nil { + return err + } + + testMeta.expectedSenderBalances = append(testMeta.expectedSenderBalances, initialBalance) + testMeta.txnOps = append(testMeta.txnOps, currentOps) + testMeta.txns = append(testMeta.txns, currentTxn) + return nil +} + func _getAuthorizeDerivedKeyMetadata( t *testing.T, ownerPrivateKey *btcec.PrivateKey, @@ -457,7 +646,8 @@ func _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( ownerPrivateKey *btcec.PrivateKey, expirationBlock uint64, transactionSpendingLimit *TransactionSpendingLimit, - isDeleted bool) (*AuthorizeDerivedKeyMetadata, *btcec.PrivateKey) { + isDeleted bool, + blockHeight uint64) (*AuthorizeDerivedKeyMetadata, *btcec.PrivateKey) { require := require.New(t) // Generate a random derived key pair @@ -465,18 +655,6 @@ func _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( require.NoError(err, "_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit: Error generating a derived key pair") derivedPublicKey := derivedPrivateKey.PubKey().SerializeCompressed() - // Create access signature - expirationBlockByte := EncodeUint64(expirationBlock) - accessBytes := append(derivedPublicKey, expirationBlockByte[:]...) 
- - var transactionSpendingLimitBytes []byte - transactionSpendingLimitBytes, err = transactionSpendingLimit.ToBytes() - require.NoError(err, "_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit: Error in transaction spending limit to bytes") - accessBytes = append(accessBytes, transactionSpendingLimitBytes[:]...) - - accessSignature, err := ownerPrivateKey.Sign(Sha256DoubleHash(accessBytes)[:]) - require.NoError(err, "_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit: Error creating access signature") - // Determine operation type var operationType AuthorizeDerivedKeyOperationType if isDeleted { @@ -485,11 +663,31 @@ func _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( operationType = AuthorizeDerivedKeyOperationValid } + // We randomly use standard or the metamask derived key access signature. + var accessBytes []byte + accessBytesEncodingType := rand.Int() % 2 + if accessBytesEncodingType == 0 { + // Create access signature + expirationBlockByte := EncodeUint64(expirationBlock) + accessBytes = append(derivedPublicKey, expirationBlockByte[:]...) + + var transactionSpendingLimitBytes []byte + transactionSpendingLimitBytes, err = transactionSpendingLimit.ToBytes(blockHeight) + require.NoError(err, "_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit: Error in transaction spending limit to bytes") + accessBytes = append(accessBytes, transactionSpendingLimitBytes[:]...) 
+ } else { + accessBytes = AssembleAccessBytesWithMetamaskStrings(derivedPublicKey, expirationBlock, + transactionSpendingLimit, &DeSoTestnetParams) + } + signature, err := ownerPrivateKey.Sign(Sha256DoubleHash(accessBytes)[:]) + accessSignature := signature.Serialize() + require.NoError(err, "_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit: Error creating access signature") + return &AuthorizeDerivedKeyMetadata{ derivedPublicKey, expirationBlock, operationType, - accessSignature.Serialize(), + accessSignature, }, derivedPrivateKey } @@ -499,7 +697,8 @@ func _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivat expirationBlock uint64, transactionSpendingLimit *TransactionSpendingLimit, derivedPrivateKey *btcec.PrivateKey, - isDeleted bool) (*AuthorizeDerivedKeyMetadata, *btcec.PrivateKey) { + isDeleted bool, + blockHeight uint64) (*AuthorizeDerivedKeyMetadata, *btcec.PrivateKey) { require := require.New(t) derivedPublicKey := derivedPrivateKey.PubKey().SerializeCompressed() @@ -508,7 +707,7 @@ func _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivat expirationBlockByte := EncodeUint64(expirationBlock) accessBytes := append(derivedPublicKey, expirationBlockByte[:]...) - transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes() + transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes(blockHeight) require.NoError(err, "_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit: Error in transaction spending limit to bytes") accessBytes = append(accessBytes, transactionSpendingLimitBytes[:]...) @@ -535,9 +734,10 @@ func _getAccessSignature( derivedPublicKey []byte, expirationBlock uint64, transactionSpendingLimit *TransactionSpendingLimit, - ownerPrivateKey *btcec.PrivateKey) ([]byte, error) { + ownerPrivateKey *btcec.PrivateKey, + blockHeight uint64) ([]byte, error) { accessBytes := append(derivedPublicKey, EncodeUint64(expirationBlock)...) 
- transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes() + transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes(blockHeight) if err != nil { return nil, err } @@ -573,7 +773,7 @@ func _doAuthorizeTxnWithExtraDataAndSpendingLimits(t *testing.T, chain *Blockcha _ = assert _ = require - transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes() + transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes(0) require.NoError(err) txn, totalInput, changeAmount, fees, err := chain.CreateAuthorizeDerivedKeyTxn( ownerPublicKey, @@ -639,6 +839,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { chain, params, db := NewLowDifficultyBlockchain() mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) + dbAdapter := chain.NewDbAdapter() params.ForkHeights.NFTTransferOrBurnAndDerivedKeysBlockHeight = uint32(0) params.ForkHeights.ExtraDataOnEntriesBlockHeight = uint32(0) @@ -664,114 +865,27 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { derivedPkBytes := derivedPriv.PubKey().SerializeCompressed() fmt.Println("Derived public key:", hex.EncodeToString(derivedPkBytes)) - // We create this inline function for attempting a basic transfer. - // This helps us test that the DeSoChain recognizes a derived key. - _basicTransfer := func(senderPk []byte, recipientPk []byte, signerPriv string, utxoView *UtxoView, - mempool *DeSoMempool, isSignerSender bool) ([]*UtxoOperation, *MsgDeSoTxn, error) { - - txn := &MsgDeSoTxn{ - // The inputs will be set below. 
- TxInputs: []*DeSoInput{}, - TxOutputs: []*DeSoOutput{ - { - PublicKey: recipientPk, - AmountNanos: 1, - }, - }, - PublicKey: senderPk, - TxnMeta: &BasicTransferMetadata{}, - ExtraData: make(map[string][]byte), - } - - totalInput, spendAmount, changeAmount, fees, err := - chain.AddInputsAndChangeToTransaction(txn, 10, mempool) - require.NoError(err) - require.Equal(totalInput, spendAmount+changeAmount+fees) - require.Greater(totalInput, uint64(0)) - - if isSignerSender { - // Sign the transaction with the provided derived key - _signTxn(t, txn, signerPriv) - } else { - // Sign the transaction with the provided derived key - _signTxnWithDerivedKey(t, txn, signerPriv) - } - - // Get utxoView if it doesn't exist - if mempool != nil { - utxoView, err = mempool.GetAugmentedUniversalView() - require.NoError(err) - } - if utxoView == nil { - utxoView, err = NewUtxoView(db, params, nil, chain.snapshot) - require.NoError(err) - } - - txHash := txn.Hash() - blockHeight := chain.blockTip().Height + 1 - utxoOps, _, _, _, err := - utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, - true /*verifySignature*/, false /*ignoreUtxos*/) - return utxoOps, txn, err - } - - // Verify that the balance and expiration block in the db match expectation. - _verifyTestWithExtraData := func(derivedPublicKey []byte, expirationBlockExpected uint64, - balanceExpected uint64, operationTypeExpected AuthorizeDerivedKeyOperationType, extraData map[string][]byte, - mempool *DeSoMempool) { - // Verify that expiration block was persisted in the db or is in mempool utxoView - if mempool == nil { - derivedKeyEntry := DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey)) - // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. - // To pass the tests, we initialize it to a default struct. 
- if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { - derivedKeyEntry = &DerivedKeyEntry{*NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey), 0, AuthorizeDerivedKeyOperationValid, nil, transactionSpendingLimit, nil, false} - } - assert.Equal(derivedKeyEntry.ExpirationBlock, expirationBlockExpected) - assert.Equal(derivedKeyEntry.OperationType, operationTypeExpected) - } else { - utxoView, err := mempool.GetAugmentedUniversalView() - require.NoError(err) - derivedKeyEntry := utxoView.GetDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) - // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. - // To pass the tests, we initialize it to a default struct. - if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { - derivedKeyEntry = &DerivedKeyEntry{*NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey), 0, AuthorizeDerivedKeyOperationValid, nil, transactionSpendingLimit, nil, false} - } - assert.Equal(derivedKeyEntry.ExpirationBlock, expirationBlockExpected) - assert.Equal(derivedKeyEntry.OperationType, operationTypeExpected) - } - - // Verify that the balance of recipient is equal to expected balance - assert.Equal(_getBalance(t, chain, mempool, recipientPkString), balanceExpected) - } - - _verifyTest := func(derivedPublicKey []byte, expirationBlockExpected uint64, - balanceExpected uint64, operationTypeExpected AuthorizeDerivedKeyOperationType, mempool *DeSoMempool) { - _verifyTestWithExtraData(derivedPublicKey, expirationBlockExpected, balanceExpected, - operationTypeExpected, nil, mempool) - } - // We will use these to keep track of added utxo ops and txns testUtxoOps := [][]*UtxoOperation{} testTxns := []*MsgDeSoTxn{} - // Just for the sake of consistency, we run the _basicTransfer on unauthorized + // Just for the sake of consistency, we run the _derivedKeyBasicTransfer on unauthorized // derived key. It should fail since blockchain hasn't seen this key yet. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed basic transfer signed with unauthorized derived key") } // Attempt sending an AuthorizeDerivedKey txn signed with an invalid private key. // This must fail because the txn has to be signed either by owner or derived key. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) @@ -794,13 +908,14 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { ) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn signed with an unauthorized private key.") } // Attempt sending an AuthorizeDerivedKey txn where access signature is signed with // an invalid private key. This must fail. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) @@ -826,25 +941,27 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { ) require.Error(err) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn signed with an invalid access signature.") } // Check basic transfer signed with still unauthorized derived key. // Should fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed basic transfer signed with unauthorized derived key") } // Now attempt to send the same transaction but signed with the correct derived key. // This must pass. The new derived key will be flushed to the db here. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) extraData := map[string][]byte{ @@ -874,39 +991,42 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { testTxns = append(testTxns, txn) // Verify that expiration block was persisted in the db - _verifyTestWithExtraData(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 0, AuthorizeDerivedKeyOperationValid, extraData, nil) - derivedKeyEntry := DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(senderPkBytes), *NewPublicKey(authTxnMeta.DerivedPublicKey)) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 0, AuthorizeDerivedKeyOperationValid, nil) + derivedKeyEntry := dbAdapter.GetOwnerToDerivedKeyMapping(*NewPublicKey(senderPkBytes), *NewPublicKey(authTxnMeta.DerivedPublicKey)) require.Equal(derivedKeyEntry.ExtraData["test"], []byte("result")) fmt.Println("Passed connecting AuthorizeDerivedKey txn signed with an authorized private key. Flushed to Db.") } // Check basic transfer signed by the owner key. // Should succeed. Flush to db. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 1, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 1, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with owner key. Flushed to Db.") } // Check basic transfer signed with now authorized derived key. // Should succeed. Flush to db. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with authorized derived key. Flushed to Db.") } // Check basic transfer signed with a random key. 
@@ -916,13 +1036,14 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Fail basic transfer signed with random key.") } // Try disconnecting all transactions so that key is deauthorized. @@ -935,7 +1056,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { fmt.Println("currentTxn.String()", currentTxn.String()) // Disconnect the transaction - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 fmt.Printf("Disconnecting test index: %v\n", testIndex) @@ -946,25 +1067,27 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { require.NoErrorf(utxoView.FlushToDb(0), "SimpleDisconnect: Index: %v", testIndex) } - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txns. 
Flushed to Db.") } // After disconnecting, check basic transfer signed with unauthorized derived key. // Should fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed basic transfer signed with unauthorized derived key after disconnecting") } // Connect all txns to a single UtxoView flushing only at the end. { // Create a new UtxoView - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) for testIndex, txn := range testTxns { fmt.Printf("Applying test index: %v\n", testIndex) @@ -980,7 +1103,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { require.NoError(utxoView.FlushToDb(0)) // Verify that expiration block and balance was persisted in the db - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed re-connecting all txn to a single utxoView") } // Check basic transfer signed with a random key. 
@@ -990,19 +1114,20 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Fail basic transfer signed with random key.") } // Disconnect all txns on a single UtxoView flushing only at the end { // Create a new UtxoView - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) for iterIndex := range testTxns { testIndex := len(testTxns) - 1 - iterIndex @@ -1017,7 +1142,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { require.NoError(utxoView.FlushToDb(0)) // Verify that expiration block and balance was persisted in the db - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txn on a single utxoView") } // Connect transactions to a single mempool, should pass. 
@@ -1031,7 +1157,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed connecting all txn to the mempool") } // Check basic transfer signed with a random key, when passing mempool. @@ -1041,11 +1168,12 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, nil, mempool, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Fail basic transfer signed with random key with mempool.") } // Remove all the transactions from the mempool. Should pass. @@ -1054,17 +1182,19 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { mempool.inefficientRemoveTransaction(burnTxn) } // This will check the expiration block and balances according to the mempool augmented utxoView. 
- _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed removing all txn from the mempool.") } // After disconnecting, check basic transfer signed with unauthorized derived key. // Should fail. { - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, nil, mempool, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Failed basic transfer signed with unauthorized derived key after disconnecting") } // Re-connect transactions to a single mempool, should pass. @@ -1078,7 +1208,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed connecting all txn to the mempool.") } // We will be adding some blocks so we define an array to keep track of them. @@ -1097,9 +1228,9 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { // Check basic transfer signed by the owner key. // Should succeed. Flush to db. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) @@ -1107,21 +1238,23 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { testTxns = append(testTxns, txn) fmt.Println("Passed basic transfer signed with owner key. Flushed to Db.") - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) } // Check basic transfer signed with authorized derived key. Now the auth txn is persisted in the db. // Should succeed. Flush to db. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with authorized derived key. Flushed to Db.") } // Check basic transfer signed with a random key. 
@@ -1131,13 +1264,14 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Fail basic transfer signed with random key.") } // Try disconnecting all transactions. Should succeed. @@ -1149,7 +1283,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { fmt.Println("currentTxn.String()", currentTxn.String()) // Disconnect the transaction - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 fmt.Printf("Disconnecting test index: %v\n", testIndex) @@ -1160,7 +1294,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { require.NoErrorf(utxoView.FlushToDb(0), "SimpleDisconnect: Index: %v", testIndex) } - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txns. 
Flushed to Db.") } // Mine a few more blocks so that the authorization should expire @@ -1175,26 +1310,28 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { // Check basic transfer signed by the owner key. // Should succeed. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) // We're not persisting in the db so balance should remain at 2. - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with owner key.") } // Check basic transfer signed with expired authorized derived key. // Should fail. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed a txn signed with an expired derived key.") } @@ -1210,7 +1347,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { // Send an authorize transaction signed with the correct derived key. // This must pass. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) utxoOps, txn, _, err := _doAuthorizeTxn( t, @@ -1233,7 +1370,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { testTxns = append(testTxns, txn) // Verify that expiration block was persisted in the db - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, 0, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, 0, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed connecting AuthorizeDerivedKey txn signed with an authorized private key.") } // Re-connect transactions to a single mempool, should pass. @@ -1247,7 +1385,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. 
- _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed connecting all txn to the mempool.") } // Mine a block so that mempool gets flushed to db @@ -1263,9 +1402,9 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { // Check basic transfer signed with new authorized derived key. // Sanity check. Should pass. We're not flushing to the db yet. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, utxoView, nil, false) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) @@ -1273,14 +1412,15 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { testTxns = append(testTxns, txn) // We're persisting to the db so balance should change to 3. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with derived key.") } // Send a de-authorize transaction signed with a derived key. // Doesn't matter if it's signed by the owner or not, once a isDeleted // txn appears, the key should be forever expired. This must pass. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) utxoOps, txn, _, err := _doAuthorizeTxn( t, @@ -1303,28 +1443,30 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) // Verify the expiration block in the db - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Passed connecting AuthorizeDerivedKey txn with isDeleted signed with an authorized private key.") } // Check basic transfer signed with new authorized derived key. // Now that key has been de-authorized this must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) // Since this should fail, balance wouldn't change. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed basic transfer signed with de-authorized derived key.") } // Sanity check basic transfer signed by the owner key. // Should succeed. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) @@ -1332,13 +1474,14 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { testTxns = append(testTxns, txn) // Balance should change to 4 - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Passed basic transfer signed with owner key.") } // Send an authorize transaction signed with a derived key. // Since we've already deleted this derived key, this must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) _, _, _, err = _doAuthorizeTxn( t, @@ -1358,7 +1501,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { ) require.Contains(err.Error(), RuleErrorAuthorizeDerivedKeyDeletedDerivedPublicKey) - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn with de-authorized private key.") } // Try disconnecting all transactions. Should succeed. 
@@ -1370,7 +1514,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { fmt.Println("currentTxn.String()", currentTxn.String()) // Disconnect the transaction - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 fmt.Printf("Disconnecting test index: %v\n", testIndex) @@ -1381,7 +1525,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { require.NoErrorf(utxoView.FlushToDb(0), "SimpleDisconnect: Index: %v", testIndex) } - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txns. Flushed to Db.") } // Connect transactions to a single mempool, should pass. @@ -1395,18 +1540,20 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) fmt.Println("Passed connecting all txn to the mempool") } // Check adding basic transfer to mempool signed with new authorized derived key. // Now that key has been de-authorized this must fail. 
{ - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, nil, mempool, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) // Since this should fail, balance wouldn't change. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) fmt.Println("Failed basic transfer signed with de-authorized derived key in mempool.") } // Attempt re-authorizing a previously de-authorized derived key. @@ -1432,7 +1579,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { ) require.Contains(err.Error(), RuleErrorAuthorizeDerivedKeyDeletedDerivedPublicKey) - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) fmt.Println("Failed connecting AuthorizeDerivedKey txn with de-authorized private key.") } // Mine a block so that mempool gets flushed to db @@ -1445,20 +1593,21 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { // Check adding basic transfer signed with new authorized derived key. // Now that key has been de-authorized this must fail. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) // Since this should fail, balance wouldn't change. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed basic transfer signed with de-authorized derived key.") } // Attempt re-authorizing a previously de-authorized derived key. // Since we've already deleted this derived key, this must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) _, _, _, err = _doAuthorizeTxn( t, @@ -1478,20 +1627,22 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { ) require.Contains(err.Error(), RuleErrorAuthorizeDerivedKeyDeletedDerivedPublicKey) - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn with de-authorized private key.") } // Sanity check basic transfer signed by the owner key. // Should succeed. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) // Balance should change to 4 - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Passed basic transfer signed with owner key.") } // Roll back the blocks and make sure we don't hit any errors. @@ -1509,7 +1660,7 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { require.NoError(utxoView.DisconnectBlock(blockToDisconnect, txHashes, utxoOps, 0)) } { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) for iterIndex := range testBlocks { @@ -1524,7 +1675,8 @@ func TestAuthorizeDerivedKeyBasic(t *testing.T) { } // After we rolled back the blocks, db should reset - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Successfuly run TestAuthorizeDerivedKeyBasic()") } @@ -1563,114 +1715,35 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { GlobalDESOLimit: NanosPerUnit, // 1 DESO limit TransactionCountLimitMap: transactionCountLimitMap, } + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) authTxnMeta, derivedPriv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( - t, 
senderPriv, 6, transactionSpendingLimit, false) + t, senderPriv, 6, transactionSpendingLimit, false, blockHeight+1) derivedPrivBase58Check := Base58CheckEncode(derivedPriv.Serialize(), true, params) derivedPkBytes := derivedPriv.PubKey().SerializeCompressed() fmt.Println("Derived public key:", hex.EncodeToString(derivedPkBytes)) - // We create this inline function for attempting a basic transfer. - // This helps us test that the DeSoChain recognizes a derived key. - _basicTransfer := func(senderPk []byte, recipientPk []byte, signerPriv string, utxoView *UtxoView, - mempool *DeSoMempool, isSignerSender bool) ([]*UtxoOperation, *MsgDeSoTxn, error) { - - txn := &MsgDeSoTxn{ - // The inputs will be set below. - TxInputs: []*DeSoInput{}, - TxOutputs: []*DeSoOutput{ - { - PublicKey: recipientPk, - AmountNanos: 1, - }, - }, - PublicKey: senderPk, - TxnMeta: &BasicTransferMetadata{}, - ExtraData: make(map[string][]byte), - } - - totalInput, spendAmount, changeAmount, fees, err := - chain.AddInputsAndChangeToTransaction(txn, 10, mempool) - require.NoError(err) - require.Equal(totalInput, spendAmount+changeAmount+fees) - require.Greater(totalInput, uint64(0)) - - if isSignerSender { - // Sign the transaction with the provided derived key - _signTxn(t, txn, signerPriv) - } else { - // Sign the transaction with the provided derived key - _signTxnWithDerivedKey(t, txn, signerPriv) - } - - // Get utxoView if it doesn't exist - if mempool != nil { - utxoView, err = mempool.GetAugmentedUniversalView() - require.NoError(err) - } - if utxoView == nil { - utxoView, err = NewUtxoView(db, params, nil, chain.snapshot) - require.NoError(err) - } - - txHash := txn.Hash() - blockHeight := chain.blockTip().Height + 1 - utxoOps, _, _, _, err := - utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, - true /*verifySignature*/, false /*ignoreUtxos*/) - return utxoOps, txn, err - } - - // Verify that the balance and expiration block in the db match expectation. 
- _verifyTest := func(derivedPublicKey []byte, expirationBlockExpected uint64, - balanceExpected uint64, operationTypeExpected AuthorizeDerivedKeyOperationType, mempool *DeSoMempool) { - // Verify that expiration block was persisted in the db or is in mempool utxoView - if mempool == nil { - derivedKeyEntry := DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey)) - // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. - // To pass the tests, we initialize it to a default struct. - if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { - derivedKeyEntry = &DerivedKeyEntry{ - *NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey), 0, AuthorizeDerivedKeyOperationValid, nil, transactionSpendingLimit, nil, false} - } - assert.Equal(derivedKeyEntry.ExpirationBlock, expirationBlockExpected) - assert.Equal(derivedKeyEntry.OperationType, operationTypeExpected) - } else { - utxoView, err := mempool.GetAugmentedUniversalView() - require.NoError(err) - derivedKeyEntry := utxoView.GetDerivedKeyMappingForOwner(senderPkBytes, derivedPublicKey) - // If we removed the derivedKeyEntry from utxoView altogether, it'll be nil. - // To pass the tests, we initialize it to a default struct. 
- if derivedKeyEntry == nil || derivedKeyEntry.isDeleted { - derivedKeyEntry = &DerivedKeyEntry{*NewPublicKey(senderPkBytes), *NewPublicKey(derivedPublicKey), 0, AuthorizeDerivedKeyOperationValid, nil, transactionSpendingLimit, nil, false} - } - assert.Equal(derivedKeyEntry.ExpirationBlock, expirationBlockExpected) - assert.Equal(derivedKeyEntry.OperationType, operationTypeExpected) - } - - // Verify that the balance of recipient is equal to expected balance - assert.Equal(_getBalance(t, chain, mempool, recipientPkString), balanceExpected) - } - // We will use these to keep track of added utxo ops and txns testUtxoOps := [][]*UtxoOperation{} testTxns := []*MsgDeSoTxn{} - // Just for the sake of consistency, we run the _basicTransfer on unauthorized + // Just for the sake of consistency, we run the _derivedKeyBasicTransfer on unauthorized // derived key. It should fail since blockchain hasn't seen this key yet. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed basic transfer signed with unauthorized derived key") } // Attempt sending an AuthorizeDerivedKey txn signed with an invalid private key. // This must fail because the txn has to be signed either by owner or derived key. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) @@ -1693,13 +1766,14 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { ) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn signed with an unauthorized private key.") } // Attempt sending an AuthorizeDerivedKey txn where access signature is signed with // an invalid private key. This must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) @@ -1725,25 +1799,27 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { ) require.Error(err) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn signed with an invalid access signature.") } // Check basic transfer signed with still unauthorized derived key. // Should fail. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed basic transfer signed with unauthorized derived key") } // Now attempt to send the same transaction but signed with the correct derived key. // This must pass. The new derived key will be flushed to the db here. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) utxoOps, txn, _, err := _doAuthorizeTxn( t, @@ -1768,37 +1844,47 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testTxns = append(testTxns, txn) // Verify that expiration block was persisted in the db - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed connecting AuthorizeDerivedKey txn signed with an authorized private key. Flushed to Db.") } // Check basic transfer signed by the owner key. // Should succeed. Flush to db. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 1, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 1, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with owner key. Flushed to Db.") } // Check basic transfer signed with now authorized derived key. // Should succeed. Flush to db. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.NoError(err) - require.NoError(utxoView.FlushToDb(0)) - testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) + testUtxoOps = append(testUtxoOps, utxoOps) + require.NoError(utxoView.FlushToDb(0)) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + // Attempting the basic transfer again should error because the spending limit authorized only 1 transfer. 
+ utxoView, err = NewUtxoView(db, params, chain.postgres, chain.snapshot) + require.NoError(err) + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, + derivedPrivBase58Check, utxoView, nil, false) + require.Contains(err.Error(), RuleErrorDerivedKeyTxnTypeNotAuthorized) + + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with authorized derived key. Flushed to Db.") } // Check basic transfer signed with a random key. @@ -1808,13 +1894,14 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Fail basic transfer signed with random key.") } // Try disconnecting all transactions so that key is deauthorized. 
@@ -1827,7 +1914,7 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { fmt.Println("currentTxn.String()", currentTxn.String()) // Disconnect the transaction - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 fmt.Printf("Disconnecting test index: %v\n", testIndex) @@ -1838,25 +1925,27 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { require.NoErrorf(utxoView.FlushToDb(0), "SimpleDisconnect: Index: %v", testIndex) } - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txns. Flushed to Db.") } // After disconnecting, check basic transfer signed with unauthorized derived key. // Should fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed basic transfer signed with unauthorized derived key after disconnecting") } // Connect all txns to a single UtxoView flushing only at the end. 
{ // Create a new UtxoView - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) for testIndex, txn := range testTxns { fmt.Printf("Applying test index: %v\n", testIndex) @@ -1872,7 +1961,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { require.NoError(utxoView.FlushToDb(0)) // Verify that expiration block and balance was persisted in the db - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed re-connecting all txn to a single utxoView") } // Check basic transfer signed with a random key. @@ -1882,19 +1972,20 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Fail basic transfer signed with random key.") } // Disconnect all txns on a single UtxoView flushing only at the end { // Create a new 
UtxoView - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) for iterIndex := range testTxns { testIndex := len(testTxns) - 1 - iterIndex @@ -1909,7 +2000,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { require.NoError(utxoView.FlushToDb(0)) // Verify that expiration block and balance was persisted in the db - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txn on a single utxoView") } // Connect transactions to a single mempool, should pass. @@ -1923,7 +2015,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed connecting all txn to the mempool") } // Check basic transfer signed with a random key, when passing mempool. 
@@ -1933,11 +2026,12 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, nil, mempool, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Fail basic transfer signed with random key with mempool.") } // Remove all the transactions from the mempool. Should pass. @@ -1946,17 +2040,19 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { mempool.inefficientRemoveTransaction(burnTxn) } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed removing all txn from the mempool.") } // After disconnecting, check basic transfer signed with unauthorized derived key. // Should fail. 
{ - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, nil, mempool, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Failed basic transfer signed with unauthorized derived key after disconnecting") } // Re-connect transactions to a single mempool, should pass. @@ -1970,7 +2066,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed connecting all txn to the mempool.") } // We will be adding some blocks so we define an array to keep track of them. @@ -1989,9 +2086,9 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { // Check basic transfer signed by the owner key. // Should succeed. Flush to db. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) @@ -1999,14 +2096,15 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testTxns = append(testTxns, txn) fmt.Println("Passed basic transfer signed with owner key. Flushed to Db.") - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) } // Check basic transfer signed with authorized derived key. Now the auth txn is persisted in the db. // Should succeed. Flush to db. { // We authorize an additional basic transfer before the derived key can do this. 
- utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) addlBasicTransferMap := make(map[TxnType]uint64) addlBasicTransferMap[TxnTypeBasicTransfer] = 1 @@ -2036,14 +2134,20 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testUtxoOps = append(testUtxoOps, authorizeUTXOOps) testTxns = append(testTxns, authorizeTxn) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) + // Try sending another basic transfer from the derived key. Should fail because we only authorized 2 basic transfers in total. + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, + derivedPrivBase58Check, utxoView, nil, false) + require.Contains(err.Error(), RuleErrorDerivedKeyTxnTypeNotAuthorized) + + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with authorized derived key. Flushed to Db.") } // Check basic transfer signed with a random key. 
@@ -2053,13 +2157,14 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { randomPrivateKey, err := btcec.NewPrivateKey(btcec.S256()) require.NoError(err) randomPrivBase58Check := Base58CheckEncode(randomPrivateKey.Serialize(), true, params) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, randomPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 4, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Fail basic transfer signed with random key.") } // Try disconnecting all transactions. Should succeed. 
@@ -2071,7 +2176,7 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { fmt.Println("currentTxn.String()", currentTxn.String()) // Disconnect the transaction - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 fmt.Printf("Disconnecting test index: %v\n", testIndex) @@ -2082,7 +2187,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { require.NoErrorf(utxoView.FlushToDb(0), "SimpleDisconnect: Index: %v", testIndex) } - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txns. Flushed to Db.") } // Mine a few more blocks so that the authorization should expire @@ -2097,26 +2203,28 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { // Check basic transfer signed by the owner key. // Should succeed. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) // We're not persisting in the db so balance should remain at 2. 
- _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with owner key.") } // Check basic transfer signed with expired authorized derived key. // Should fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) - _verifyTest(authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, authTxnMeta.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Failed a txn signed with an expired derived key.") } @@ -2125,14 +2233,17 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testTxns = []*MsgDeSoTxn{} // Get another AuthorizeDerivedKey txn metadata with expiration at block 10 // We will try to de-authorize this key with a txn before it expires. 
- authTxnMetaDeAuth, derivedDeAuthPriv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit(t, senderPriv, 10, transactionSpendingLimit, false) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + authTxnMetaDeAuth, derivedDeAuthPriv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( + t, senderPriv, 10, transactionSpendingLimit, false, blockHeight+1) derivedPrivDeAuthBase58Check := Base58CheckEncode(derivedDeAuthPriv.Serialize(), true, params) derivedDeAuthPkBytes := derivedDeAuthPriv.PubKey().SerializeCompressed() fmt.Println("Derived public key:", hex.EncodeToString(derivedDeAuthPkBytes)) // Send an authorize transaction signed with the correct derived key. // This must pass. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) utxoOps, txn, _, err := _doAuthorizeTxn( t, @@ -2155,7 +2266,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testTxns = append(testTxns, txn) // Verify that expiration block was persisted in the db - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, 0, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, 0, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed connecting AuthorizeDerivedKey txn signed with an authorized private key.") } // Re-connect transactions to a single mempool, should pass. @@ -2169,7 +2281,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. 
- _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, mempool) fmt.Println("Passed connecting all txn to the mempool.") } // Mine a block so that mempool gets flushed to db @@ -2185,9 +2298,9 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { // Check basic transfer signed with new authorized derived key. // Sanity check. Should pass. We're not flushing to the db yet. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, utxoView, nil, false) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) @@ -2195,14 +2308,15 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testTxns = append(testTxns, txn) // We're persisting to the db so balance should change to 3. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed basic transfer signed with derived key.") } // Send a de-authorize transaction signed with a derived key. // Doesn't matter if it's signed by the owner or not, once a isDeleted // txn appears, the key should be forever expired. This must pass. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) utxoOps, txn, _, err := _doAuthorizeTxn( t, @@ -2225,28 +2339,30 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testUtxoOps = append(testUtxoOps, utxoOps) testTxns = append(testTxns, txn) // Verify the expiration block in the db - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Passed connecting AuthorizeDerivedKey txn with isDeleted signed with an authorized private key.") } // Check basic transfer signed with new authorized derived key. // Now that key has been de-authorized this must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) // Since this should fail, balance wouldn't change. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 3, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed basic transfer signed with de-authorized derived key.") } // Sanity check basic transfer signed by the owner key. // Should succeed. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - utxoOps, txn, err := _basicTransfer(senderPkBytes, recipientPkBytes, + utxoOps, txn, err := _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) require.NoError(utxoView.FlushToDb(0)) @@ -2254,13 +2370,14 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { testTxns = append(testTxns, txn) // Balance should change to 4 - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Passed basic transfer signed with owner key.") } // Send an authorize transaction signed with a derived key. // Since we've already deleted this derived key, this must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) _, _, _, err = _doAuthorizeTxn( t, @@ -2280,7 +2397,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { ) require.Contains(err.Error(), RuleErrorAuthorizeDerivedKeyDeletedDerivedPublicKey) - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn with de-authorized private key.") } // Try disconnecting all transactions. Should succeed. 
@@ -2292,7 +2410,7 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { fmt.Println("currentTxn.String()", currentTxn.String()) // Disconnect the transaction - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 fmt.Printf("Disconnecting test index: %v\n", testIndex) @@ -2303,7 +2421,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { require.NoErrorf(utxoView.FlushToDb(0), "SimpleDisconnect: Index: %v", testIndex) } - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 2, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Passed disconnecting all txns. Flushed to Db.") } // Connect transactions to a single mempool, should pass. @@ -2317,18 +2436,20 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { } // This will check the expiration block and balances according to the mempool augmented utxoView. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) fmt.Println("Passed connecting all txn to the mempool") } // Check adding basic transfer to mempool signed with new authorized derived key. // Now that key has been de-authorized this must fail. 
{ - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, nil, mempool, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) // Since this should fail, balance wouldn't change. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) fmt.Println("Failed basic transfer signed with de-authorized derived key in mempool.") } // Attempt re-authorizing a previously de-authorized derived key. @@ -2354,7 +2475,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { ) require.Contains(err.Error(), RuleErrorAuthorizeDerivedKeyDeletedDerivedPublicKey) - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, mempool) fmt.Println("Failed connecting AuthorizeDerivedKey txn with de-authorized private key.") } // Mine a block so that mempool gets flushed to db @@ -2367,20 +2489,21 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { // Check adding basic transfer signed with new authorized derived key. // Now that key has been de-authorized this must fail. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, derivedPrivDeAuthBase58Check, utxoView, nil, false) require.Contains(err.Error(), RuleErrorDerivedKeyNotAuthorized) // Since this should fail, balance wouldn't change. - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed basic transfer signed with de-authorized derived key.") } // Attempt re-authorizing a previously de-authorized derived key. // Since we've already deleted this derived key, this must fail. { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) _, _, _, err = _doAuthorizeTxn( t, @@ -2400,20 +2523,22 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { ) require.Contains(err.Error(), RuleErrorAuthorizeDerivedKeyDeletedDerivedPublicKey) - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Failed connecting AuthorizeDerivedKey txn with de-authorized private key.") } // Sanity check basic transfer signed by the owner key. // Should succeed. 
{ - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) - _, _, err = _basicTransfer(senderPkBytes, recipientPkBytes, + _, _, err = _derivedKeyBasicTransfer(t, db, chain, params, senderPkBytes, recipientPkBytes, senderPrivString, utxoView, nil, true) require.NoError(err) // Balance should change to 4 - _verifyTest(authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMetaDeAuth.DerivedPublicKey, authTxnMetaDeAuth.ExpirationBlock, 4, AuthorizeDerivedKeyOperationNotValid, nil) fmt.Println("Passed basic transfer signed with owner key.") } // Roll back the blocks and make sure we don't hit any errors. @@ -2431,7 +2556,7 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { require.NoError(utxoView.DisconnectBlock(blockToDisconnect, txHashes, utxoOps, 0)) } { - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, chain.postgres, chain.snapshot) require.NoError(err) for iterIndex := range testBlocks { @@ -2446,7 +2571,8 @@ func TestAuthorizeDerivedKeyBasicWithTransactionLimits(t *testing.T) { } // After we rolled back the blocks, db should reset - _verifyTest(authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) + _derivedKeyVerifyTest(t, db, chain, transactionSpendingLimit, + authTxnMeta.DerivedPublicKey, 0, 0, AuthorizeDerivedKeyOperationValid, nil) fmt.Println("Successfuly run TestAuthorizeDerivedKeyBasicWithTransactionLimits()") } @@ -2458,6 +2584,30 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { chain, params, db := NewLowDifficultyBlockchain() mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) + dbAdapter := chain.NewDbAdapter() + + // Set the block height for unlimited derived keys to 
10. We will perform three sets of tests: + 1) Before the unlimited derived keys block height for utxo_view and encoder migration. + 2) Right at the unlimited derived keys block height. + 3) After the block height. + const ( + unlimitedDerivedKeysBlockHeight = uint32(10) + TestStageBeforeUnlimitedDerivedBlockHeight = "TestStageBeforeUnlimitedDerivedBlockHeight" + TestStageAtUnlimitedDerivedBlockHeight = "TestStageAtUnlimitedDerivedBlockHeight" + TestStageAfterUnlimitedDerivedBlockHeight = "TestStageAfterUnlimitedDerivedBlockHeight" + ) + testStage := TestStageBeforeUnlimitedDerivedBlockHeight + + GlobalDeSoParams = *params + GlobalDeSoParams.ForkHeights.DeSoUnlimitedDerivedKeysBlockHeight = unlimitedDerivedKeysBlockHeight + for ii := range GlobalDeSoParams.EncoderMigrationHeightsList { + migration := GlobalDeSoParams.EncoderMigrationHeightsList[ii] + if migration.Name == UnlimitedDerivedKeysMigration { + GlobalDeSoParams.EncoderMigrationHeightsList[ii].Height = uint64(unlimitedDerivedKeysBlockHeight) + } else { + GlobalDeSoParams.EncoderMigrationHeightsList[ii].Height = 0 + } + } params.ForkHeights.NFTTransferOrBurnAndDerivedKeysBlockHeight = uint32(0) params.ForkHeights.DerivedKeySetSpendingLimitsBlockHeight = uint32(0) @@ -2466,6 +2616,8 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { params.ForkHeights.DAOCoinLimitOrderBlockHeight = uint32(0) params.ForkHeights.OrderBookDBFetchOptimizationBlockHeight = uint32(0) params.ForkHeights.BuyNowAndNFTSplitsBlockHeight = uint32(0) + params.ForkHeights.DeSoUnlimitedDerivedKeysBlockHeight = uint32(0) + params.ForkHeights.DerivedKeyEthSignatureCompatibilityBlockHeight = uint32(0) params.ExtraRegtestParamUpdaterKeys[MakePkMapKey(paramUpdaterPkBytes)] = true @@ -2479,10 +2631,8 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { _, err = miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) require.NoError(err) - // We take the block tip to be the blockchain 
height rather than the - // header chain height. - savedHeight := chain.blockTip().Height + 1 // We build the testMeta obj after mining blocks so that we save the correct block height. + // We take the block tip to be the blockchain height rather than the header chain height. testMeta := &TestMeta{ t: t, chain: chain, @@ -2490,26 +2640,29 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { db: db, mempool: mempool, miner: miner, - savedHeight: savedHeight, + savedHeight: chain.blockTip().Height + 1, } - _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m0Pub, senderPrivString, 100) - _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m1Pub, senderPrivString, 100) - _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m2Pub, senderPrivString, 100) - _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m3Pub, senderPrivString, 100) - _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m4Pub, senderPrivString, 100) + _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m0Pub, senderPrivString, 1000) + _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m1Pub, senderPrivString, 1000) + _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m2Pub, senderPrivString, 1000) + _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m3Pub, senderPrivString, 1000) + _registerOrTransferWithTestMeta(testMeta, "", senderPkString, m4Pub, senderPrivString, 1000) _registerOrTransferWithTestMeta(testMeta, "", senderPkString, paramUpdaterPub, senderPrivString, 100) - m0Balance := 100 - m1Balance := 100 - m2Balance := 100 - m3Balance := 100 - m4Balance := 100 - paramUpdaterBalance := 100 + m0Balance := 1000 + m1Balance := 1000 + m2Balance := 1000 + m3Balance := 1000 + m4Balance := 1000 + paramUpdaterBalance := 1000 + expirationBlockHeight := uint64(100) _, _, _, _, _, _ = m0Balance, m1Balance, m2Balance, m3Balance, m4Balance, paramUpdaterBalance // Create profiles for M0 and M1 
// Create a profile for m0 + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) { _doTxnWithTestMeta( testMeta, @@ -2527,9 +2680,12 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { IsHidden: false, }, nil, + blockHeight+1, ) // Create a profile for m1 + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2546,9 +2702,11 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { IsHidden: false, }, nil, + blockHeight+1, ) } +REPEAT: utxoView, err := mempool.GetAugmentedUniversalView() require.NoError(err) m1PrivKeyBytes, _, err := Base58CheckDecode(m1Priv) @@ -2571,8 +2729,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { //transactionSpendingLimit.TransactionCountLimitMap[TxnTypeDAOCoinTransfer] = 1 transactionSpendingLimit.DAOCoinOperationLimitMap[MakeDAOCoinOperationLimitKey(*m1PKID, MintDAOCoinOperation)] = 1 transactionSpendingLimit.DAOCoinOperationLimitMap[MakeDAOCoinOperationLimitKey(*m1PKID, TransferDAOCoinOperation)] = 1 + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) authTxnMeta, derivedPriv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( - t, m1PrivateKey, 6, transactionSpendingLimit, false) + t, m1PrivateKey, expirationBlockHeight, transactionSpendingLimit, false, blockHeight+1) derivedPrivBase58Check := Base58CheckEncode(derivedPriv.Serialize(), true, params) { extraData := make(map[string]interface{}) @@ -2586,11 +2746,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeAuthorizeDerivedKey, authTxnMeta, extraData, + blockHeight+1, ) } // Derived key for M1 mints 100 M1 DAO coins { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2605,11 +2768,33 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { CoinsToBurnNanos: *uint256.NewInt(), }, 
nil, + blockHeight+1, + ) + + // Attempting to mint DAO again should throw an error because we only authorized 1 mint. + _, _, _, err = _doTxn( + testMeta, + 10, + m1Pub, + derivedPrivBase58Check, + true, + TxnTypeDAOCoin, + &DAOCoinMetadata{ + ProfilePublicKey: m1PkBytes, + OperationType: DAOCoinOperationTypeMint, + CoinsToMintNanos: *uint256.NewInt().SetUint64(100 * NanosPerUnit), + CoinsToBurnNanos: *uint256.NewInt(), + }, + nil, + blockHeight+1, ) + require.Contains(err.Error(), RuleErrorDerivedKeyDAOCoinOperationNotAuthorized) } // Derived key for M1 transfers 10 M1 DAO Coins to M0 { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2623,11 +2808,156 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { DAOCoinToTransferNanos: *uint256.NewInt().SetUint64(10 * NanosPerUnit), }, nil, + blockHeight+1, + ) + + // Attempting to transfer DAO again should throw an error because we only authorized 1 transfer. + _, _, _, err = _doTxn( + testMeta, + 10, + m1Pub, + derivedPrivBase58Check, + true, + TxnTypeDAOCoinTransfer, + &DAOCoinTransferMetadata{ + ProfilePublicKey: m1PkBytes, + ReceiverPublicKey: m0PkBytes, + DAOCoinToTransferNanos: *uint256.NewInt().SetUint64(10 * NanosPerUnit), + }, + nil, + blockHeight+1, + ) + require.Contains(err.Error(), RuleErrorDerivedKeyDAOCoinOperationNotAuthorized) + } + + // Randomly try changing the spending limit on the derived key to an unlimited key. + { + // Get the mempool's utxoview and get the derived key bytes. + utxoView, err := mempool.GetAugmentedUniversalView() + require.NoError(err) + derivedPrivBytes, _, err := Base58CheckDecode(derivedPrivBase58Check) + _, derivedPub := btcec.PrivKeyFromBytes(btcec.S256(), derivedPrivBytes) + derivedPubBytes := derivedPub.SerializeCompressed() + require.NoError(err) + + // Persist the existing spending limit on the derived key. 
+ prevDerivedKeyEntry := utxoView.GetDerivedKeyMappingForOwner(m1PkBytes, derivedPubBytes) + require.NotNil(prevDerivedKeyEntry) + require.Equal(false, prevDerivedKeyEntry.isDeleted) + prevTransactionSpendingLimit := &TransactionSpendingLimit{} + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) + prevTransactionSpendingLimitBytes, err := prevDerivedKeyEntry.TransactionSpendingLimitTracker.ToBytes(blockHeight + 1) + rr := bytes.NewReader(prevTransactionSpendingLimitBytes) + err = prevTransactionSpendingLimit.FromBytes(blockHeight+1, rr) + require.NoError(err) + + // Unlimited spending limit. + transactionSpendingLimit = &TransactionSpendingLimit{ + GlobalDESOLimit: 0, + TransactionCountLimitMap: make(map[TxnType]uint64), + CreatorCoinOperationLimitMap: make(map[CreatorCoinOperationLimitKey]uint64), + DAOCoinOperationLimitMap: make(map[DAOCoinOperationLimitKey]uint64), + NFTOperationLimitMap: make(map[NFTOperationLimitKey]uint64), + IsUnlimited: true, + } + + // Authorize the unlimited derived key + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + reauthTxnMeta, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m1PrivateKey, expirationBlockHeight, transactionSpendingLimit, derivedPriv, false, blockHeight+1) + extraData := make(map[string]interface{}) + extraData[TransactionSpendingLimitKey] = transactionSpendingLimit + // Use EncoderBlockHeight 1 to make sure we use the new spending limit encoding. + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + _doTxnWithTestMetaWithBlockHeight( + testMeta, + 10, + m1Pub, + m1Priv, + false, + TxnTypeAuthorizeDerivedKey, + reauthTxnMeta, + extraData, + blockHeight+1, + ) + + // Attempting to transfer should now pass because the key has unlimited permissions. 
+ blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + err = _doTxnWithTextMetaWithBlockHeightWithError( + testMeta, + 10, + m1Pub, + derivedPrivBase58Check, + true, + TxnTypeDAOCoinTransfer, + &DAOCoinTransferMetadata{ + ProfilePublicKey: m1PkBytes, + ReceiverPublicKey: m0PkBytes, + DAOCoinToTransferNanos: *uint256.NewInt().SetUint64(10 * NanosPerUnit), + }, + nil, + blockHeight+1, + ) + if blockHeight+1 < uint64(unlimitedDerivedKeysBlockHeight) { + require.Contains(err.Error(), RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit) + } else { + require.NoError(err) + } + + // Now try to mint some DAO coins, it should pass too. + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + err = _doTxnWithTextMetaWithBlockHeightWithError( + testMeta, + 10, + m1Pub, + derivedPrivBase58Check, + true, + TxnTypeDAOCoin, + &DAOCoinMetadata{ + ProfilePublicKey: m1PkBytes, + OperationType: DAOCoinOperationTypeMint, + CoinsToMintNanos: *uint256.NewInt().SetUint64(100 * NanosPerUnit), + CoinsToBurnNanos: *uint256.NewInt(), + }, + nil, + blockHeight+1, + ) + if blockHeight+1 < uint64(unlimitedDerivedKeysBlockHeight) { + require.Contains(err.Error(), RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit) + } else { + require.NoError(err) + } + + // Revert to the previous spending limit. + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + reauthTxnMeta, _ = _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m1PrivateKey, expirationBlockHeight, prevTransactionSpendingLimit, derivedPriv, false, blockHeight+1) + extraData = make(map[string]interface{}) + extraData[TransactionSpendingLimitKey] = prevTransactionSpendingLimit + // Use EncoderBlockHeight 1 to make sure we use the new spending limit encoding. 
+ _doTxnWithTestMetaWithBlockHeight( + testMeta, + 10, + m1Pub, + m1Priv, + false, + TxnTypeAuthorizeDerivedKey, + reauthTxnMeta, + extraData, + blockHeight+1, ) } // Now the derived key can't do anything else for M1 DAO coin { + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -2641,6 +2971,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TransferRestrictionStatus: TransferRestrictionStatusProfileOwnerOnly, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyDAOCoinOperationNotAuthorized) @@ -2663,12 +2994,17 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { // This time we allow any operation 10x newTransactionSpendingLimit.DAOCoinOperationLimitMap[MakeDAOCoinOperationLimitKey(*m1PKID, AnyDAOCoinOperation)] = 10 newTransactionSpendingLimit.DAOCoinOperationLimitMap[MakeDAOCoinOperationLimitKey(*m1PKID, UpdateTransferRestrictionStatusDAOCoinOperation)] = 0 - newAuthTxnMeta, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey(t, m1PrivateKey, 6, newTransactionSpendingLimit, derivedPriv, false) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + newAuthTxnMeta, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m1PrivateKey, expirationBlockHeight, newTransactionSpendingLimit, derivedPriv, false, blockHeight+1) // Okay so let's update the derived key, but now let's let the derived key do any operation on our DAO coin { extraData := make(map[string]interface{}) extraData[TransactionSpendingLimitKey] = newTransactionSpendingLimit + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2678,11 +3014,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeAuthorizeDerivedKey, newAuthTxnMeta, extraData, + blockHeight+1, ) } // 
Updating the transfer restriction status should work - { + if testStage == TestStageBeforeUnlimitedDerivedBlockHeight { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2696,11 +3035,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TransferRestrictionStatus: TransferRestrictionStatusProfileOwnerOnly, }, nil, + blockHeight+1, ) } // Burning some DAO coins should work { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2714,6 +3056,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { CoinsToBurnNanos: *uint256.NewInt().SetUint64(10 * NanosPerUnit), }, nil, + blockHeight+1, ) } @@ -2727,13 +3070,18 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { m0PrivKeyBytes, _, err := Base58CheckDecode(m0Priv) m0PrivateKey, _ := btcec.PrivKeyFromBytes(btcec.S256(), m0PrivKeyBytes) - m0AuthTxnMeta, derived0Priv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit(t, m0PrivateKey, 6, m0TransactionSpendingLimit, false) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + m0AuthTxnMeta, derived0Priv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( + t, m0PrivateKey, expirationBlockHeight, m0TransactionSpendingLimit, false, blockHeight+1) derived0PrivBase58Check := Base58CheckEncode(derived0Priv.Serialize(), true, params) derived0PublicKeyBase58Check := Base58CheckEncode(m0AuthTxnMeta.DerivedPublicKey, false, params) // Okay let's have M0 authorize a derived key that doesn't allow anything to show errors { extraData := make(map[string]interface{}) extraData[TransactionSpendingLimitKey] = m0TransactionSpendingLimit + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2743,10 +3091,13 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { 
TxnTypeAuthorizeDerivedKey, m0AuthTxnMeta, extraData, + blockHeight+1, ) } { + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -2760,6 +3111,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { DeSoToSellNanos: 10, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit) @@ -2768,12 +3120,16 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { // Okay so now we update the derived key to have enough DESO to do this, but don't give it the ability to perform // any creator coin transactions m0TransactionSpendingLimit.GlobalDESOLimit = 15 + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) m0AuthTxnMetaWithSpendingLimitTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( - t, m0PrivateKey, 6, m0TransactionSpendingLimit, derived0Priv, false) + t, m0PrivateKey, expirationBlockHeight, m0TransactionSpendingLimit, derived0Priv, false, blockHeight+1) { extraData := make(map[string]interface{}) extraData[TransactionSpendingLimitKey] = m0TransactionSpendingLimit + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2783,10 +3139,13 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeAuthorizeDerivedKey, m0AuthTxnMetaWithSpendingLimitTxn, extraData, + blockHeight+1, ) } { + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -2800,6 +3159,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { DeSoToSellNanos: 10, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyCreatorCoinOperationNotAuthorized) @@ -2808,11 +3168,16 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { // Okay so now we update 
the derived key to have enough DESO to do this, but don't give it the ability to perform // any creator coin transactions m0TransactionSpendingLimit.TransactionCountLimitMap[TxnTypeCreatorCoin] = 1 - m0AuthTxnMetaWithCCTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey(t, m0PrivateKey, 6, m0TransactionSpendingLimit, derived0Priv, false) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + m0AuthTxnMetaWithCCTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, m0TransactionSpendingLimit, derived0Priv, false, blockHeight+1) { extraData := make(map[string]interface{}) extraData[TransactionSpendingLimitKey] = m0TransactionSpendingLimit + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2822,10 +3187,13 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeAuthorizeDerivedKey, m0AuthTxnMetaWithCCTxn, extraData, + blockHeight+1, ) } { + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -2839,20 +3207,118 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { DeSoToSellNanos: 10, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyCreatorCoinOperationNotAuthorized) } + // Randomly try changing the spending limit on the derived key to an unlimited key. + { + // Get the mempool's utxoview and get the derived key bytes. + utxoView, err := mempool.GetAugmentedUniversalView() + require.NoError(err) + derivedPub := derived0Priv.PubKey() + derivedPubBytes := derivedPub.SerializeCompressed() + require.NoError(err) + + // Persist the existing spending limit on the derived key. 
+ prevDerivedKeyEntry := utxoView.GetDerivedKeyMappingForOwner(m0PkBytes, derivedPubBytes) + require.NotNil(prevDerivedKeyEntry) + require.Equal(false, prevDerivedKeyEntry.isDeleted) + prevTransactionSpendingLimit := &TransactionSpendingLimit{} + prevTransactionSpendingLimitBytes, err := prevDerivedKeyEntry.TransactionSpendingLimitTracker.ToBytes(1) + rr := bytes.NewReader(prevTransactionSpendingLimitBytes) + err = prevTransactionSpendingLimit.FromBytes(1, rr) + require.NoError(err) + + // Unlimited spending limit. + transactionSpendingLimit = &TransactionSpendingLimit{ + GlobalDESOLimit: 0, + TransactionCountLimitMap: make(map[TxnType]uint64), + CreatorCoinOperationLimitMap: make(map[CreatorCoinOperationLimitKey]uint64), + DAOCoinOperationLimitMap: make(map[DAOCoinOperationLimitKey]uint64), + NFTOperationLimitMap: make(map[NFTOperationLimitKey]uint64), + IsUnlimited: true, + } + + // Authorize the unlimited derived key + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) + reauthTxnMeta, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, transactionSpendingLimit, derived0Priv, false, blockHeight+1) + extraData := make(map[string]interface{}) + extraData[TransactionSpendingLimitKey] = transactionSpendingLimit + // Use EncoderBlockHeight 1 to make sure we use the new spending limit encoding. 
+ _doTxnWithTestMetaWithBlockHeight( + testMeta, + 10, + m0Pub, + m0Priv, + false, + TxnTypeAuthorizeDerivedKey, + reauthTxnMeta, + extraData, + blockHeight+1, + ) + + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + err = _doTxnWithTextMetaWithBlockHeightWithError( + testMeta, + 10, + m0Pub, + derived0PrivBase58Check, + true, + TxnTypeCreatorCoin, + &CreatorCoinMetadataa{ + ProfilePublicKey: m1PkBytes, + OperationType: CreatorCoinOperationTypeBuy, + DeSoToSellNanos: 10, + }, + nil, + blockHeight+1, + ) + if blockHeight+1 < uint64(unlimitedDerivedKeysBlockHeight) { + require.Contains(err.Error(), RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit) + } else { + require.NoError(err) + } + + // Revert to the previous spending limit. + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + reauthTxnMeta, _ = _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, prevTransactionSpendingLimit, derived0Priv, false, blockHeight+1) + extraData = make(map[string]interface{}) + extraData[TransactionSpendingLimitKey] = prevTransactionSpendingLimit + // Use EncoderBlockHeight 1 to make sure we use the new spending limit encoding. 
+ _doTxnWithTestMetaWithBlockHeight( + testMeta, + 10, + m0Pub, + m0Priv, + false, + TxnTypeAuthorizeDerivedKey, + reauthTxnMeta, + extraData, + blockHeight+1, + ) + } // Okay now let's just let this derived key do his single transaction, but then it won't be able to do anything else // Okay so now we update the derived key to have enough DESO to do this, but don't give it the ability to perform // any creator coin transactions m0TransactionSpendingLimit.CreatorCoinOperationLimitMap[MakeCreatorCoinOperationLimitKey(*m1PKID, BuyCreatorCoinOperation)] = 1 m0TransactionSpendingLimit.TransactionCountLimitMap = map[TxnType]uint64{} - m0AuthTxnMetaWithCCOpTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey(t, m0PrivateKey, 6, m0TransactionSpendingLimit, derived0Priv, false) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + m0AuthTxnMetaWithCCOpTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, m0TransactionSpendingLimit, derived0Priv, false, blockHeight+1) { extraData := make(map[string]interface{}) extraData[TransactionSpendingLimitKey] = m0TransactionSpendingLimit + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2862,11 +3328,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeAuthorizeDerivedKey, m0AuthTxnMetaWithCCOpTxn, extraData, + blockHeight+1, ) } // Derived Key tries to spend more than global deso limit { + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -2880,15 +3349,18 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { DeSoToSellNanos: 25, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit) } { - derivedKeyEntry := DBGetOwnerToDerivedKeyMapping(db, 
chain.snapshot, *NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) + derivedKeyEntry := dbAdapter.GetOwnerToDerivedKeyMapping(*NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) require.Equal(derivedKeyEntry.TransactionSpendingLimitTracker.GlobalDESOLimit, uint64(15)) require.Equal(derivedKeyEntry.TransactionSpendingLimitTracker.CreatorCoinOperationLimitMap[MakeCreatorCoinOperationLimitKey(*m1PKID, BuyCreatorCoinOperation)], uint64(1)) + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2902,9 +3374,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { DeSoToSellNanos: 10, }, nil, + blockHeight+1, ) // Let's confirm that the global deso limit has been reduced on the tracker - derivedKeyEntry = DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) + derivedKeyEntry = dbAdapter.GetOwnerToDerivedKeyMapping(*NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) require.Equal(derivedKeyEntry.TransactionSpendingLimitTracker.GlobalDESOLimit, uint64(4)) // 15 - (10 + 1) (CC buy + fee) require.Equal(derivedKeyEntry.TransactionSpendingLimitTracker.CreatorCoinOperationLimitMap[MakeCreatorCoinOperationLimitKey(*m1PKID, BuyCreatorCoinOperation)], uint64(0)) } @@ -2915,6 +3388,8 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { var bodyBytes []byte bodyBytes, err = json.Marshal(&DeSoBodySchema{Body: "test NFT"}) require.NoError(err) + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2927,11 +3402,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TimestampNanos: uint64(time.Now().UnixNano()), }, nil, + blockHeight+1, ) post1Hash = testMeta.txns[len(testMeta.txns)-1].Hash() } { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) 
_doTxnWithTestMeta( testMeta, 10, @@ -2943,9 +3421,12 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ MaxCopiesPerNFTKey: int64(1000), }, + blockHeight+1, ) } { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) require.NotNil(post1Hash) _doTxnWithTestMeta( testMeta, @@ -2962,6 +3443,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ BuyNowPriceKey: uint64(5), }, + blockHeight+1, ) } @@ -2976,15 +3458,20 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { MakeNFTOperationLimitKey(*post1Hash, 1, NFTBidOperation): 1, }, } + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) m0AuthTxnMetaWithNFTBidOpTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( t, m0PrivateKey, - 6, + expirationBlockHeight, nftBidSpendingLimit, derived0Priv, false, + blockHeight+1, ) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -2996,11 +3483,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ TransactionSpendingLimitKey: nftBidSpendingLimit, }, + blockHeight+1, ) } // Derived key tries to buy now, but fails { + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -3014,6 +3504,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { BidAmountNanos: 5, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit) @@ -3023,7 +3514,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { globalDESOSpendingLimit := &TransactionSpendingLimit{ GlobalDESOLimit: 6, } - m0AuthTxnMetaWithGlobalDESOLimitTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey(t, m0PrivateKey, 
6, globalDESOSpendingLimit, derived0Priv, false) + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) + m0AuthTxnMetaWithGlobalDESOLimitTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, globalDESOSpendingLimit, derived0Priv, false, blockHeight+1) _doTxnWithTestMeta( testMeta, @@ -3036,10 +3530,13 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ TransactionSpendingLimitKey: globalDESOSpendingLimit, }, + blockHeight+1, ) } // Derived key can buy { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -3053,9 +3550,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { BidAmountNanos: 5, }, nil, + blockHeight+1, ) // Let's confirm that the global deso limit has been reduced on the tracker - derivedKeyEntry := DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) + derivedKeyEntry := dbAdapter.GetOwnerToDerivedKeyMapping(*NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) require.Equal(derivedKeyEntry.TransactionSpendingLimitTracker.GlobalDESOLimit, uint64(0)) // 6 - (5 + 1) (Buy Now Price + fee) require.Equal(derivedKeyEntry.TransactionSpendingLimitTracker. 
@@ -3072,7 +3570,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeCreateNFT: 1, }, } - m0AuthTxnMetaWithGlobalDESOLimitTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey(t, m0PrivateKey, 6, globalDESOSpendingLimit, derived0Priv, false) + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) + m0AuthTxnMetaWithGlobalDESOLimitTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, globalDESOSpendingLimit, derived0Priv, false, blockHeight+1) _doTxnWithTestMeta( testMeta, @@ -3085,11 +3586,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ TransactionSpendingLimitKey: globalDESOSpendingLimit, }, + blockHeight+1, ) } // Derived Key can mint NFT { + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -3102,6 +3606,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TimestampNanos: uint64(time.Now().UnixNano()), }, nil, + blockHeight+1, ) nftPostHash := testMeta.txns[len(testMeta.txns)-1].Hash() _doTxnWithTestMeta( @@ -3117,6 +3622,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { IsForSale: true, }, nil, + blockHeight+1, ) } @@ -3124,8 +3630,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { _registerOrTransferWithTestMeta(testMeta, "", senderPkString, derived0PublicKeyBase58Check, senderPrivString, 100) // Derived key can spend its own money { - derivedKeyEntryBefore := DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) + derivedKeyEntryBefore := dbAdapter.GetOwnerToDerivedKeyMapping(*NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) 
require.Equal(derivedKeyEntryBefore.TransactionSpendingLimitTracker.TransactionCountLimitMap[TxnTypeBasicTransfer], uint64(0)) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -3137,8 +3645,10 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ BasicTransferAmount: uint64(10), BasicTransferRecipient: m0PkBytes, - }) - derivedKeyEntryAfter := DBGetOwnerToDerivedKeyMapping(db, chain.snapshot, *NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) + }, + blockHeight+1, + ) + derivedKeyEntryAfter := dbAdapter.GetOwnerToDerivedKeyMapping(*NewPublicKey(m0PkBytes), *NewPublicKey(m0AuthTxnMeta.DerivedPublicKey)) require.Equal(derivedKeyEntryBefore.TransactionSpendingLimitTracker.GlobalDESOLimit, derivedKeyEntryAfter.TransactionSpendingLimitTracker.GlobalDESOLimit) } @@ -3155,6 +3665,8 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { OperationType: DAOCoinLimitOrderOperationTypeBID, FillType: DAOCoinLimitOrderFillTypeGoodTillCancelled, } + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -3164,6 +3676,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeDAOCoinLimitOrder, metadata, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyDAOCoinLimitOrderNotAuthorized) @@ -3174,9 +3687,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { MakeDAOCoinLimitOrderLimitKey(*m1PKID, ZeroPKID): 1, }, } - m0AuthTxnMetaWithGlobalDESOLimitTxn, _ := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey(t, m0PrivateKey, 6, globalDESOSpendingLimit, derived0Priv, false) + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) + m0AuthTxnMetaWithGlobalDESOLimitTxn, _ := 
_getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( + t, m0PrivateKey, expirationBlockHeight, globalDESOSpendingLimit, derived0Priv, false, blockHeight+1) // Authorize derived key with a Limit Order spending limit of 1 + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -3188,11 +3706,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ TransactionSpendingLimitKey: globalDESOSpendingLimit, }, + blockHeight+1, ) // Submitting a Limit Order with the buyer and seller reversed won't work. metadata.BuyingDAOCoinCreatorPublicKey = &ZeroPublicKey metadata.SellingDAOCoinCreatorPublicKey = NewPublicKey(m1PkBytes) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -3202,6 +3723,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeDAOCoinLimitOrder, metadata, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyDAOCoinLimitOrderNotAuthorized) @@ -3209,6 +3731,8 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { // Submitting with the authorized buyer and seller should work metadata.SellingDAOCoinCreatorPublicKey = &ZeroPublicKey metadata.BuyingDAOCoinCreatorPublicKey = NewPublicKey(m1PkBytes) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _doTxnWithTestMeta( testMeta, 10, @@ -3218,10 +3742,11 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { TxnTypeDAOCoinLimitOrder, metadata, nil, + blockHeight+1, ) var orders []*DAOCoinLimitOrderEntry - orders, err = DBGetAllDAOCoinLimitOrders(db) + orders, err = dbAdapter.GetAllDAOCoinLimitOrders() require.NoError(err) require.Len(orders, 1) require.Equal(*orders[0], DAOCoinLimitOrderEntry{ @@ -3231,7 +3756,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { 
SellingDAOCoinCreatorPKID: &ZeroPKID, ScaledExchangeRateCoinsToSellPerCoinToBuy: metadata.ScaledExchangeRateCoinsToSellPerCoinToBuy, QuantityToFillInBaseUnits: metadata.QuantityToFillInBaseUnits, - BlockHeight: savedHeight, + BlockHeight: testMeta.savedHeight, OperationType: DAOCoinLimitOrderOperationTypeBID, FillType: DAOCoinLimitOrderFillTypeGoodTillCancelled, }) @@ -3239,6 +3764,8 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { // Cancelling an order should fail with an authorization failure error code if the derived key isn't authorized // to trade the buying and selling coins orderID := *orders[0].OrderID + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) _, _, _, err = _doTxn( testMeta, 10, @@ -3250,18 +3777,22 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { CancelOrderID: &orderID, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyDAOCoinLimitOrderNotAuthorized) // Re-authorize the derived key with a spending limit of 1 for the buying and selling coins + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) m0AuthTxnMetaWithGlobalDESOLimitTxn, _ = _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimitAndDerivedPrivateKey( t, m0PrivateKey, - 6, + expirationBlockHeight, globalDESOSpendingLimit, derived0Priv, false, + blockHeight+1, ) _doTxnWithTestMeta( testMeta, @@ -3274,6 +3805,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { map[string]interface{}{ TransactionSpendingLimitKey: globalDESOSpendingLimit, }, + blockHeight+1, ) // Cancelling an existing order using CancelOrderID should work if the derived key is authorized for the @@ -3289,8 +3821,9 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { CancelOrderID: &orderID, }, nil, + blockHeight+1, ) - orders, err = DBGetAllDAOCoinLimitOrders(db) + orders, err = dbAdapter.GetAllDAOCoinLimitOrders() 
require.NoError(err) require.Len(orders, 0) @@ -3307,6 +3840,7 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { CancelOrderID: &orderID, }, nil, + blockHeight+1, ) require.Error(err) require.Contains(err.Error(), RuleErrorDerivedKeyInvalidDAOCoinLimitOrderOrderID) @@ -3315,11 +3849,14 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { // M0 deauthorizes the derived key { emptyTransactionSpendingLimit := &TransactionSpendingLimit{} - accessSignature, err := _getAccessSignature(m0AuthTxnMeta.DerivedPublicKey, 6, emptyTransactionSpendingLimit, m0PrivateKey) + blockHeight, err = GetBlockTipHeight(db, false) + require.NoError(err) + accessSignature, err := _getAccessSignature( + m0AuthTxnMeta.DerivedPublicKey, expirationBlockHeight, emptyTransactionSpendingLimit, m0PrivateKey, blockHeight+1) require.NoError(err) metadata := &AuthorizeDerivedKeyMetadata{ DerivedPublicKey: m0AuthTxnMeta.DerivedPublicKey, - ExpirationBlock: 6, + ExpirationBlock: expirationBlockHeight, OperationType: AuthorizeDerivedKeyOperationNotValid, AccessSignature: accessSignature, } @@ -3333,9 +3870,38 @@ func TestAuthorizedDerivedKeyWithTransactionLimitsHardcore(t *testing.T) { metadata, map[string]interface{}{ TransactionSpendingLimitKey: emptyTransactionSpendingLimit, - }) + }, + blockHeight+1, + ) + } + + _rollBackTestMetaTxnsAndFlush(testMeta) + _applyTestMetaTxnsToMempool(testMeta) + _applyTestMetaTxnsToViewAndFlush(testMeta) + _disconnectTestMetaTxnsFromViewAndFlush(testMeta) + _, err = testMeta.miner.MineAndProcessSingleBlock(0 /*threadIndex*/, testMeta.mempool) + require.NoError(err) + + testMeta.txnOps = [][]*UtxoOperation{} + testMeta.txns = []*MsgDeSoTxn{} + testMeta.expectedSenderBalances = []uint64{} + if testStage == TestStageBeforeUnlimitedDerivedBlockHeight { + // Mine block until we reach the unlimited spending limit block height. 
+ for chain.blockTip().Height+1 < unlimitedDerivedKeysBlockHeight { + _, err = testMeta.miner.MineAndProcessSingleBlock(0 /*threadIndex*/, testMeta.mempool) + require.NoError(err) + } + testStage = TestStageAtUnlimitedDerivedBlockHeight + } else if testStage == TestStageAtUnlimitedDerivedBlockHeight { + // Mine a block to be above the unlimited derived keys block height. + _, err = testMeta.miner.MineAndProcessSingleBlock(0 /*threadIndex*/, testMeta.mempool) + require.NoError(err) + testStage = TestStageAfterUnlimitedDerivedBlockHeight } + testMeta.savedHeight = chain.blockTip().Height + 1 - // Roll all successful txns through connect and disconnect loops to make sure nothing breaks. + if testStage != TestStageAfterUnlimitedDerivedBlockHeight { + goto REPEAT + } _executeAllTestRollbackAndFlush(testMeta) } diff --git a/lib/block_view_flush.go b/lib/block_view_flush.go index adf1bfcc4..bf804cfc9 100644 --- a/lib/block_view_flush.go +++ b/lib/block_view_flush.go @@ -13,7 +13,7 @@ func (bav *UtxoView) FlushToDb(blockHeight uint64) error { // Make sure everything happens inside a single transaction. var err error if bav.Postgres != nil { - err = bav.Postgres.FlushView(bav) + err = bav.Postgres.FlushView(bav, blockHeight) if err != nil { return err } diff --git a/lib/block_view_message.go b/lib/block_view_message.go index dbcc20b53..6c3a58c95 100644 --- a/lib/block_view_message.go +++ b/lib/block_view_message.go @@ -742,6 +742,12 @@ func (bav *UtxoView) _connectMessagingGroup( RuleErrorMessagingKeyBeforeBlockHeight, "_connectMessagingGroup: "+ "Problem connecting messaging key, too early block height") } + + // Check that the transaction has the right TxnType. 
+ if txn.TxnMeta.GetTxnType() != TxnTypeMessagingGroup { + return 0, 0, nil, fmt.Errorf("_connectMessagingGroup: called with bad TxnType %s", + txn.TxnMeta.GetTxnType().String()) + } txMeta := txn.TxnMeta.(*MessagingGroupMetadata) // If the key name is just a list of 0s, then return because this name is reserved for the base key. @@ -771,14 +777,19 @@ func (bav *UtxoView) _connectMessagingGroup( // We now have a valid messaging public key, key name, and owner public key. // The hard-coded default key is only intended to be registered by the owner, so we will require a signature. - if EqualGroupKeyName(NewGroupKeyName(txMeta.MessagingGroupKeyName), DefaultGroupKeyName()) { - // Verify the GroupOwnerSignature. it should be signature( messagingPublicKey || messagingKeyName ) - // We need to make sure the default messaging key was authorized by the master public key. - // All other keys can be registered by derived keys. - bytes := append(txMeta.MessagingPublicKey, txMeta.MessagingGroupKeyName...) - if err := _verifyBytesSignature(txn.PublicKey, bytes, txMeta.GroupOwnerSignature, blockHeight, bav.Params); err != nil { - return 0, 0, nil, errors.Wrapf(err, "_connectMessagingGroup: "+ - "Problem verifying signature bytes, error: %v", RuleErrorMessagingSignatureInvalid) + // + // Note that we decided to relax this constraint after the fork height. Why? Because keeping it would have + // required users to go through two confirmations when approving a key with MetaMask vs just one. + if blockHeight < bav.Params.ForkHeights.DeSoUnlimitedDerivedKeysBlockHeight { + if EqualGroupKeyName(NewGroupKeyName(txMeta.MessagingGroupKeyName), DefaultGroupKeyName()) { + // Verify the GroupOwnerSignature. it should be signature( messagingPublicKey || messagingKeyName ) + // We need to make sure the default messaging key was authorized by the master public key. + // All other keys can be registered by derived keys. 
+ bytes := append(txMeta.MessagingPublicKey, txMeta.MessagingGroupKeyName...) + if err := _verifyBytesSignature(txn.PublicKey, bytes, txMeta.GroupOwnerSignature, blockHeight, bav.Params); err != nil { + return 0, 0, nil, errors.Wrapf(err, "_connectMessagingGroup: "+ + "Problem verifying signature bytes, error: %v", RuleErrorMessagingSignatureInvalid) + } } } diff --git a/lib/block_view_profile.go b/lib/block_view_profile.go index 8d67550e8..59c6c9ecc 100644 --- a/lib/block_view_profile.go +++ b/lib/block_view_profile.go @@ -943,7 +943,7 @@ func _verifyBytesSignature(signer, data, signature []byte, blockHeight uint32, p if blockHeight >= params.ForkHeights.DerivedKeyEthSignatureCompatibilityBlockHeight { // Check if the provided signature is an Eth signature. - ethErr = _verifyEthPersonalSignature(signer, data, signature) + ethErr = VerifyEthPersonalSignature(signer, data, signature) if ethErr == nil { return nil } @@ -978,29 +978,27 @@ func TextAndHash(data []byte) ([]byte, string) { return hasher.Sum(nil), msg } -// _verifyEthPersonalSignature checks the signature assuming it follows Ethereum's personal_sign standard. This is used +// VerifyEthPersonalSignature checks the signature assuming it follows Ethereum's personal_sign standard. This is used // for the MetaMask DeSo integration. -func _verifyEthPersonalSignature(signer, data, signature []byte) error { +func VerifyEthPersonalSignature(signer, data, signature []byte) error { // Ethereum likes uncompressed public keys while we use compressed keys a lot. Make sure we have uncompressed pk bytes. 
var uncompressedSigner []byte pubKey, err := btcec.ParsePubKey(signer, btcec.S256()) if err != nil { - return errors.Wrapf(err, "_verifyEthPersonalSignature: Problem parsing signer public key") + return errors.Wrapf(err, "VerifyEthPersonalSignature: Problem parsing signer public key") } if len(signer) == btcec.PubKeyBytesLenCompressed { uncompressedSigner = pubKey.SerializeUncompressed() } else if len(signer) == btcec.PubKeyBytesLenUncompressed { uncompressedSigner = signer } else { - return fmt.Errorf("_verifyEthPersonalSignature: Public key has incorrect length. It should be either "+ + return fmt.Errorf("VerifyEthPersonalSignature: Public key has incorrect length. It should be either "+ "(%v) for compressed key or (%v) for uncompressed key", btcec.PubKeyBytesLenCompressed, btcec.PubKeyBytesLenUncompressed) } // Change the data bytes into Ethereum's personal_sign message standard. This will prepend the message prefix and hash - // the prepended message using keccak256. We turn data into a hex string and treat it as a character sequence which is - // how MetaMask treats it. - dataHex := hex.EncodeToString(data) - hash, _ := TextAndHash([]byte(dataHex)) + // the prepended message using keccak256. + hash, _ := TextAndHash(data) // Make sure signature has the correct length. If signature has 65 bytes then it contains the recovery ID, we can // slice it off since we already know the signer public key. @@ -1008,14 +1006,14 @@ func _verifyEthPersonalSignature(signer, data, signature []byte) error { if len(signature) == 64 || len(signature) == 65 { copy(formattedSignature, signature[:64]) } else { - return fmt.Errorf("_verifyEthPersonalSignature: Signature must be 64 or 65 bytes in size. Got (%v) instead", len(signature)) + return fmt.Errorf("VerifyEthPersonalSignature: Signature must be 64 or 65 bytes in size. Got (%v) instead", len(signature)) } // Now, verify the signature. 
if crypto.VerifySignature(uncompressedSigner, hash, formattedSignature) { return nil } else { - return fmt.Errorf("_verifyEthPersonalSignature: Signature verification failed") + return fmt.Errorf("VerifyEthPersonalSignature: Signature verification failed") } } diff --git a/lib/block_view_profile_test.go b/lib/block_view_profile_test.go index 9cc4073d7..ccfa41e13 100644 --- a/lib/block_view_profile_test.go +++ b/lib/block_view_profile_test.go @@ -1,6 +1,7 @@ package lib import ( + "bytes" "encoding/hex" "fmt" "github.com/dgraph-io/badger/v3" @@ -3267,21 +3268,45 @@ func TestEthSignature(t *testing.T) { require := require.New(t) _ = require + // Make sure encoder migrations are not triggered yet. + for ii := range GlobalDeSoParams.EncoderMigrationHeightsList { + if GlobalDeSoParams.EncoderMigrationHeightsList[ii].Version == 0 { + continue + } + GlobalDeSoParams.EncoderMigrationHeightsList[ii].Height = 1 + } + // This data was taken directly from MetaMask personal_sign. - signatureHex := "e1ddc8f4a6004439988a7578299856cdaa1a211e39ecbe57a500e1c3a65bb389779adf0472812fb35500e5b49ce679a3ed8b2cc4fac851e8783835bd7b82f0721c" - publicKeyHex := "04aaa44d617bae2fde81bd3e35857ac6e0358a39da4b62d8be0c94cb60def3f637641d9c5c5adb20e8561bbbbc4f271158871d530053bb917423846c8b482fd518" - message := []byte("message to sign") + signatureHex := "d1f84f38ce47c0ea6d67d0cf2c228dbb9f46aca12db514aaf7d8442978334e8f1547cd1999d9e84fe3f4ba3b92fc8d57bf982ebcab8227e94c7650f36c0dd7ad1b" + transactionSpendingLimitHex := "80c2d72f0305809f4909a08d06138aa41f0000000000" + expirationBlock := uint64(45639) + derivedPublicKeyBase58Check := "tBCKYV1wrcCgDhXY3ZnvcX3L1yrFNbYjYzTBFnkxHUcjM9vt84NpbT" + ownerPublicKeyBase58Check := "tBCKW6GJpevX6g9kfVz4opSb7gVsJMhES67z4k5Bntxnd7zHcHdgFM" // parse signature signature, err := hex.DecodeString(signatureHex) require.NoError(err) - // parse public key - publicKeyBytes, err := hex.DecodeString(publicKeyHex) + // parse spending limits + transactionSpendingLimit := 
&TransactionSpendingLimit{} + transactionSpendingLimitBytes, err := hex.DecodeString(transactionSpendingLimitHex) + require.NoError(err) + rr := bytes.NewReader(transactionSpendingLimitBytes) + // This error is fine because transaction should fail anyway if spending limit cannot be decoded. + require.NoError(transactionSpendingLimit.FromBytes(0, rr)) + + // parse derived public key + derivedPublicKeyBytes, _, err := Base58CheckDecode(derivedPublicKeyBase58Check) + require.NoError(err) + + // parse owner public key + ownerPublicKeyBytes, _, err := Base58CheckDecode(ownerPublicKeyBase58Check) require.NoError(err) + // assemble the message + accessBytes := AssembleAccessBytesWithMetamaskStrings(derivedPublicKeyBytes, expirationBlock, + transactionSpendingLimit, &DeSoTestnetParams) + // verify signature - _, _, _ = publicKeyBytes, message, signature - // TODO: replace the test case - //require.NoError(_verifyEthPersonalSignature(publicKeyBytes, message, signature)) + require.NoError(VerifyEthPersonalSignature(ownerPublicKeyBytes, accessBytes, signature)) } diff --git a/lib/block_view_test.go b/lib/block_view_test.go index c2d46af18..238c90164 100644 --- a/lib/block_view_test.go +++ b/lib/block_view_test.go @@ -1,11 +1,14 @@ package lib import ( + "encoding/hex" "fmt" + "github.com/btcsuite/btcd/btcec" "github.com/dgraph-io/badger/v3" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" _ "net/http/pprof" + "reflect" "testing" ) @@ -246,7 +249,8 @@ func _rollBackTestMetaTxnsAndFlush(testMeta *TestMeta) { err = utxoView.DisconnectTransaction(currentTxn, currentHash, currentOps, testMeta.savedHeight) require.NoError(testMeta.t, err) - require.NoError(testMeta.t, utxoView.FlushToDb(0)) + blockHeight := uint64(testMeta.chain.BlockTip().Height) + require.NoError(testMeta.t, utxoView.FlushToDb(blockHeight+1)) // After disconnecting, the balances should be restored to what they // were before this transaction was applied. 
@@ -290,7 +294,8 @@ func _applyTestMetaTxnsToViewAndFlush(testMeta *TestMeta) { require.NoError(testMeta.t, err) } // Flush the utxoView after having added all the transactions. - require.NoError(testMeta.t, utxoView.FlushToDb(0)) + blockHeight := uint64(testMeta.chain.BlockTip().Height) + require.NoError(testMeta.t, utxoView.FlushToDb(blockHeight+1)) } func _disconnectTestMetaTxnsFromViewAndFlush(testMeta *TestMeta) { @@ -308,11 +313,8 @@ func _disconnectTestMetaTxnsFromViewAndFlush(testMeta *TestMeta) { err = utxoView.DisconnectTransaction(currentTxn, currentHash, currentOps, testMeta.savedHeight) require.NoError(testMeta.t, err) } - require.NoError(testMeta.t, utxoView.FlushToDb(0)) - require.Equal( - testMeta.t, - testMeta.expectedSenderBalances[0], - _getBalance(testMeta.t, testMeta.chain, nil, senderPkString)) + blockHeight := uint64(testMeta.chain.BlockTip().Height) + require.NoError(testMeta.t, utxoView.FlushToDb(blockHeight)) } func _connectBlockThenDisconnectBlockAndFlush(testMeta *TestMeta) { @@ -337,10 +339,11 @@ func _connectBlockThenDisconnectBlockAndFlush(testMeta *TestMeta) { // Compute the hashes for all the transactions. txHashes, err := ComputeTransactionHashes(block.Txns) require.NoError(testMeta.t, err) - require.NoError(testMeta.t, utxoView.DisconnectBlock(block, txHashes, utxoOps, 0)) + blockHeight := uint64(testMeta.chain.BlockTip().Height) + require.NoError(testMeta.t, utxoView.DisconnectBlock(block, txHashes, utxoOps, blockHeight)) // Flushing the view after applying and rolling back should work. 
- require.NoError(testMeta.t, utxoView.FlushToDb(0)) + require.NoError(testMeta.t, utxoView.FlushToDb(blockHeight)) } } @@ -351,6 +354,7 @@ func TestUpdateGlobalParams(t *testing.T) { _, _ = assert, require chain, params, db := NewLowDifficultyBlockchain() + postgres := chain.postgres mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) _, _ = mempool, miner @@ -386,6 +390,7 @@ func TestUpdateGlobalParams(t *testing.T) { // Should pass when founder key is equal to moneyPk var updateGlobalParamsTxn *MsgDeSoTxn var err error + { newUSDCentsPerBitcoin := int64(270430 * 100) newMinimumNetworkFeeNanosPerKB := int64(191) @@ -404,7 +409,7 @@ func TestUpdateGlobalParams(t *testing.T) { false) require.NoError(err) - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, postgres, chain.snapshot) require.NoError(err) txnSize := getTxnSize(*updateGlobalParamsTxn) blockHeight := chain.blockTip().Height + 1 @@ -467,7 +472,7 @@ func TestUpdateGlobalParams(t *testing.T) { require.Equal(DbGetGlobalParamsEntry(db, chain.snapshot), expectedGlobalParams) // Now let's do a disconnect and make sure the values reflect the previous entry. - utxoView, err := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, err := NewUtxoView(db, params, postgres, chain.snapshot) require.NoError(err) blockHeight := chain.blockTip().Height + 1 utxoView.DisconnectTransaction( @@ -490,6 +495,7 @@ func TestBasicTransfer(t *testing.T) { _ = require chain, params, db := NewLowDifficultyBlockchain() + postgres := chain.postgres mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) // Mine two blocks to give the sender some DeSo. 
_, err := miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) @@ -530,7 +536,7 @@ func TestBasicTransfer(t *testing.T) { txn.PublicKey = recipientPkBytes _signTxn(t, txn, recipientPrivString) - utxoView, _ := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, _ := NewUtxoView(db, params, postgres, chain.snapshot) txHash := txn.Hash() blockHeight := chain.blockTip().Height + 1 _, _, _, _, err = @@ -564,7 +570,7 @@ func TestBasicTransfer(t *testing.T) { // Sign the transaction with the recipient's key rather than the // sender's key. _signTxn(t, txn, recipientPrivString) - utxoView, _ := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, _ := NewUtxoView(db, params, postgres, chain.snapshot) txHash := txn.Hash() blockHeight := chain.blockTip().Height + 1 _, _, _, _, err = @@ -591,7 +597,7 @@ func TestBasicTransfer(t *testing.T) { }, } _signTxn(t, txn, senderPrivString) - utxoView, _ := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, _ := NewUtxoView(db, params, postgres, chain.snapshot) txHash := txn.Hash() blockHeight := chain.blockTip().Height + 1 _, _, _, _, err = @@ -626,7 +632,7 @@ func TestBasicTransfer(t *testing.T) { require.Greater(totalInput, uint64(0)) _signTxn(t, txn, senderPrivString) - utxoView, _ := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, _ := NewUtxoView(db, params, postgres, chain.snapshot) txHash := txn.Hash() blockHeight := chain.blockTip().Height + 1 _, _, _, _, err = @@ -650,7 +656,7 @@ func TestBasicTransfer(t *testing.T) { txHashes, err := ComputeTransactionHashes(blockToMine.Txns) require.NoError(err) - utxoView, _ := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, _ := NewUtxoView(db, params, postgres, chain.snapshot) _, err = utxoView.ConnectBlock(blockToMine, txHashes, true /*verifySignatures*/, nil, 0) require.Error(err) require.Contains(err.Error(), RuleErrorBlockRewardExceedsMaxAllowed) @@ -666,8 +672,377 @@ func TestBasicTransfer(t *testing.T) { txHashes, err := 
ComputeTransactionHashes(blockToMine.Txns) require.NoError(err) - utxoView, _ := NewUtxoView(db, params, nil, chain.snapshot) + utxoView, _ := NewUtxoView(db, params, postgres, chain.snapshot) _, err = utxoView.ConnectBlock(blockToMine, txHashes, true /*verifySignatures*/, nil, 0) require.NoError(err) } } + +// TestBasicTransferSignatures thoroughly tests all possible ways to sign a DeSo transaction. +// There are three available signature schemas that are accepted by the DeSo blockchain: +// (1) Transaction signed by user's main public key +// (2) Transaction signed by user's derived key with "DerivedPublicKey" passed in ExtraData +// (3) Transaction signed by user's derived key using DESO-DER signature standard. +// +// We will try all these schemas while running three main tests scenarios: +// - try signing and processing a basicTransfer +// - try signing and processing a authorizeDerivedKey +// - try signing and processing a authorizeDerivedKey followed by a basicTransfer +// We use basicTransfer as a placeholder for a normal DeSo transaction (alternatively, we could have used a post, +// follow, nft, etc transaction). For each scenario we try signing the transaction with either user's main public +// key, a derived key, or a random key. Basically, we try every possible context in which a transaction can be signed. +func TestBasicTransferSignatures(t *testing.T) { + require := require.New(t) + _ = require + + chain, params, db := NewLowDifficultyBlockchain() + postgres := chain.postgres + params.ForkHeights.NFTTransferOrBurnAndDerivedKeysBlockHeight = uint32(0) + params.ForkHeights.DerivedKeySetSpendingLimitsBlockHeight = uint32(0) + params.ForkHeights.DerivedKeyTrackSpendingLimitsBlockHeight = uint32(0) + // Make sure encoder migrations are not triggered yet. 
+ GlobalDeSoParams = *params + GlobalDeSoParams.ForkHeights.DeSoUnlimitedDerivedKeysBlockHeight = uint32(100) + for ii := range GlobalDeSoParams.EncoderMigrationHeightsList { + if GlobalDeSoParams.EncoderMigrationHeightsList[ii].Version == 0 { + continue + } + GlobalDeSoParams.EncoderMigrationHeightsList[ii].Height = 100 + } + + _ = db + mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) + // Mine two blocks to give the sender some DeSo. + _, err := miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) + require.NoError(err) + _, err = miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) + require.NoError(err) + + senderPkBytes, _, err := Base58CheckDecode(senderPkString) + require.NoError(err) + senderPrivBytes, _, err := Base58CheckDecode(senderPrivString) + require.NoError(err) + senderPrivKey, _ := btcec.PrivKeyFromBytes(btcec.S256(), senderPrivBytes) + recipientPkBytes, _, err := Base58CheckDecode(recipientPkString) + require.NoError(err) + + // Construct an unsigned basic transfer transaction. + createTransaction := func() *MsgDeSoTxn { + txn := &MsgDeSoTxn{ + // The inputs will be set below. + TxInputs: []*DeSoInput{}, + TxOutputs: []*DeSoOutput{ + { + PublicKey: recipientPkBytes, + AmountNanos: 1, + }, + }, + PublicKey: senderPkBytes, + TxnMeta: &BasicTransferMetadata{}, + } + + totalInput, spendAmount, changeAmount, fees, err := + chain.AddInputsAndChangeToTransaction(txn, 10, mempool) + require.NoError(err) + require.Equal(totalInput, spendAmount+changeAmount+fees) + require.Greater(totalInput, uint64(0)) + return txn + } + + // Add a transaction to the mempool. + mempoolProcess := func(txn *MsgDeSoTxn) (_mempoolTxs []*MempoolTx, _err error) { + mempoolTxs, err := mempool.processTransaction(txn, true, true, 0, true) + if err != nil { + return nil, err + } + require.Equal(1, len(mempoolTxs)) + return mempoolTxs, err + } + + // Mine block with the latest mempool. 
Validate that the persisted transaction signatures match original transactions. + mineBlockAndVerifySignatures := func(allTxns []*MsgDeSoTxn) { + block, err := miner.MineAndProcessSingleBlock(0, mempool) + blockHash, err := block.Hash() + require.NoError(err) + require.NoError(err) + require.Equal(1+len(allTxns), len(block.Txns)) + for ii := 1; ii < len(block.Txns); ii++ { + txn := allTxns[ii-1] + transactionHash := allTxns[ii-1].Hash() + require.Equal(true, reflect.DeepEqual(transactionHash.ToBytes(), block.Txns[ii].Hash().ToBytes())) + + // Now fetch all transactions from the db and verify their signatures have been properly persisted. + if postgres != nil { + pgTxn := postgres.GetTransactionByHash(transactionHash) + require.Equal(true, reflect.DeepEqual(txn.Signature.Sign.R.Bytes(), HashToBigint(pgTxn.R).Bytes())) + require.Equal(true, reflect.DeepEqual(txn.Signature.Sign.S.Bytes(), HashToBigint(pgTxn.S).Bytes())) + require.Equal(txn.Signature.RecoveryId, byte(pgTxn.RecoveryId)) + require.Equal(txn.Signature.IsRecoverable, pgTxn.IsRecoverable) + } else { + dbBlock, err := GetBlock(blockHash, db, chain.Snapshot()) + require.NoError(err) + for _, blockTxn := range dbBlock.Txns { + if reflect.DeepEqual(transactionHash.ToBytes(), blockTxn.Hash().ToBytes()) { + require.Equal(true, reflect.DeepEqual(txn.Signature.Sign.R.Bytes(), blockTxn.Signature.Sign.R.Bytes())) + require.Equal(true, reflect.DeepEqual(txn.Signature.Sign.S.Bytes(), blockTxn.Signature.Sign.S.Bytes())) + require.Equal(txn.Signature.RecoveryId, blockTxn.Signature.RecoveryId) + require.Equal(txn.Signature.IsRecoverable, blockTxn.Signature.IsRecoverable) + } + } + } + } + } + + // Create a derived key transaction based on the provided spending limit. 
+ doDerivedKeyTransaction := func(transactionSpendingLimit *TransactionSpendingLimit) (derivedKeyTxn *MsgDeSoTxn, + derivedPrivateKey *btcec.PrivateKey) { + + extraData := make(map[string]interface{}) + extraData[TransactionSpendingLimitKey] = transactionSpendingLimit + blockHeight, err := GetBlockTipHeight(db, false) + require.NoError(err) + authTxnMeta, derivedPriv := _getAuthorizeDerivedKeyMetadataWithTransactionSpendingLimit( + t, senderPrivKey, 10, transactionSpendingLimit, false, blockHeight+1) + transactionSpendingLimitBytes, err := transactionSpendingLimit.ToBytes(blockHeight + 1) + require.NoError(err) + derivedKeyTxn, totalInput, changeAmount, fees, err := chain.CreateAuthorizeDerivedKeyTxn( + senderPkBytes, + authTxnMeta.DerivedPublicKey, + authTxnMeta.ExpirationBlock, + authTxnMeta.AccessSignature, + false, + false, + nil, + []byte{}, + hex.EncodeToString(transactionSpendingLimitBytes), + 10, + mempool, + nil, + ) + require.NoError(err) + require.Equal(totalInput, changeAmount+fees) + require.Greater(totalInput, uint64(0)) + require.NoError(err) + return derivedKeyTxn, derivedPriv + } + + // This function will try all possible signature schemes (1), (2), (3) given signer's private key and transaction + // generator function createTransaction (BasicTransfer) or derivedKeyTransaction (AuthorizeDerivedKey). TestVector + // expresses our expectation as to the errors we are supposed to get when trying to process a transaction signed + // with each respective signature scheme.
+ mempoolProcessAllSignatureCombinations := func( + createTransaction func() *MsgDeSoTxn, + derivedKeyTransaction func(*TransactionSpendingLimit) (*MsgDeSoTxn, *btcec.PrivateKey), + signaturePrivateKeyBase58 string, + transactionSpendingLimit *TransactionSpendingLimit, + testVector [3]RuleError) []*MsgDeSoTxn { + + var allTxns []*MsgDeSoTxn + processTxn := func(ii int, txn *MsgDeSoTxn) { + if testVector[ii].Error() == "" { + allTxns = append(allTxns, txn) + _, err = mempoolProcess(txn) + require.NoError(err) + } else { + _, err = mempoolProcess(txn) + require.Error(err) + require.Contains(err.Error(), testVector[ii].Error()) + } + } + + if createTransaction != nil { + + txn := createTransaction() + // Sign the transaction with the recipient's key rather than the sender's key. + _signTxn(t, txn, signaturePrivateKeyBase58) + processTxn(0, txn) + + txn = createTransaction() + _signTxnWithDerivedKeyAndType(t, txn, signaturePrivateKeyBase58, 0) + processTxn(1, txn) + + txn = createTransaction() + _signTxnWithDerivedKeyAndType(t, txn, signaturePrivateKeyBase58, 1) + processTxn(2, txn) + } else if derivedKeyTransaction != nil { + var signerPrivBase58 string + if signaturePrivateKeyBase58 != "" { + signerPrivBase58 = signaturePrivateKeyBase58 + } + + derivedKeyTxn, derivedPriv := doDerivedKeyTransaction(transactionSpendingLimit) + if signaturePrivateKeyBase58 == "" { + signerPrivBase58 = Base58CheckEncode(derivedPriv.Serialize(), true, params) + } + _signTxn(t, derivedKeyTxn, signerPrivBase58) + processTxn(0, derivedKeyTxn) + + derivedKeyTxn, derivedPriv = doDerivedKeyTransaction(transactionSpendingLimit) + if signaturePrivateKeyBase58 == "" { + signerPrivBase58 = Base58CheckEncode(derivedPriv.Serialize(), true, params) + } + _signTxnWithDerivedKeyAndType(t, derivedKeyTxn, signerPrivBase58, 0) + processTxn(1, derivedKeyTxn) + + derivedKeyTxn, derivedPriv = doDerivedKeyTransaction(transactionSpendingLimit) + if signaturePrivateKeyBase58 == "" { + signerPrivBase58 = 
Base58CheckEncode(derivedPriv.Serialize(), true, params) + } + _signTxnWithDerivedKeyAndType(t, derivedKeyTxn, signerPrivBase58, 1) + processTxn(2, derivedKeyTxn) + } + return allTxns + } + + // First scenario, just signing a basic transfer. + { + var allTxns []*MsgDeSoTxn + // Try signing the basic transfer with the owner's private key. + testSenderVector := [3]RuleError{ + "", RuleErrorDerivedKeyNotAuthorized, RuleErrorDerivedKeyNotAuthorized, + } + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + createTransaction, + nil, + senderPrivString, + nil, + testSenderVector, + )...) + + // Try signing the basic transfer with a random private key. + testRandomVector := [3]RuleError{ + RuleErrorInvalidTransactionSignature, RuleErrorDerivedKeyNotAuthorized, RuleErrorDerivedKeyNotAuthorized, + } + randomPrivKey, err := btcec.NewPrivateKey(btcec.S256()) + require.NoError(err) + randomPrivKeyBase58Check := Base58CheckEncode(randomPrivKey.Serialize(), true, params) + + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + createTransaction, + nil, + randomPrivKeyBase58Check, + nil, + testRandomVector, + )...) + + mineBlockAndVerifySignatures(allTxns) + } + + // Second scenario, authorize derived key transaction. + { + var allTxns []*MsgDeSoTxn + transactionSpendingLimit := &TransactionSpendingLimit{ + GlobalDESOLimit: 100, + TransactionCountLimitMap: make(map[TxnType]uint64), + CreatorCoinOperationLimitMap: make(map[CreatorCoinOperationLimitKey]uint64), + DAOCoinOperationLimitMap: make(map[DAOCoinOperationLimitKey]uint64), + NFTOperationLimitMap: make(map[NFTOperationLimitKey]uint64), + } + transactionSpendingLimit.TransactionCountLimitMap[TxnTypeAuthorizeDerivedKey] = 1 + + // First try signing the authorize derived key transaction with the derived key itself. 
+ testDerivedKeyVector := [3]RuleError{ + RuleErrorInvalidTransactionSignature, "", "", + } + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + nil, + doDerivedKeyTransaction, + "", + transactionSpendingLimit, + testDerivedKeyVector, + )...) + + // Now try signing the authorize derived key transaction with the sender's private key. + testSignerKeyVector := [3]RuleError{ + "", RuleErrorDerivedKeyNotAuthorized, RuleErrorDerivedKeyNotAuthorized, + } + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + nil, + doDerivedKeyTransaction, + senderPrivString, + transactionSpendingLimit, + testSignerKeyVector, + )...) + + // Finally try a random private key. + testRandomKeyVector := [3]RuleError{ + RuleErrorInvalidTransactionSignature, RuleErrorDerivedKeyNotAuthorized, RuleErrorDerivedKeyNotAuthorized, + } + randomPrivKey, err := btcec.NewPrivateKey(btcec.S256()) + require.NoError(err) + randomPrivKeyBase58Check := Base58CheckEncode(randomPrivKey.Serialize(), true, params) + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + nil, + doDerivedKeyTransaction, + randomPrivKeyBase58Check, + transactionSpendingLimit, + testRandomKeyVector, + )...) + + mineBlockAndVerifySignatures(allTxns) + } + + // Third scenario, there exists an authorize derived key entry and we're signing a basic transfer. + { + var allTxns []*MsgDeSoTxn + transactionSpendingLimit := &TransactionSpendingLimit{ + GlobalDESOLimit: 100, + TransactionCountLimitMap: make(map[TxnType]uint64), + CreatorCoinOperationLimitMap: make(map[CreatorCoinOperationLimitKey]uint64), + DAOCoinOperationLimitMap: make(map[DAOCoinOperationLimitKey]uint64), + NFTOperationLimitMap: make(map[NFTOperationLimitKey]uint64), + } + transactionSpendingLimit.TransactionCountLimitMap[TxnTypeBasicTransfer] = 2 + transactionSpendingLimit.TransactionCountLimitMap[TxnTypeAuthorizeDerivedKey] = 1 + + // First authorize the derived key. 
+ derivedKeyTxn, derivedPriv := doDerivedKeyTransaction(transactionSpendingLimit) + derivedPrivBase58Check := Base58CheckEncode(derivedPriv.Serialize(), true, params) + _signTxn(t, derivedKeyTxn, senderPrivString) + allTxns = append(allTxns, derivedKeyTxn) + _, err = mempoolProcess(derivedKeyTxn) + require.NoError(err) + + // Sign the basic transfer with the sender's private key. + testMoneyOwnerVector := [3]RuleError{ + "", RuleErrorDerivedKeyNotAuthorized, RuleErrorDerivedKeyNotAuthorized, + } + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + createTransaction, + nil, + senderPrivString, + nil, + testMoneyOwnerVector, + )...) + + // Sign the basic transfer with the derived key. + testMoneyDerivedVector := [3]RuleError{ + RuleErrorInvalidTransactionSignature, "", "", + } + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + createTransaction, + nil, + derivedPrivBase58Check, + nil, + testMoneyDerivedVector, + )...) + + // Sign the basic transfer with a random private key. + testMoneyRandomVector := [3]RuleError{ + RuleErrorInvalidTransactionSignature, RuleErrorDerivedKeyNotAuthorized, RuleErrorDerivedKeyNotAuthorized, + } + randomPrivKey, err := btcec.NewPrivateKey(btcec.S256()) + require.NoError(err) + randomPrivKeyBase58Check := Base58CheckEncode(randomPrivKey.Serialize(), true, params) + + allTxns = append(allTxns, mempoolProcessAllSignatureCombinations( + createTransaction, + nil, + randomPrivKeyBase58Check, + nil, + testMoneyRandomVector, + )...) 
+ + mineBlockAndVerifySignatures(allTxns) + } +} diff --git a/lib/block_view_types.go b/lib/block_view_types.go index c2606d37e..1fc251b2e 100644 --- a/lib/block_view_types.go +++ b/lib/block_view_types.go @@ -364,8 +364,8 @@ func MigrationTriggered(blockHeight uint64, migrationName MigrationName) bool { } } - panic(fmt.Sprintf("Problem finding a migration corresponding to migrationName (%v) "+ - "check your code!", migrationName)) + panic(any(fmt.Sprintf("Problem finding a migration corresponding to migrationName (%v) "+ + "check your code!", migrationName))) } // GetMigrationVersion can be returned in GetVersionByte when implementing DeSoEncoders. The way to do it is simply @@ -1996,6 +1996,10 @@ func (entry *MessagingGroupEntry) String() string { entry.GroupOwnerPublicKey, entry.MessagingPublicKey, entry.MessagingGroupKeyName, entry.isDeleted) } +func (entry *MessagingGroupEntry) IsDeleted() bool { + return entry.isDeleted +} + func sortMessagingGroupMembers(membersArg []*MessagingGroupMember) []*MessagingGroupMember { // Make a deep copy of the members to avoid messing up the slice the caller // used. Not doing this could cause downstream effects, mainly in tests where @@ -2590,7 +2594,7 @@ func (key *DerivedKeyEntry) RawEncodeWithoutMetadata(blockHeight uint64, skipMet data = append(data, EncodeExtraData(key.ExtraData)...) if key.TransactionSpendingLimitTracker != nil { data = append(data, BoolToByte(true)) - tslBytes, _ := key.TransactionSpendingLimitTracker.ToBytes() + tslBytes, _ := key.TransactionSpendingLimitTracker.ToBytes(blockHeight) data = append(data, tslBytes...) 
} else { data = append(data, BoolToByte(false)) @@ -2636,7 +2640,7 @@ func (key *DerivedKeyEntry) RawDecodeWithoutMetadata(blockHeight uint64, rr *byt if exists, err := ReadBoolByte(rr); exists && err == nil { key.TransactionSpendingLimitTracker = &TransactionSpendingLimit{} - err := key.TransactionSpendingLimitTracker.FromBytes(rr) + err := key.TransactionSpendingLimitTracker.FromBytes(blockHeight, rr) if err != nil { return errors.Wrapf(err, "DerivedKeyEntry.Decode: Problem decoding TransactionSpendingLimitTracker") } @@ -2653,7 +2657,7 @@ func (key *DerivedKeyEntry) RawDecodeWithoutMetadata(blockHeight uint64, rr *byt } func (key *DerivedKeyEntry) GetVersionByte(blockHeight uint64) byte { - return 0 + return GetMigrationVersion(blockHeight, UnlimitedDerivedKeysMigration) } func (key *DerivedKeyEntry) GetEncoderType() EncoderType { diff --git a/lib/block_view_types_test.go b/lib/block_view_types_test.go index e501ee9c0..0ee422b52 100644 --- a/lib/block_view_types_test.go +++ b/lib/block_view_types_test.go @@ -31,8 +31,18 @@ func TestRandomTypeEncoders(t *testing.T) { require := require.New(t) _ = require + // Make sure encoder migrations are not triggered yet. + for ii := range GlobalDeSoParams.EncoderMigrationHeightsList { + if GlobalDeSoParams.EncoderMigrationHeightsList[ii].Version == 0 { + continue + } + GlobalDeSoParams.EncoderMigrationHeightsList[ii].Height = 1 + } + encodeCases := _getAllDeSoEncoders(t) decodeCases := _getAllDeSoEncoders(t) + // Make sure the encoder migration for v3 messages is tested. 
+ GlobalDeSoParams.ForkHeights = RegtestForkHeights for ii := range encodeCases { gofakeit.Struct(encodeCases[ii]) encodedBytes := EncodeToBytes(0, encodeCases[ii]) diff --git a/lib/blockchain.go b/lib/blockchain.go index 9a894e499..0df49703c 100644 --- a/lib/blockchain.go +++ b/lib/blockchain.go @@ -2050,6 +2050,8 @@ func (bc *Blockchain) ProcessBlock(desoBlock *MsgDeSoBlock, verifySignatures boo return false, false, errors.Wrapf(err, "ProcessBlock: Problem storing block after basic validation") } + // If we've already validated this block, there's no need to do that again. This in particular gets triggered in the + // archival mode, where we actually skip block validation altogether for historical blocks. if nodeToValidate.Status&StatusBlockValidated != 0 { return true, false, nil } diff --git a/lib/blockchain_test.go b/lib/blockchain_test.go index 8f5ab404e..10ea4120e 100644 --- a/lib/blockchain_test.go +++ b/lib/blockchain_test.go @@ -7,6 +7,7 @@ import ( "github.com/go-pg/pg/v10" "log" "math/big" + "math/rand" "os" "testing" "time" @@ -751,25 +752,41 @@ func _signTxn(t *testing.T, txn *MsgDeSoTxn, privKeyStrArg string) { privKey, _ := btcec.PrivKeyFromBytes(btcec.S256(), privKeyBytes) txnSignature, err := txn.Sign(privKey) require.NoError(err) - txn.Signature = txnSignature + txn.Signature.SetSignature(txnSignature) +} + +func _signTxnWithDerivedKey(t *testing.T, txn *MsgDeSoTxn, privKeyStrBase58Check string) { + signatureType := rand.Int() % 2 + _signTxnWithDerivedKeyAndType(t, txn, privKeyStrBase58Check, signatureType) } // Signs the transaction with a derived key. Transaction ExtraData contains the derived // public key, so that _verifySignature() knows transaction wasn't signed by the owner. 
-func _signTxnWithDerivedKey(t *testing.T, txn *MsgDeSoTxn, privKeyStrArg string) { +func _signTxnWithDerivedKeyAndType(t *testing.T, txn *MsgDeSoTxn, privKeyStrBase58Check string, signatureType int) { require := require.New(t) - privKeyBytes, _, err := Base58CheckDecode(privKeyStrArg) + privKeyBytes, _, err := Base58CheckDecode(privKeyStrBase58Check) require.NoError(err) privateKey, publicKey := btcec.PrivKeyFromBytes(btcec.S256(), privKeyBytes) - if txn.ExtraData == nil { - txn.ExtraData = make(map[string][]byte) - } - txn.ExtraData[DerivedPublicKey] = publicKey.SerializeCompressed() - txnSignature, err := txn.Sign(privateKey) - require.NoError(err) - txn.Signature = txnSignature + // We will randomly sign with the standard DER encoding + ExtraData, or with the DeSo-DER encoding. + if signatureType == 0 { + if txn.ExtraData == nil { + txn.ExtraData = make(map[string][]byte) + } + txn.ExtraData[DerivedPublicKey] = publicKey.SerializeCompressed() + txnSignature, err := txn.Sign(privateKey) + require.NoError(err) + txn.Signature.SetSignature(txnSignature) + } else { + txBytes, err := txn.ToBytes(true /*preSignature*/) + require.NoError(err) + txHash := Sha256DoubleHash(txBytes)[:] + + desoSignature, err := SignRecoverable(txHash, privateKey) + require.NoError(err) + txn.Signature = *desoSignature + } } func _assembleBasicTransferTxnFullySigned(t *testing.T, chain *Blockchain, diff --git a/lib/constants.go b/lib/constants.go index 667316fbd..759f131c0 100644 --- a/lib/constants.go +++ b/lib/constants.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "fmt" "log" + "math" "math/big" "os" "path/filepath" @@ -248,6 +249,10 @@ type ForkHeights struct { // ParamUpdater to use a blockHeight-gated function rather than a constant. ParamUpdaterRefactorBlockHeight uint32 + // DeSoUnlimitedDerivedKeysBlockHeight defines the height at which + // we introduce derived keys without a spending limit. 
+ DeSoUnlimitedDerivedKeysBlockHeight uint32 + // Be sure to update EncoderMigrationHeights as well via // GetEncoderMigrationHeights if you're modifying schema. } @@ -288,15 +293,16 @@ type ForkHeights struct { // if MigrationTriggered(blockHeight, UtxoEntryTestHeight) { // _, err = rr.ReadByte() // if err != nil { -// return errors.Wrapf(err, "UtxoEntry.Decode: Problem reading random byte") +// return errors.Wrapf(err, "UtxoEntry.Decode: Problem reading random byte.") // } // } // MAKE SURE TO WRITE CORRECT CONDITIONS FOR THE HEIGHTS IN BOTH ENCODE AND DECODE! // -// 3. Modify func (utxo *UtxoEntry) GetVersionByte to return the correct encoding version depending on the height. (Note +// 3. Modify func (utxo *UtxoEntry) GetVersionByte to return the correct encoding version depending on the height. Use the +// function GetMigrationVersion to chain encoder migrations (Note the variadic parameter of GetMigrationVersion and // the usage of the MigrationName UtxoEntryTestHeight) // -// return GetMigrationVersion(blockHeight, [UtxoEntryTestHeight]) +// return GetMigrationVersion(blockHeight, UtxoEntryTestHeight) // // That's it! 
type MigrationName string @@ -307,11 +313,15 @@ type MigrationHeight struct { } const ( - DefaultMigration MigrationName = "DefaultMigration" + DefaultMigration MigrationName = "DefaultMigration" + UnlimitedDerivedKeysMigration MigrationName = "UnlimitedDerivedKeysMigration" ) type EncoderMigrationHeights struct { DefaultMigration MigrationHeight + + // DeSoUnlimitedDerivedKeys coincides with the DeSoUnlimitedDerivedKeysBlockHeight block + DeSoUnlimitedDerivedKeys MigrationHeight } func GetEncoderMigrationHeights(forkHeights *ForkHeights) *EncoderMigrationHeights { @@ -321,6 +331,11 @@ func GetEncoderMigrationHeights(forkHeights *ForkHeights) *EncoderMigrationHeigh Height: forkHeights.DefaultHeight, Name: DefaultMigration, }, + DeSoUnlimitedDerivedKeys: MigrationHeight{ + Version: 1, + Height: uint64(forkHeights.DeSoUnlimitedDerivedKeysBlockHeight), + Name: UnlimitedDerivedKeysMigration, + }, } } func GetEncoderMigrationHeightsList(forkHeights *ForkHeights) ( @@ -559,6 +574,7 @@ var RegtestForkHeights = ForkHeights{ DerivedKeyEthSignatureCompatibilityBlockHeight: uint32(0), OrderBookDBFetchOptimizationBlockHeight: uint32(0), ParamUpdaterRefactorBlockHeight: uint32(0), + DeSoUnlimitedDerivedKeysBlockHeight: uint32(0), // Be sure to update EncoderMigrationHeights as well via // GetEncoderMigrationHeights if you're modifying schema. @@ -699,6 +715,9 @@ var MainnetForkHeights = ForkHeights{ ParamUpdaterRefactorBlockHeight: uint32(141193), + // TODO: ADD FINAL DATE & TIME HERE + DeSoUnlimitedDerivedKeysBlockHeight: uint32(math.MaxUint32), + // Be sure to update EncoderMigrationHeights as well via // GetEncoderMigrationHeights if you're modifying schema. 
} @@ -946,6 +965,9 @@ var TestnetForkHeights = ForkHeights{ ParamUpdaterRefactorBlockHeight: uint32(373536), + // TODO: ADD FINAL DATE & TIME HERE + DeSoUnlimitedDerivedKeysBlockHeight: uint32(math.MaxUint32), + // Be sure to update EncoderMigrationHeights as well via // GetEncoderMigrationHeights if you're modifying schema. } @@ -1166,6 +1188,9 @@ const ( // TransactionSpendingLimit TransactionSpendingLimitKey = "TransactionSpendingLimit" DerivedKeyMemoKey = "DerivedKeyMemo" + + // V3 Group Chat Messages ExtraData Key + MessagingGroupOperationType = "MessagingGroupOperationType" ) // Defines values that may exist in a transaction's ExtraData map diff --git a/lib/db_adapter.go b/lib/db_adapter.go index 82f5e0461..1e955af9a 100644 --- a/lib/db_adapter.go +++ b/lib/db_adapter.go @@ -10,6 +10,14 @@ type DbAdapter struct { snapshot *Snapshot } +func (bc *Blockchain) NewDbAdapter() *DbAdapter { + return &DbAdapter{ + badgerDb: bc.db, + postgresDb: bc.postgres, + snapshot: bc.snapshot, + } +} + func (bav *UtxoView) GetDbAdapter() *DbAdapter { snap := bav.Snapshot if bav.Postgres != nil { diff --git a/lib/db_utils.go b/lib/db_utils.go index 1cbdeb075..f823f8b01 100644 --- a/lib/db_utils.go +++ b/lib/db_utils.go @@ -329,7 +329,7 @@ type DBPrefixes struct { // In particular, this is used by the EncoderMigration service, and used to determine how to encode/decode db entries. 
func StatePrefixToDeSoEncoder(prefix []byte) (_isEncoder bool, _encoder DeSoEncoder) { if len(prefix) > MaxPrefixLen { - panic(fmt.Sprintf("Called with prefix longer than MaxPrefixLen, prefix: (%v), MaxPrefixLen: (%v)", prefix, MaxPrefixLen)) + panic(any(fmt.Sprintf("Called with prefix longer than MaxPrefixLen, prefix: (%v), MaxPrefixLen: (%v)", prefix, MaxPrefixLen))) } if bytes.Equal(prefix, Prefixes.PrefixUtxoKeyToUtxoEntry) { // prefix_id:"[5]" @@ -486,7 +486,7 @@ func StatePrefixToDeSoEncoder(prefix []byte) (_isEncoder bool, _encoder DeSoEnco func StateKeyToDeSoEncoder(key []byte) (_isEncoder bool, _encoder DeSoEncoder) { if MaxPrefixLen > 1 { - panic(fmt.Errorf("this function only works if MaxPrefixLen is 1 but currently MaxPrefixLen=(%v)", MaxPrefixLen)) + panic(any(fmt.Errorf("this function only works if MaxPrefixLen is 1 but currently MaxPrefixLen=(%v)", MaxPrefixLen))) } return StatePrefixToDeSoEncoder(key[:1]) } @@ -500,11 +500,11 @@ func getPrefixIdValue(structFields reflect.StructField, fieldType reflect.Type) ref.Elem().Set(reflect.MakeSlice(fieldType, 0, 0)) if value != "" && value != "[]" { if err := json.Unmarshal([]byte(value), ref.Interface()); err != nil { - panic(err) + panic(any(err)) } } } else { - panic(fmt.Errorf("prefix_id cannot be empty")) + panic(any(fmt.Errorf("prefix_id cannot be empty"))) } return ref.Elem() } @@ -554,13 +554,13 @@ func GetStatePrefixes() *DBStatePrefixes { prefixId := getPrefixIdValue(structFields.Field(i), prefixField.Type()) prefixBytes := prefixId.Bytes() if len(prefixBytes) > MaxPrefixLen { - panic(fmt.Errorf("prefix (%v) is longer than MaxPrefixLen: (%v)", - structFields.Field(i).Name, MaxPrefixLen)) + panic(any(fmt.Errorf("prefix (%v) is longer than MaxPrefixLen: (%v)", + structFields.Field(i).Name, MaxPrefixLen))) } prefix := prefixBytes[0] if statePrefixes.StatePrefixesMap[prefix] { - panic(fmt.Errorf("prefix (%v) already exists in StatePrefixesMap. 
You created a "+ - "prefix overlap, fix it", structFields.Field(i).Name)) + panic(any(fmt.Errorf("prefix (%v) already exists in StatePrefixesMap. You created a "+ + "prefix overlap, fix it", structFields.Field(i).Name))) } if structFields.Field(i).Tag.Get("is_state") == "true" { statePrefixes.StatePrefixesMap[prefix] = true @@ -590,7 +590,7 @@ func GetStatePrefixes() *DBStatePrefixes { // isStateKey checks if a key is a state-related key. func isStateKey(key []byte) bool { if MaxPrefixLen > 1 { - panic(fmt.Errorf("this function only works if MaxPrefixLen is 1 but currently MaxPrefixLen=(%v)", MaxPrefixLen)) + panic(any(fmt.Errorf("this function only works if MaxPrefixLen is 1 but currently MaxPrefixLen=(%v)", MaxPrefixLen))) } prefix := key[0] isState, exists := StatePrefixes.StatePrefixesMap[prefix] @@ -600,7 +600,7 @@ func isStateKey(key []byte) bool { // isTxIndexKey checks if a key is a txindex-related key. func isTxIndexKey(key []byte) bool { if MaxPrefixLen > 1 { - panic(fmt.Errorf("this function only works if MaxPrefixLen is 1 but currently MaxPrefixLen=(%v)", MaxPrefixLen)) + panic(any(fmt.Errorf("this function only works if MaxPrefixLen is 1 but currently MaxPrefixLen=(%v)", MaxPrefixLen))) } prefix := key[0] for _, txIndexPrefix := range StatePrefixes.TxIndexPrefixes { @@ -641,7 +641,7 @@ func EncodeKeyAndValueForChecksum(key []byte, value []byte, blockHeight uint64) } else if err != nil { glog.Errorf("Some odd problem: isEncoder %v encoder %v, key bytes (%v), value bytes (%v), blockHeight (%v)", isEncoder, encoder, key, checksumValue, blockHeight) - panic(errors.Wrapf(err, "EncodeKeyAndValueForChecksum: The schema is corrupted or value doesn't match the key")) + panic(any(errors.Wrapf(err, "EncodeKeyAndValueForChecksum: The schema is corrupted or value doesn't match the key"))) } } @@ -3771,6 +3771,40 @@ func InitDbWithDeSoGenesisBlock(params *DeSoParams, handle *badger.DB, return nil } +// GetBlockTipHeight fetches the current block tip height from the 
database. +func GetBlockTipHeight(handle *badger.DB, bitcoinNodes bool) (uint64, error) { + var blockHeight uint64 + prefix := _heightHashToNodeIndexPrefix(bitcoinNodes) + // Seek prefix will look for the block node with the largest block height. We populate the maximal possible + // uint32 and iterate backwards. + seekPrefix := append(prefix, []byte{0xff, 0xff, 0xff, 0xff}...) + + err := handle.View(func(txn *badger.Txn) error { + opts := badger.DefaultIteratorOptions + opts.Reverse = true + nodeIterator := txn.NewIterator(opts) + defer nodeIterator.Close() + + // Fetch a single blocknode and then return. + nodeIterator.Seek(seekPrefix) + if !nodeIterator.ValidForPrefix(prefix) { + return fmt.Errorf("No block nodes were found in the database") + } + + item := nodeIterator.Item() + err := item.Value(func(blockNodeBytes []byte) error { + blockNode, err := DeserializeBlockNode(blockNodeBytes) + if err != nil { + return err + } + blockHeight = uint64(blockNode.Height) + return nil + }) + return err + }) + return blockHeight, err +} + func GetBlockIndex(handle *badger.DB, bitcoinNodes bool) (map[BlockHash]*BlockNode, error) { blockIndex := make(map[BlockHash]*BlockNode) @@ -7197,7 +7231,9 @@ func DBGetAllProfilesByCoinValue(handle *badger.DB, snap *Snapshot, fetchEntries } // ===================================================================================== -// Coin balance entry code - Supports both creator coins and DAO coins +// +// Coin balance entry code - Supports both creator coins and DAO coins +// // ===================================================================================== func _dbGetPrefixForHODLerPKIDCreatorPKIDToBalanceEntry(isDAOCoin bool) []byte { if isDAOCoin { diff --git a/lib/errors.go b/lib/errors.go index 67eef8a21..afc3fbd5a 100644 --- a/lib/errors.go +++ b/lib/errors.go @@ -1,6 +1,8 @@ package lib import ( + "fmt" + "reflect" "strings" "github.com/btcsuite/btcd/btcec" @@ -292,6 +294,10 @@ const ( RuleErrorDerivedKeyNotAuthorized 
RuleError = "RuleErrorDerivedKeyNotAuthorized" RuleErrorDerivedKeyInvalidExtraData RuleError = "RuleErrorDerivedKeyInvalidExtraData" RuleErrorDerivedKeyBeforeBlockHeight RuleError = "RuleErrorDerivedKeyBeforeBlockHeight" + RuleErrorDerivedKeyHasBothExtraDataAndRecoveryId RuleError = "RuleErrorDerivedKeyHasBothExtraDataAndRecoveryId" + RuleErrorDerivedKeyInvalidRecoveryId RuleError = "RuleErrorDerivedKeyInvalidRecoveryId" + RuleErrorUnlimitedDerivedKeyBeforeBlockHeight RuleError = "RuleErrorUnlimitedDerivedKeyBeforeBlockHeight" + RuleErrorUnlimitedDerivedKeyNonEmptySpendingLimits RuleError = "RuleErrorUnlimitedDerivedKeyNonEmptySpendingLimits" // Messages RuleErrorMessagingPublicKeyCannotBeOwnerKey RuleError = "RuleErrorMessagingPublicKeyCannotBeOwnerKey" @@ -395,8 +401,8 @@ const ( RuleErrorOldToPublicKeyHasDeletedPKID RuleError = "RuleErrorOldToPublicKeyHasDeletedPKID" // Derived Key Transaction Spending Limits - RuleErrorDerivedKeyTxnTypeNotAuthorized RuleError = "RuleErrorTxnTypeNotAuthorized" - RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit RuleError = "RuleErrorTxnSpendsMoreThanGlobalDESOLimit" + RuleErrorDerivedKeyTxnTypeNotAuthorized RuleError = "RuleErrorDerivedKeyTxnTypeNotAuthorized" + RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit RuleError = "RuleErrorDerivedKeyTxnSpendsMoreThanGlobalDESOLimit" RuleErrorDerivedKeyInvalidCreatorCoinLimitOperation RuleError = "RuleErrorInvalidCreatorCoinLimitOperation" RuleErrorDerivedKeyInvalidDAOCoinLimitOperation RuleError = "RuleErrorInvalidDAOCoinLimitOperation" RuleErrorDerivedKeyNFTOperationNotAuthorized RuleError = "RuleErrorDerivedKeyNFTOperationNotAuthorized" @@ -453,3 +459,19 @@ func IsByteArrayValidPublicKey(bytes []byte) error { } return nil } + +// AssertDependencyStructFieldNumbers checks if a struct has a specified number of fields, otherwise it panics. The idea +// is to place this as an anti-bug feature that will detect changes to the code that poses a risk of causing a bug. 
+func AssertDependencyStructFieldNumbers[T any](obj T, num int) { + if GetNumberOfStructFields(obj) != num { + panic(any(fmt.Sprintf("AssertDependencyStructFieldNumbers: Struct type (%T), number of struct fields (%v), "+ + "expected (%v). This probably means that you've modified a dependency of this code but did not update the "+ + "code itself.", obj, GetNumberOfStructFields(obj), num, + ))) + } +} + +func GetNumberOfStructFields[T any](obj T) int { + objElements := reflect.ValueOf(obj).Elem() + return objElements.NumField() +} diff --git a/lib/network.go b/lib/network.go index f22186f2b..a35a0e7d4 100644 --- a/lib/network.go +++ b/lib/network.go @@ -8,15 +8,19 @@ import ( "encoding/hex" "encoding/json" "fmt" + "github.com/decred/dcrd/dcrec/secp256k1/v4" "io" "math" + "math/big" "net" "sort" + "strconv" "time" "github.com/btcsuite/btcd/btcec" "github.com/btcsuite/btcd/wire" "github.com/davecgh/go-spew/spew" + decredEC "github.com/decred/dcrd/dcrec/secp256k1/v4/ecdsa" merkletree "github.com/deso-protocol/go-merkle-tree" "github.com/ethereum/go-ethereum/crypto/ecies" "github.com/holiman/uint256" @@ -2513,6 +2517,202 @@ func (desoOutput *DeSoOutput) GetEncoderType() EncoderType { return EncoderTypeDeSoOutput } +const ( + // derSigMagicOffset is the first byte of the DER signature format. It's a hard-coded value defined as part of the + // DER encoding standard. + derSigMagicOffset = 0x30 + + // derSigMagicMaxRecoveryOffset is the maximal value of the DeSo-DER signature format. We enable public key recovery + // from ECDSA signatures. To facilitate this, we add the recovery id to the DER magic 0x30 first byte. The recovery id + // is in the range of [0, 3] and corresponds to the compact signature header magic. Adding recovery id to signature + // encoding is totally optional and leaving the first byte 0x30 is acceptable. Specifically, the DeSo-DER signatures + // have the following format: + // <0x30 + optionally (0x01 + recoveryId)> <length of whole message> <0x02> <length of R> <R> 0x2 <length of S> <S>. 
+ // At this point, a familiar reader might arrive at some malleability concerns. After all that's why bip-62 enforced + // DER signatures. ECDSA malleability is prevented by allowing public key recovery iff it was produced with a derived key. + // That is, signatures made with derived keys cannot start with 0x30, unless the underlying transaction has the + // derived public key in ExtraData. And if it does, then the header must be 0x30. + derSigMagicMaxRecoveryOffset = 0x34 +) + +// DeSoSignature is a wrapper around ECDSA signatures used primarily in the MsgDeSoTxn transaction type. +type DeSoSignature struct { + // Sign stores the main ECDSA signature. We use the btcec crypto package for most of the heavy-lifting. + Sign *btcec.Signature + + // RecoveryId is the public key recovery id. The RecoveryId is taken from the DeSo-DER signature header magic byte and + // must be in the [0, 3] range. + RecoveryId byte + // IsRecoverable indicates if the original signature contained the public key recovery id. + IsRecoverable bool +} + +func (desoSign *DeSoSignature) SetSignature(sign *btcec.Signature) { + desoSign.Sign = sign +} + +// Verify is a wrapper around DeSoSignature.Sign.Verify. +func (desoSign *DeSoSignature) Verify(hash []byte, pubKey *btcec.PublicKey) bool { + if desoSign.Sign == nil { + return false + } + return desoSign.Sign.Verify(hash, pubKey) +} + +// ToBytes encodes the signature in accordance to the DeSo-DER ECDSA format. +// <0x30 + optionally (0x01 + recoveryId)> <length of whole message> <0x02> <length of R> <R> 0x2 <length of S> <S>. +func (desoSign *DeSoSignature) ToBytes() []byte { + // Serialize the signature using the DER encoding. + signatureBytes := desoSign.Sign.Serialize() + + // If the signature contains the recovery id, place it in the header magic in accordance with + // the DeSo-DER format. 
+ if len(signatureBytes) > 0 && desoSign.IsRecoverable { + signatureBytes[0] += 0x01 + desoSign.RecoveryId + } + return signatureBytes +} + +// FromBytes parses the signature bytes encoded in accordance to the DeSo-DER ECDSA format. +func (desoSign *DeSoSignature) FromBytes(signatureBytes []byte) error { + // Signature cannot be an empty byte array. + if len(signatureBytes) == 0 { + return fmt.Errorf("FromBytes: Signature cannot be empty") + } + + // The first byte of the signature must be in the [0x30, 0x34] range. + if signatureBytes[0] < derSigMagicOffset || signatureBytes[0] > derSigMagicMaxRecoveryOffset { + return fmt.Errorf("FromBytes: DeSo-DER header magic expected in [%v, %v] range but got: %v", + derSigMagicOffset, derSigMagicMaxRecoveryOffset, signatureBytes[0]) + } + + // Copy the signature bytes to make so that we can freely modify it. + var err error + signatureBytesCopy := make([]byte, len(signatureBytes)) + copy(signatureBytesCopy, signatureBytes) + // If header magic contains the recovery Id, we will retrieve it. + if signatureBytes[0] > derSigMagicOffset { + // We subtract 1 because DeSo-DER header magic in this case is 0x30 + 0x01 + recoveryId + desoSign.RecoveryId = signatureBytes[0] - derSigMagicOffset - 0x01 + desoSign.IsRecoverable = true + // Now set the first byte as the standard DER header offset so that we can parse it with btcec. + signatureBytesCopy[0] = derSigMagicOffset + } + // Parse the signature assuming it's encoded in the standard DER format. + desoSign.Sign, err = btcec.ParseDERSignature(signatureBytesCopy, btcec.S256()) + if err != nil { + return errors.Wrapf(err, "Problem parsing signatureBytes") + } + return nil +} + +const ( + // See comment on _btcecSerializeCompact to better understand how these constants are used. + + // btcecCompactSigSize is the size of a btcec compact signature. It consists of a compact signature recovery code + // byte followed by the R and S components serialized as 32-byte big-endian values. 
1+32*2 = 65 for the R and S + // components. 1+32+32=65. + btcecCompactSigSize byte = 65 + + // This is a magic offset that we need to implement the compact signature concept from btcec. + // + // btcecCompactSigMagicOffset is a value used when creating the compact signature recovery code inherited from Bitcoin and + // has no meaning, but has been retained for compatibility. For historical purposes, it was originally picked to avoid + // a binary representation that would allow compact signatures to be mistaken for other components. + btcecCompactSigMagicOffset byte = 27 + + // btcecCompactSigCompPubKey is a value used when creating the compact signature recovery code to indicate the original + // public key was compressed. + btcecCompactSigCompPubKey byte = 4 +) + +// The concept of a compact signature comes from btcec. It's a weird format that's different from standard DER +// encoding, but we use it because it allows us to leverage their RecoverCompact function. For some reason, btcec +// only implemented SignCompact() and RecoverCompact() but not SerializeCompact(). So, for our use-case, we +// implement the missing Serialize() function and then we call the following to recover the public key: +// - btcec.RecoverCompact(_btcecSerializeCompact(desoSignature)). +// +// _btcecSerializeCompact encodes the signature into the compact signature format: +// <1-byte compact sig recovery code><32-byte R><32-byte S> +// +// The compact sig recovery code is the value 27 + public key recovery ID + 4 +// if the compact signature was created with a compressed public key. +// Public key recovery ID is in the range [0, 3]. +func (desoSign *DeSoSignature) _btcecSerializeCompact() ([]byte, error) { + // We will change from the btcec signature type to the dcrec signature type. To achieve this, we will create the + // ecdsa (R, S) pair using the decred's package. 
+ // Reference: https://github.com/decred/dcrd/blob/1eff7/dcrec/secp256k1/modnscalar_test.go#L26 + rBytes := desoSign.Sign.R.Bytes() + r := &secp256k1.ModNScalar{} + r.SetByteSlice(rBytes) + + sBytes := desoSign.Sign.S.Bytes() + s := &secp256k1.ModNScalar{} + s.SetByteSlice(sBytes) + + // To make sure the signature has been correctly parsed, we verify DER encoding of both signatures matches. + verifySignature := decredEC.NewSignature(r, s) + if !bytes.Equal(verifySignature.Serialize(), desoSign.Sign.Serialize()) { + return nil, fmt.Errorf("_btcecSerializeCompact: Problem sanity-checking signature") + } + + // Encode the signature using compact format. + // reference: https://github.com/decred/dcrd/blob/1eff7/dcrec/secp256k1/ecdsa/signature.go#L712 + compactSigRecoveryCode := btcecCompactSigMagicOffset + desoSign.RecoveryId + btcecCompactSigCompPubKey + + // Output <32-byte R><32-byte S>. + var b [btcecCompactSigSize]byte + b[0] = compactSigRecoveryCode + r.PutBytesUnchecked(b[1:33]) + s.PutBytesUnchecked(b[33:65]) + return b[:], nil +} + +// RecoverPublicKey attempts to retrieve the signer's public key from the DeSoSignature given the messageHash sha256x2 digest. +func (desoSign *DeSoSignature) RecoverPublicKey(messageHash []byte) (*btcec.PublicKey, error) { + // Serialize signature into the compact encoding. + signatureBytes, err := desoSign._btcecSerializeCompact() + if err != nil { + return nil, errors.Wrapf(err, "RecoverPublicKey: Problem serializing compact signature") + } + + // Now recover the public key from the compact encoding. + recoveredPublicKey, _, err := btcec.RecoverCompact(btcec.S256(), signatureBytes, messageHash) + if err != nil { + return nil, errors.Wrapf(err, "RecoverPublicKey: Problem recovering public key from the signature bytes") + } + + return recoveredPublicKey, nil +} + +// SignRecoverable computes a signature that adds a publicKeyRecoveryID to the first byte of a +// standard DER signature. 
We call the combination the DeSo-DER signature. +// +// Overall, it first computes a standard DER signature, and then it adds (0x01 + recoveryID) to +// the first byte. This makes it so that the first byte will be between [0x31, 0x34] inclusive, +// instead of being 0x30, which is the standard DER signature magic number. +func SignRecoverable(bb []byte, privateKey *btcec.PrivateKey) (*DeSoSignature, error) { + signature, err := privateKey.Sign(bb) + if err != nil { + return nil, err + } + + // We use SignCompact from the btcec library to get the recoverID. This results in a non-standard + // encoding that we need to manipulate in order to get the recoveryID back out. See comment on + // _btcecSerializeCompact for more information. + signatureCompact, err := btcec.SignCompact(btcec.S256(), privateKey, bb, true) + if err != nil { + return nil, err + } + recoveryId := (signatureCompact[0] - btcecCompactSigMagicOffset) & ^byte(btcecCompactSigCompPubKey) + + return &DeSoSignature{ + Sign: signature, + RecoveryId: recoveryId, + IsRecoverable: true, + }, nil +} + type MsgDeSoTxn struct { TxInputs []*DeSoInput TxOutputs []*DeSoOutput @@ -2541,7 +2741,7 @@ type MsgDeSoTxn struct { // inputs to the transaction. The exception to this rule is that // BLOCK_REWARD and CREATE_deso transactions do not require a signature // since they have no inputs. - Signature *btcec.Signature + Signature DeSoSignature // (!!) **DO_NOT_USE** (!!) // @@ -2631,8 +2831,8 @@ func (msg *MsgDeSoTxn) ToBytes(preSignature bool) ([]byte, error) { // a zero will be encoded for the length and no signature bytes will be added // beyond it. 
sigBytes := []byte{} - if !preSignature && msg.Signature != nil { - sigBytes = msg.Signature.Serialize() + if !preSignature && msg.Signature.Sign != nil { + sigBytes = msg.Signature.ToBytes() } // Note that even though we encode the length as a varint as opposed to a // fixed-width int, it should always take up just one byte since the length @@ -2757,7 +2957,7 @@ func _readTransaction(rr io.Reader) (*MsgDeSoTxn, error) { return nil, fmt.Errorf("_readTransaction.FromBytes: sigLen length %d longer than max %d", sigLen, MaxMessagePayload) } - ret.Signature = nil + ret.Signature.SetSignature(nil) if sigLen != 0 { sigBytes := make([]byte, sigLen) _, err = io.ReadFull(rr, sigBytes) @@ -2766,12 +2966,10 @@ func _readTransaction(rr io.Reader) (*MsgDeSoTxn, error) { } // Verify that the signature is valid. - sig, err := btcec.ParseDERSignature(sigBytes, btcec.S256()) + err := ret.Signature.FromBytes(sigBytes) if err != nil { return nil, errors.Wrapf(err, "_readTransaction: Problem parsing DeSoTxn.Signature bytes") } - // If everything worked, we set the ret signature to the original. - ret.Signature = sig } return ret, nil @@ -2936,7 +3134,7 @@ func (msg *MsgDeSoTxn) UnmarshalJSON(data []byte) error { TxOutputs []*DeSoOutput TxnMeta DeSoTxnMetadata PublicKey []byte - Signature *btcec.Signature + Signature DeSoSignature TxnType uint64 }{ TxInputs: msg.TxInputs, @@ -4772,9 +4970,172 @@ type TransactionSpendingLimit struct { // BuyingCreatorPKID || SellingCreatorPKID to number of // transactions DAOCoinLimitOrderLimitMap map[DAOCoinLimitOrderLimitKey]uint64 + + // ===== ENCODER MIGRATION UnlimitedDerivedKeysMigration ===== + // IsUnlimited field determines whether this derived key has no spending limit. + IsUnlimited bool +} + +// ToMetamaskString encodes the TransactionSpendingLimit into a Metamask-compatible string. The encoded string will +// be a part of Access Bytes Encoding 2.0 for derived keys, which creates a human-readable string that MM can sign. 
+// The idea behind this function is to create an injective mapping from the TransactionSpendingLimit -> string. +// This mapping is not intended to be invertible, rather we would also call this function while verifying access bytes. +// Basically, to verify signature on a derived key, we will call this function as well, instead of attempting to revert +// the metamask string. +func (tsl *TransactionSpendingLimit) ToMetamaskString(params *DeSoParams) string { + var str string + var indentationCounter int + + str += "Spending limits on the derived key:\n" + indentationCounter++ + + // GlobalDESOLimit + if tsl.GlobalDESOLimit > 0 { + str += _indt(indentationCounter) + "Total $DESO Limit: " + FormatScaledUint256AsDecimalString( + big.NewInt(0).SetUint64(tsl.GlobalDESOLimit), big.NewInt(int64(NanosPerUnit))) + " $DESO\n" + } + + // Sort an array of strings and add them to the spending limit string str. This will come in handy below, + // simplifying the construction of the metamask spending limit string. + sortStringsAndAddToLimitStr := func(strList []string) { + sort.Strings(strList) + for _, limitStr := range strList { + str += limitStr + } + } + + // TransactionCountLimitMap + if len(tsl.TransactionCountLimitMap) > 0 { + var txnCountStr []string + str += _indt(indentationCounter) + "Transaction Count Limit: \n" + indentationCounter++ + for txnType, limit := range tsl.TransactionCountLimitMap { + txnCountStr = append(txnCountStr, _indt(indentationCounter)+txnType.String()+": "+ + strconv.FormatUint(limit, 10)+"\n") + } + // Ensure deterministic ordering of the transaction count limit strings by doing a lexicographical sort. 
+ sortStringsAndAddToLimitStr(txnCountStr) + indentationCounter-- + } + + // CreatorCoinOperationLimitMap + if len(tsl.CreatorCoinOperationLimitMap) > 0 { + var creatorCoinLimitStr []string + str += _indt(indentationCounter) + "Creator Coin Operation Limits:\n" + indentationCounter++ + for limitKey, limit := range tsl.CreatorCoinOperationLimitMap { + opString := _indt(indentationCounter) + "[\n" + + indentationCounter++ + opString += _indt(indentationCounter) + "Creator PKID: " + + Base58CheckEncode(limitKey.CreatorPKID.ToBytes(), false, params) + "\n" + opString += _indt(indentationCounter) + "Operation: " + + limitKey.Operation.ToString() + "\n" + opString += _indt(indentationCounter) + "Transaction Count: " + + strconv.FormatUint(limit, 10) + "\n" + indentationCounter-- + + opString += _indt(indentationCounter) + "]\n" + creatorCoinLimitStr = append(creatorCoinLimitStr, opString) + } + // Ensure deterministic ordering of the transaction count limit strings by doing a lexicographical sort. 
+ sortStringsAndAddToLimitStr(creatorCoinLimitStr) + indentationCounter-- + } + + // DAOCoinOperationLimitMap + if len(tsl.DAOCoinOperationLimitMap) > 0 { + var daoCoinOperationLimitStr []string + str += _indt(indentationCounter) + "DAO Coin Operation Limits:\n" + indentationCounter++ + for limitKey, limit := range tsl.DAOCoinOperationLimitMap { + opString := _indt(indentationCounter) + "[\n" + + indentationCounter++ + opString += _indt(indentationCounter) + "Creator PKID: " + + Base58CheckEncode(limitKey.CreatorPKID.ToBytes(), false, params) + "\n" + opString += _indt(indentationCounter) + "Operation: " + + limitKey.Operation.ToString() + "\n" + opString += _indt(indentationCounter) + "Transaction Count: " + + strconv.FormatUint(limit, 10) + "\n" + indentationCounter-- + + opString += _indt(indentationCounter) + "]\n" + daoCoinOperationLimitStr = append(daoCoinOperationLimitStr, opString) + } + // Ensure deterministic ordering of the transaction count limit strings by doing a lexicographical sort. 
+ sortStringsAndAddToLimitStr(daoCoinOperationLimitStr) + indentationCounter-- + } + + // NFTOperationLimitMap + if len(tsl.NFTOperationLimitMap) > 0 { + var nftOperationLimitKey []string + str += _indt(indentationCounter) + "NFT Operation Limits:\n" + indentationCounter++ + for limitKey, limit := range tsl.NFTOperationLimitMap { + opString := _indt(indentationCounter) + "[\n" + + indentationCounter++ + opString += _indt(indentationCounter) + "Block Hash: " + limitKey.BlockHash.String() + "\n" + opString += _indt(indentationCounter) + "Serial Number: " + + strconv.FormatUint(limitKey.SerialNumber, 10) + "\n" + opString += _indt(indentationCounter) + "Operation: " + + limitKey.Operation.ToString() + "\n" + opString += _indt(indentationCounter) + "Transaction Count: " + + strconv.FormatUint(limit, 10) + "\n" + indentationCounter-- + + opString += _indt(indentationCounter) + "]\n" + nftOperationLimitKey = append(nftOperationLimitKey, opString) + } + // Ensure deterministic ordering of the transaction count limit strings by doing a lexicographical sort. 
+ sortStringsAndAddToLimitStr(nftOperationLimitKey) + indentationCounter-- + } + + // DAOCoinLimitOrderLimitMap + if len(tsl.DAOCoinLimitOrderLimitMap) > 0 { + var daoCoinLimitOrderStr []string + str += _indt(indentationCounter) + "DAO Coin Limit Order Restrictions:\n" + indentationCounter++ + for limitKey, limit := range tsl.DAOCoinLimitOrderLimitMap { + opString := _indt(indentationCounter) + "[\n" + + indentationCounter++ + opString += _indt(indentationCounter) + "Buying DAO Creator PKID: " + + Base58CheckEncode(limitKey.BuyingDAOCoinCreatorPKID.ToBytes(), false, params) + "\n" + opString += _indt(indentationCounter) + "Selling DAO Creator PKID: " + + Base58CheckEncode(limitKey.SellingDAOCoinCreatorPKID.ToBytes(), false, params) + "\n" + opString += _indt(indentationCounter) + "Transaction Count: " + + strconv.FormatUint(limit, 10) + "\n" + indentationCounter-- + + opString += _indt(indentationCounter) + "]\n" + daoCoinLimitOrderStr = append(daoCoinLimitOrderStr, opString) + } + // Ensure deterministic ordering of the transaction count limit strings by doing a lexicographical sort. + sortStringsAndAddToLimitStr(daoCoinLimitOrderStr) + indentationCounter-- + } + + // IsUnlimited + if tsl.IsUnlimited { + str += "Unlimited" + } + + return str } -func (tsl *TransactionSpendingLimit) ToBytes() ([]byte, error) { +func _indt(counter int) string { + var indentationString string + for ; counter > 0; counter-- { + indentationString += "\t" + } + return indentationString +} + +func (tsl *TransactionSpendingLimit) ToBytes(blockHeight uint64) ([]byte, error) { data := []byte{} if tsl == nil { @@ -4871,10 +5232,15 @@ func (tsl *TransactionSpendingLimit) ToBytes() ([]byte, error) { } } + // IsUnlimited, gated by the encoder migration. 
+ if MigrationTriggered(blockHeight, UnlimitedDerivedKeysMigration) { + data = append(data, BoolToByte(tsl.IsUnlimited)) + } + return data, nil } -func (tsl *TransactionSpendingLimit) FromBytes(rr *bytes.Reader) error { +func (tsl *TransactionSpendingLimit) FromBytes(blockHeight uint64, rr *bytes.Reader) error { globalDESOLimit, err := ReadUvarint(rr) if err != nil { return err @@ -4995,6 +5361,14 @@ func (tsl *TransactionSpendingLimit) FromBytes(rr *bytes.Reader) error { tsl.DAOCoinLimitOrderLimitMap[*daoCoinLimitOrderLimitKey] = operationCount } } + + if MigrationTriggered(blockHeight, UnlimitedDerivedKeysMigration) { + tsl.IsUnlimited, err = ReadBoolByte(rr) + if err != nil { + return errors.Wrapf(err, "TransactionSpendingLimit.FromBytes: Problem reading IsUnlimited") + } + } + return nil } @@ -5006,6 +5380,7 @@ func (tsl *TransactionSpendingLimit) Copy() *TransactionSpendingLimit { DAOCoinOperationLimitMap: make(map[DAOCoinOperationLimitKey]uint64), NFTOperationLimitMap: make(map[NFTOperationLimitKey]uint64), DAOCoinLimitOrderLimitMap: make(map[DAOCoinLimitOrderLimitKey]uint64), + IsUnlimited: tsl.IsUnlimited, } for txnType, txnCount := range tsl.TransactionCountLimitMap { @@ -5031,6 +5406,26 @@ func (tsl *TransactionSpendingLimit) Copy() *TransactionSpendingLimit { return copyTSL } +func (bav *UtxoView) CheckIfValidUnlimitedSpendingLimit(tsl *TransactionSpendingLimit, blockHeight uint32) (_isUnlimited bool, _err error) { + AssertDependencyStructFieldNumbers(&TransactionSpendingLimit{}, 7) + + if tsl.IsUnlimited && blockHeight < bav.Params.ForkHeights.DeSoUnlimitedDerivedKeysBlockHeight { + return false, RuleErrorUnlimitedDerivedKeyBeforeBlockHeight + } + + if tsl.IsUnlimited && (tsl.GlobalDESOLimit > 0 || + len(tsl.TransactionCountLimitMap) > 0 || + len(tsl.CreatorCoinOperationLimitMap) > 0 || + len(tsl.DAOCoinOperationLimitMap) > 0 || + len(tsl.NFTOperationLimitMap) > 0 || + len(tsl.DAOCoinLimitOrderLimitMap) > 0) { + + return tsl.IsUnlimited, 
RuleErrorUnlimitedDerivedKeyNonEmptySpendingLimits + } + + return tsl.IsUnlimited, nil +} + type NFTLimitOperation uint8 const ( diff --git a/lib/network_test.go b/lib/network_test.go index a5653705e..3a26f78de 100644 --- a/lib/network_test.go +++ b/lib/network_test.go @@ -4,7 +4,10 @@ import ( "bytes" "encoding/hex" "github.com/holiman/uint256" + "math/big" + "math/rand" "reflect" + "strconv" "strings" "testing" "time" @@ -1294,3 +1297,283 @@ func TestDecodeBlockVersion0(t *testing.T) { require.Equal(expectedBytes, blockBytes) } + +// This test will test determinism and correctness of TransactionSpendingLimit.ToMetamaskString(). +func TestSpendingLimitMetamaskString(t *testing.T) { + require := require.New(t) + _ = require + + // Number of operations to choose from during tests. The following fields should reflect the upper bound on + // the corresponding TransactionSpendingLimit fields. + maxTxnType := 26 + maxCreatorCoinLimitOperation := 4 + maxDAOCoinLimitOperation := 6 + maxNFTLimitOperation := 7 + + // Number of random operations to generate for each field. + testOperationCount := 2 + + // We test different configurations of TransactionSpendingLimit fields. + // Generate a random GlobalDESOLimit field. + _populateTotalDESOLimit := func() uint64 { + return rand.Uint64() + } + // Generate a random TransactionCountLimitMap field. + _populateTransactionCountLimitMap := func(operationCount int) map[TxnType]uint64 { + operationMap := make(map[TxnType]uint64) + + var indexList []byte + for ii := 0; ii < maxTxnType; ii++ { + indexList = append(indexList, byte(ii)) + } + rand.Shuffle(len(indexList), func(i, j int) { + temp := indexList[i] + indexList[i] = indexList[j] + indexList[j] = temp + }) + + if operationCount > maxTxnType { + operationCount = maxTxnType + } + for ii := 0; ii < operationCount; ii++ { + txnTyp := TxnType(indexList[ii]) + operationMap[txnTyp] = rand.Uint64() + } + return operationMap + } + // Generate a random TransactionCountLimitMap field. 
+ _populateCreatorCoinOperationLimitMap := func(operationCount int) map[CreatorCoinOperationLimitKey]uint64 { + operationMap := make(map[CreatorCoinOperationLimitKey]uint64) + + for ; operationCount > 0; operationCount-- { + randomCreatorCoinOperationKey := CreatorCoinOperationLimitKey{ + CreatorPKID: *NewPKID(RandomBytes(int32(PublicKeyLenCompressed))), + Operation: CreatorCoinLimitOperation(uint8(rand.Int()%maxCreatorCoinLimitOperation + 1)), + } + operationMap[randomCreatorCoinOperationKey] = rand.Uint64() + } + return operationMap + } + // Generate a random DAOCoinOperationLimitMap field. + _populateDAOCoinOperationLimitMap := func(operationCount int) map[DAOCoinOperationLimitKey]uint64 { + operationMap := make(map[DAOCoinOperationLimitKey]uint64) + + for ; operationCount > 0; operationCount-- { + randomDAOCoinOperationKey := DAOCoinOperationLimitKey{ + CreatorPKID: *NewPKID(RandomBytes(int32(PublicKeyLenCompressed))), + Operation: DAOCoinLimitOperation(uint8(rand.Int()%maxDAOCoinLimitOperation + 1)), + } + operationMap[randomDAOCoinOperationKey] = rand.Uint64() + } + return operationMap + } + // Generate a random NFTOperationLimitMap field. + _populateNFTOperationLimitKey := func(operationCount int) map[NFTOperationLimitKey]uint64 { + operationMap := make(map[NFTOperationLimitKey]uint64) + + for ; operationCount > 0; operationCount-- { + randomNFTOperationKey := NFTOperationLimitKey{ + BlockHash: *NewBlockHash(RandomBytes(HashSizeBytes)), + SerialNumber: rand.Uint64(), + Operation: NFTLimitOperation(uint8(rand.Int()%maxNFTLimitOperation + 1)), + } + operationMap[randomNFTOperationKey] = rand.Uint64() + } + return operationMap + } + // Generate a random DAOCoinLimitOrderLimitMap field. 
+ _populateDAOCoinLimitOrderLimitMap := func(operationCount int) map[DAOCoinLimitOrderLimitKey]uint64 { + operationMap := make(map[DAOCoinLimitOrderLimitKey]uint64) + + for ; operationCount > 0; operationCount-- { + randomDAOLimitOperation := DAOCoinLimitOrderLimitKey{ + BuyingDAOCoinCreatorPKID: *NewPKID(RandomBytes(int32(PublicKeyLenCompressed))), + SellingDAOCoinCreatorPKID: *NewPKID(RandomBytes(int32(PublicKeyLenCompressed))), + } + operationMap[randomDAOLimitOperation] = rand.Uint64() + } + return operationMap + } + + // Test encoding of all possible combinations of TransactionSpendingLimit fields. + _runTestOnSpendingLimit := func(spendingLimit *TransactionSpendingLimit, params *DeSoParams) bool { + return spendingLimit.ToMetamaskString(params) == spendingLimit.ToMetamaskString(params) + } + + // Do the binomial sum trick 2^n = \sum^n_{i=0} (n choose i) + for ii := 0; ii < 1<<(reflect.ValueOf(TransactionSpendingLimit{}).Type().NumField()); ii++ { + spendingLimit := TransactionSpendingLimit{} + if ii&(1<<0) > 0 { + spendingLimit.GlobalDESOLimit = _populateTotalDESOLimit() + } + if ii&(1<<1) > 0 { + spendingLimit.TransactionCountLimitMap = _populateTransactionCountLimitMap(testOperationCount) + } + if ii&(1<<2) > 0 { + spendingLimit.CreatorCoinOperationLimitMap = _populateCreatorCoinOperationLimitMap(testOperationCount) + } + if ii&(1<<3) > 0 { + spendingLimit.DAOCoinOperationLimitMap = _populateDAOCoinOperationLimitMap(testOperationCount) + } + if ii&(1<<4) > 0 { + spendingLimit.NFTOperationLimitMap = _populateNFTOperationLimitKey(testOperationCount) + } + if ii&(1<<5) > 0 { + spendingLimit.DAOCoinLimitOrderLimitMap = _populateDAOCoinLimitOrderLimitMap(testOperationCount) + } + // Make sure the encoding is deterministic. 
+ require.Equal(true, _runTestOnSpendingLimit(&spendingLimit, &DeSoTestnetParams)) + require.Equal(true, _runTestOnSpendingLimit(&spendingLimit, &DeSoMainnetParams)) + + // Make sure the encoding contains all the spending limit fields + _verifyEncodingCorrectness := func(tsl *TransactionSpendingLimit, params *DeSoParams) bool { + encoding := spendingLimit.ToMetamaskString(params) + if tsl.GlobalDESOLimit > 0 { + if !strings.Contains(encoding, FormatScaledUint256AsDecimalString( + big.NewInt(0).SetUint64(tsl.GlobalDESOLimit), big.NewInt(int64(NanosPerUnit)))) { + return false + } + } + if len(tsl.TransactionCountLimitMap) > 0 { + for txnType, limit := range tsl.TransactionCountLimitMap { + if !strings.Contains(encoding, txnType.String()) { + return false + } + if !strings.Contains(encoding, strconv.FormatUint(limit, 10)) { + return false + } + } + } + if len(tsl.CreatorCoinOperationLimitMap) > 0 { + for limitKey, limit := range tsl.CreatorCoinOperationLimitMap { + if !strings.Contains(encoding, Base58CheckEncode(limitKey.CreatorPKID.ToBytes(), false, params)) { + return false + } + if !strings.Contains(encoding, limitKey.Operation.ToString()) { + return false + } + if !strings.Contains(encoding, strconv.FormatUint(limit, 10)) { + return false + } + } + } + if len(tsl.DAOCoinOperationLimitMap) > 0 { + for limitKey, limit := range tsl.DAOCoinOperationLimitMap { + if !strings.Contains(encoding, Base58CheckEncode(limitKey.CreatorPKID.ToBytes(), false, params)) { + return false + } + if !strings.Contains(encoding, limitKey.Operation.ToString()) { + return false + } + if !strings.Contains(encoding, strconv.FormatUint(limit, 10)) { + return false + } + } + } + if len(tsl.NFTOperationLimitMap) > 0 { + for limitKey, limit := range tsl.NFTOperationLimitMap { + if !strings.Contains(encoding, limitKey.BlockHash.String()) { + return false + } + if !strings.Contains(encoding, strconv.FormatUint(limitKey.SerialNumber, 10)) { + return false + } + if !strings.Contains(encoding, 
limitKey.Operation.ToString()) { + return false + } + if !strings.Contains(encoding, strconv.FormatUint(limit, 10)) { + return false + } + } + } + if len(tsl.DAOCoinLimitOrderLimitMap) > 0 { + for limitKey, limit := range tsl.DAOCoinLimitOrderLimitMap { + if !strings.Contains(encoding, Base58CheckEncode(limitKey.BuyingDAOCoinCreatorPKID.ToBytes(), false, params)) { + return false + } + if !strings.Contains(encoding, Base58CheckEncode(limitKey.SellingDAOCoinCreatorPKID.ToBytes(), false, params)) { + return false + } + if !strings.Contains(encoding, strconv.FormatUint(limit, 10)) { + return false + } + } + } + return true + } + require.Equal(true, _verifyEncodingCorrectness(&spendingLimit, &DeSoTestnetParams)) + require.Equal(true, _verifyEncodingCorrectness(&spendingLimit, &DeSoMainnetParams)) + } +} + +// Test encoding of unlimited derived key spending limits. +func TestUnlimitedSpendingLimitMetamaskEncoding(t *testing.T) { + require := require.New(t) + + // Set the blockheights for encoder migration. + GlobalDeSoParams = DeSoTestnetParams + GlobalDeSoParams.ForkHeights.DeSoUnlimitedDerivedKeysBlockHeight = 0 + for ii := range GlobalDeSoParams.EncoderMigrationHeightsList { + GlobalDeSoParams.EncoderMigrationHeightsList[ii].Height = 0 + } + + // Encode the spending limit with just the IsUnlimited field. + spendingLimit := &TransactionSpendingLimit{ + IsUnlimited: true, + } + + // Test the spending limit encoding using the standard scheme. + spendingLimitBytes, err := spendingLimit.ToBytes(1) + require.NoError(err) + require.Equal(true, reflect.DeepEqual(spendingLimitBytes, []byte{0, 0, 0, 0, 0, 0, 1})) + + // Test the spending limit encoding using the metamask scheme. + require.Equal(true, reflect.DeepEqual( + "Spending limits on the derived key:\nUnlimited", + spendingLimit.ToMetamaskString(&GlobalDeSoParams), + )) +} + +// Verify that DeSoSignature.SerializeCompact correctly encodes the signature into compact format. 
+func TestDeSoSignature_SerializeCompact(t *testing.T) { + require := require.New(t) + _ = require + + // Number of test cases. In each test case we generate a new signer private key. + numTestCases := 100 + // Number of messages signed for each signer private key. + numIterations := 10 + + for ; numTestCases > 0; numTestCases-- { + // Generate a random (private, public) keypair. + privateKey, err := btcec.NewPrivateKey(btcec.S256()) + require.NoError(err) + publicKeyBytes := privateKey.PubKey().SerializeCompressed() + + for iter := 0; iter < numIterations; iter++ { + // Generate a random message and sign it. + message := RandomBytes(10) + messageHash := Sha256DoubleHash(message)[:] + desoSignature, err := SignRecoverable(messageHash, privateKey) + require.NoError(err) + + // Verify that the compact signature is equal to what we serialized. + signatureCompact, err := btcec.SignCompact(btcec.S256(), privateKey, messageHash, true) + require.NoError(err) + + // Use the DeSoSignature.SerializeCompact encoding. + signatureCompactCustom, err := desoSignature._btcecSerializeCompact() + require.NoError(err) + // Make sure the btcec and our custom encoding are identical. + require.Equal(true, reflect.DeepEqual(signatureCompact, signatureCompactCustom)) + + // Recover the public key from our custom encoding. + recoveredPublicKey, _, err := btcec.RecoverCompact(btcec.S256(), signatureCompactCustom, messageHash) + require.NoError(err) + + // Verify that the recovered public key matches the original public key. 
+ recoveredPublicKeyBytes := recoveredPublicKey.SerializeCompressed() + require.Equal(true, reflect.DeepEqual(publicKeyBytes, recoveredPublicKeyBytes)) + } + } +} diff --git a/lib/postgres.go b/lib/postgres.go index 427b68511..29dcf7a34 100644 --- a/lib/postgres.go +++ b/lib/postgres.go @@ -118,13 +118,15 @@ type PGBlock struct { type PGTransaction struct { tableName struct{} `pg:"pg_transactions"` - Hash *BlockHash `pg:",pk,type:bytea"` - BlockHash *BlockHash `pg:",type:bytea"` - Type TxnType `pg:",use_zero"` - PublicKey []byte `pg:",type:bytea"` - ExtraData map[string][]byte - R *BlockHash `pg:",type:bytea"` - S *BlockHash `pg:",type:bytea"` + Hash *BlockHash `pg:",pk,type:bytea"` + BlockHash *BlockHash `pg:",type:bytea"` + Type TxnType `pg:",use_zero"` + PublicKey []byte `pg:",type:bytea"` + ExtraData map[string][]byte + R *BlockHash `pg:",type:bytea"` + S *BlockHash `pg:",type:bytea"` + RecoveryId uint32 `pg:",use_zero"` + IsRecoverable bool `pg:",use_zero"` // Relationships Outputs []*PGTransactionOutput `pg:"rel:has-many,join_fk:output_hash"` @@ -885,6 +887,7 @@ type PGDerivedKey struct { // TransactionSpendingLimit fields TransactionSpendingLimitTracker []byte `pg:",type:bytea"` Memo []byte `pg:",type:bytea"` + BlockHeight uint64 `pg:",use_zero"` } func (key *PGDerivedKey) NewDerivedKeyEntry() *DerivedKeyEntry { @@ -894,7 +897,7 @@ func (key *PGDerivedKey) NewDerivedKeyEntry() *DerivedKeyEntry { var tsl *TransactionSpendingLimit if len(key.TransactionSpendingLimitTracker) > 0 { tsl = &TransactionSpendingLimit{} - if err := tsl.FromBytes(bytes.NewReader(key.TransactionSpendingLimitTracker)); err != nil { + if err := tsl.FromBytes(key.BlockHeight, bytes.NewReader(key.TransactionSpendingLimitTracker)); err != nil { glog.Errorf("Error converting Derived Key's TransactionLimitTracker bytes back into a TransactionSpendingLimit: %v", err) return nil } @@ -1060,9 +1063,11 @@ func (postgres *Postgres) InsertTransactionsTx(tx *pg.Tx, desoTxns []*MsgDeSoTxn ExtraData: 
txn.ExtraData, } - if txn.Signature != nil { - transaction.R = BigintToHash(txn.Signature.R) - transaction.S = BigintToHash(txn.Signature.S) + if txn.Signature.Sign != nil { + transaction.R = BigintToHash(txn.Signature.Sign.R) + transaction.S = BigintToHash(txn.Signature.Sign.S) + transaction.RecoveryId = uint32(txn.Signature.RecoveryId) + transaction.IsRecoverable = txn.Signature.IsRecoverable } transactions = append(transactions, transaction) @@ -1545,11 +1550,22 @@ func (postgres *Postgres) UpsertBlockAndTransactions(blockNode *BlockNode, desoB }) } +func (postgres *Postgres) GetTransactionByHash(txnHash *BlockHash) *PGTransaction { + txn := PGTransaction{ + Hash: txnHash, + } + err := postgres.db.Model(&txn).WherePK().Select() + if err != nil { + return nil + } + return &txn +} + // // BlockView Flushing // -func (postgres *Postgres) FlushView(view *UtxoView) error { +func (postgres *Postgres) FlushView(view *UtxoView, blockHeight uint64) error { return postgres.db.RunInTransaction(postgres.db.Context(), func(tx *pg.Tx) error { if err := postgres.flushUtxos(tx, view); err != nil { return err @@ -1590,7 +1606,7 @@ func (postgres *Postgres) FlushView(view *UtxoView) error { if err := postgres.flushNFTBids(tx, view); err != nil { return err } - if err := postgres.flushDerivedKeys(tx, view); err != nil { + if err := postgres.flushDerivedKeys(tx, view, blockHeight); err != nil { return err } // Temporarily write limit orders to badger @@ -2147,11 +2163,12 @@ func (postgres *Postgres) flushNFTBids(tx *pg.Tx, view *UtxoView) error { return nil } -func (postgres *Postgres) flushDerivedKeys(tx *pg.Tx, view *UtxoView) error { +func (postgres *Postgres) flushDerivedKeys(tx *pg.Tx, view *UtxoView, blockHeight uint64) error { var insertKeys []*PGDerivedKey var deleteKeys []*PGDerivedKey + for _, keyEntry := range view.DerivedKeyToDerivedEntry { - tslBytes, err := keyEntry.TransactionSpendingLimitTracker.ToBytes() + tslBytes, err := 
keyEntry.TransactionSpendingLimitTracker.ToBytes(blockHeight) if err != nil { return err } @@ -2160,8 +2177,10 @@ func (postgres *Postgres) flushDerivedKeys(tx *pg.Tx, view *UtxoView) error { DerivedPublicKey: keyEntry.DerivedPublicKey, ExpirationBlock: keyEntry.ExpirationBlock, OperationType: keyEntry.OperationType, + ExtraData: keyEntry.ExtraData, TransactionSpendingLimitTracker: tslBytes, Memo: keyEntry.Memo, + BlockHeight: blockHeight, } if keyEntry.isDeleted { diff --git a/lib/server.go b/lib/server.go index 48d13a2eb..f90369567 100644 --- a/lib/server.go +++ b/lib/server.go @@ -53,6 +53,7 @@ type Server struct { cmgr *ConnectionManager blockchain *Blockchain snapshot *Snapshot + forceChecksum bool mempool *DeSoMempool miner *DeSoMiner blockProducer *DeSoBlockProducer @@ -364,7 +365,9 @@ func NewServer( _trustedBlockProducerStartHeight uint64, eventManager *EventManager, _nodeMessageChan chan NodeMessage, -) (_srv *Server, _err error, _shouldRestart bool) { + _forceChecksum bool) ( + _srv *Server, _err error, _shouldRestart bool) { + var err error // Setup snapshot @@ -392,6 +395,7 @@ func NewServer( IgnoreInboundPeerInvMessages: _ignoreInboundPeerInvMessages, snapshot: _snapshot, nodeMessageChannel: _nodeMessageChan, + forceChecksum: _forceChecksum, } // The same timesource is used in the chain data structure and in the connection @@ -623,7 +627,7 @@ func (srv *Server) GetSnapshot(pp *Peer) { // If peer isn't assigned to any prefix, we will assign him now. if !syncingPrefix { - // We will assign the peer to a non-existing prefix. + // We will assign the peer to a non-existent prefix. 
for _, prefix = range StatePrefixes.StatePrefixesList { exists := false for _, prefixProgress := range srv.HyperSyncProgress.PrefixProgress { @@ -1298,16 +1302,29 @@ func (srv *Server) _handleSnapshot(pp *Peer, msg *MsgDeSoSnapshotData) { if err != nil { glog.Errorf("Server._handleSnapshot: Problem getting checksum bytes, error (%v)", err) } - if !reflect.DeepEqual(checksumBytes, srv.HyperSyncProgress.SnapshotMetadata.CurrentEpochChecksumBytes) { - if srv.nodeMessageChannel != nil { - srv.nodeMessageChannel <- NodeErase - } + if reflect.DeepEqual(checksumBytes, srv.HyperSyncProgress.SnapshotMetadata.CurrentEpochChecksumBytes) { + glog.Infof(CLog(Green, fmt.Sprintf("Server._handleSnapshot: State checksum matched "+ + "what was expected!"))) + } else { + // Checksums didn't match glog.Errorf(CLog(Red, fmt.Sprintf("Server._handleSnapshot: The final db checksum doesn't match the "+ "checksum received from the peer. It is likely that HyperSync encountered some unexpected error earlier. "+ "You should report this as an issue on DeSo github https://github.com/deso-protocol/core. It is also possible "+ "that the peer is misbehaving and sent invalid snapshot chunks. In either way, we'll restart the node and "+ - "attempt to HyperSync from the beginning."))) - return + "attempt to HyperSync from the beginning. Local db checksum %v; peer's snapshot checksum %v", + checksumBytes, srv.HyperSyncProgress.SnapshotMetadata.CurrentEpochChecksumBytes))) + if srv.forceChecksum { + // If forceChecksum is true we signal an erasure of the state and return here, + // which will cut off the sync. + if srv.nodeMessageChannel != nil { + srv.nodeMessageChannel <- NodeErase + } + return + } else { + // Otherwise, if forceChecksum is false, we error but then keep going. + glog.Errorf(CLog(Yellow, fmt.Sprintf("Server._handleSnapshot: Ignoring checksum mismatch because "+ + "--force-checksum is set to false."))) + } } // After syncing state from a snapshot, we will sync remaining blocks. 
To do so, we will @@ -1363,8 +1380,9 @@ func (srv *Server) _handleSnapshot(pp *Peer, msg *MsgDeSoSnapshotData) { srv.snapshot.Status.CurrentBlockHeight = msg.SnapshotMetadata.SnapshotBlockHeight srv.snapshot.Status.SaveStatus() - glog.Infof("server._handleSnapshot: FINAL snapshot checksum is (%v)", - srv.snapshot.CurrentEpochSnapshotMetadata.CurrentEpochChecksumBytes) + glog.Infof("server._handleSnapshot: FINAL snapshot checksum is (%v) (%v)", + srv.snapshot.CurrentEpochSnapshotMetadata.CurrentEpochChecksumBytes, + hex.EncodeToString(srv.snapshot.CurrentEpochSnapshotMetadata.CurrentEpochChecksumBytes)) // Take care of any callbacks that need to run once the snapshot is completed. srv.eventManager.snapshotCompleted() @@ -1397,7 +1415,7 @@ func (srv *Server) _startSync() { var bestPeer *Peer for _, peer := range srv.cmgr.GetAllPeers() { if !peer.IsSyncCandidate() { - glog.Infof("Peer is not sync candidate: %v", peer) + glog.Infof("Peer is not sync candidate: %v (isOutbound: %v)", peer, peer.isOutbound) continue } @@ -1462,7 +1480,7 @@ func (srv *Server) _handleNewPeer(pp *Peer) { srv._startSync() } if !isSyncCandidate { - glog.Infof("Peer is not sync candidate: %v", pp) + glog.Infof("Peer is not sync candidate: %v (isOutbound: %v)", pp, pp.isOutbound) } } diff --git a/lib/snapshot.go b/lib/snapshot.go index 60756e30b..04d2f9288 100644 --- a/lib/snapshot.go +++ b/lib/snapshot.go @@ -14,6 +14,7 @@ import ( "github.com/oleiade/lane" "github.com/pkg/errors" "golang.org/x/sync/semaphore" + "math" "path/filepath" "reflect" "runtime" @@ -780,7 +781,7 @@ func (snap *Snapshot) GetAncestralRecordsKeyWithTxn(txn *badger.Txn, key []byte, // DBSetAncestralRecordWithTxn sets a record corresponding to our ExistingRecordsMap. // We append a []byte{1} to the end to indicate that this is an existing record, and -// we append a []byte{0} to the end to indicate that this is a NON-existing record. We +// we append a []byte{0} to the end to indicate that this is a NON-existent record. 
We // need to create this distinction to tell the difference between a record that was // updated to have an *empty* value vs a record that was deleted entirely. func (snap *Snapshot) DBSetAncestralRecordWithTxn( @@ -1627,8 +1628,8 @@ type AncestralRecordValue struct { // main db flushes and ancestral records flushes. For each main db flush transaction, we // will build an ancestral cache that contains maps of historical values that were in the // main db before we flushed. In particular, we distinguish between existing and -// non-existing records. Existing records are those records that had already been present -// in the main db prior to the flush. Non-existing records were not present in the main db, +// non-existent records. Existing records are those records that had already been present +// in the main db prior to the flush. Non-existent records were not present in the main db, // and the flush added them for the first time. // // The AncestralCache is stored in the Snapshot struct in a concurrency-safe deque (bi-directional @@ -2048,8 +2049,7 @@ func (migration *EncoderMigration) Initialize(mainDb *badger.DB, snapshotDb *bad // sanity-check that node has the same "version" of migration version map. exists := false for _, migrationHeight := range params.EncoderMigrationHeightsList { - if migrationChecksum.BlockHeight == migrationHeight.Height && - migrationChecksum.Version == migrationHeight.Version { + if migrationChecksum.Version == migrationHeight.Version { exists = true } } @@ -2071,10 +2071,15 @@ func (migration *EncoderMigration) Initialize(mainDb *badger.DB, snapshotDb *bad // Check if there are any outstanding migrations apart from the migrations we've saved in the db. // If so, add them to the migrationChecksums. 
for _, migrationHeight := range params.EncoderMigrationHeightsList { - if migrationHeight.Height > blockHeight { + // We ignore migrations with height equal to the max because these migrations + // will have their block heights modified to a rational value before they're + // supposed to trigger, and thus we should not store them in the db. + if migrationHeight.Height != math.MaxUint32 && + migrationHeight.Height > blockHeight { exists := false for _, migrationChecksum := range migration.migrationChecksums { - if migrationChecksum.BlockHeight == migrationHeight.Height { + // If we already have a migration with the same version in our migrationChecksums, we set exists to true. + if migrationChecksum.Version == migrationHeight.Version { exists = true break } diff --git a/lib/txindex.go b/lib/txindex.go index a40d1a846..1d1090588 100644 --- a/lib/txindex.go +++ b/lib/txindex.go @@ -34,6 +34,7 @@ type TXIndex struct { // Shutdown channel stopUpdateChannel chan struct{} + killed bool } func NewTXIndex(coreChain *Blockchain, params *DeSoParams, dataDirectory string) ( @@ -143,6 +144,7 @@ func NewTXIndex(coreChain *Blockchain, params *DeSoParams, dataDirectory string) CoreChain: coreChain, Params: params, stopUpdateChannel: make(chan struct{}), + killed: false, }, nil } @@ -191,6 +193,7 @@ func (txi *TXIndex) Start() { func (txi *TXIndex) Stop() { glog.Info("TXIndex: Stopping updates and closing database") + txi.killed = true txi.stopUpdateChannel <- struct{}{} txi.updateWaitGroup.Wait() } @@ -281,6 +284,10 @@ func (txi *TXIndex) Update() error { // For each of the blocks we're removing, delete the transactions from // the transaction index. for _, blockToDetach := range detachBlocks { + if txi.killed { + glog.Infof(CLog(Yellow, "TxIndex: Update: Killed while detaching blocks")) + break + } // Go through each txn in the block and delete its mappings from our // txindex. 
glog.V(1).Infof("Update: Detaching block (height: %d, hash: %v)", @@ -370,6 +377,10 @@ func (txi *TXIndex) Update() error { // and add their mappings to our txn index. Compute any metadata that might // be useful. for _, blockToAttach := range attachBlocks { + if txi.killed { + glog.Infof(CLog(Yellow, "TxIndex: Update: Killed while attaching blocks")) + break + } if blockToAttach.Height%1 == 0 { glog.Infof("Update: Txindex progress: block %d / %d", blockToAttach.Height, blockTipNode.Height) diff --git a/lib/utils.go b/lib/utils.go index 4b606351e..bfe8aaa35 100644 --- a/lib/utils.go +++ b/lib/utils.go @@ -6,6 +6,8 @@ import ( "github.com/btcsuite/btcd/chaincfg" "github.com/btcsuite/btcutil/hdkeychain" "github.com/unrolled/secure" + "math/big" + "strings" ) const SECURE_MIDDLEWARE_RESTRICTIVE_CONTENT_SECURITY_POLICY = "default-src 'self'" @@ -122,3 +124,40 @@ func ComputeKeysFromSeedWithNet(seedBytes []byte, index uint32, isTestnet bool) return pubKey, privKey, btcDepositAddress, nil } + +func GetNumDigits(val *big.Int) int { + quotient := big.NewInt(0).Set(val) + zero := big.NewInt(0) + ten := big.NewInt(10) + numDigits := 0 + for quotient.Cmp(zero) != 0 { + numDigits += 1 + quotient.Div(quotient, ten) + } + return numDigits +} + +// Given a value v that is a scaled uint256 with the provided scaling factor, this prints the decimal representation +// of v as a string +// Ex: if v = 12345 and scalingFactor = 100, then this outputs 123.45 +func FormatScaledUint256AsDecimalString(v *big.Int, scalingFactor *big.Int) string { + wholeNumber := big.NewInt(0).Div(v, scalingFactor) + decimalPart := big.NewInt(0).Mod(v, scalingFactor) + + decimalPartIsZero := decimalPart.Cmp(big.NewInt(0)) == 0 + + scalingFactorDigits := GetNumDigits(scalingFactor) + decimalPartAsString := fmt.Sprintf("%d", decimalPart) + + // Left pad the decimal part with zeros + if !decimalPartIsZero && len(decimalPartAsString) != scalingFactorDigits { + decimalLeadingZeros := strings.Repeat("0", 
scalingFactorDigits-len(decimalPartAsString)-1) + decimalPartAsString = fmt.Sprintf("%v%v", decimalLeadingZeros, decimalPartAsString) + } + + // Trim trailing zeros + if !decimalPartIsZero { + decimalPartAsString = strings.TrimRight(decimalPartAsString, "0") + } + return fmt.Sprintf("%d.%v", wholeNumber, decimalPartAsString) +} diff --git a/migrate/20220711054349_recoverable_signatures_and_unlimited_derived_keys.go b/migrate/20220711054349_recoverable_signatures_and_unlimited_derived_keys.go new file mode 100644 index 000000000..948606b4e --- /dev/null +++ b/migrate/20220711054349_recoverable_signatures_and_unlimited_derived_keys.go @@ -0,0 +1,50 @@ +package migrate + +import ( + "github.com/go-pg/pg/v10/orm" + migrations "github.com/robinjoseph08/go-pg-migrations/v3" +) + +func init() { + up := func(db orm.DB) error { + if _, err := db.Exec(` + ALTER TABLE pg_derived_keys + ADD COLUMN block_height BIGINT; + `); err != nil { + return err + } + + if _, err := db.Exec(` + ALTER TABLE pg_transactions + ADD COLUMN recovery_id INT, + ADD COLUMN is_recoverable BOOL; + `); err != nil { + return err + } + + return nil + } + + down := func(db orm.DB) error { + if _, err := db.Exec(` + ALTER TABLE pg_derived_keys + DROP COLUMN block_height; + `); err != nil { + return err + } + + if _, err := db.Exec(` + ALTER TABLE pg_transactions + DROP COLUMN recovery_id, + DROP COLUMN is_recoverable; + `); err != nil { + return err + } + + return nil + } + + opts := migrations.MigrationOptions{} + + migrations.Register("20220711054349_recoverable_signatures_and_unlimited_derived_keys", up, down, opts) +} From 1b2170e8d4655a1b8cc2391e4084ee9b77b1cee0 Mon Sep 17 00:00:00 2001 From: Lazy Nina <81658138+lazynina@users.noreply.github.com> Date: Mon, 12 Sep 2022 15:52:54 -0400 Subject: [PATCH 06/11] Set fork heights for DeSoUnlimitedDerivedKeysBlockHeight (#401) --- lib/constants.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/constants.go b/lib/constants.go 
index 759f131c0..98c9cac60 100644 --- a/lib/constants.go +++ b/lib/constants.go @@ -4,7 +4,6 @@ import ( "encoding/hex" "fmt" "log" - "math" "math/big" "os" "path/filepath" @@ -715,8 +714,8 @@ var MainnetForkHeights = ForkHeights{ ParamUpdaterRefactorBlockHeight: uint32(141193), - // TODO: ADD FINAL DATE & TIME HERE - DeSoUnlimitedDerivedKeysBlockHeight: uint32(math.MaxUint32), + // Mon Sept 19 @ 12pm PST + DeSoUnlimitedDerivedKeysBlockHeight: uint32(166066), // Be sure to update EncoderMigrationHeights as well via // GetEncoderMigrationHeights if you're modifying schema. @@ -965,8 +964,8 @@ var TestnetForkHeights = ForkHeights{ ParamUpdaterRefactorBlockHeight: uint32(373536), - // TODO: ADD FINAL DATE & TIME HERE - DeSoUnlimitedDerivedKeysBlockHeight: uint32(math.MaxUint32), + // Tues Sept 13 @ 10am PT + DeSoUnlimitedDerivedKeysBlockHeight: uint32(467217), // Be sure to update EncoderMigrationHeights as well via // GetEncoderMigrationHeights if you're modifying schema. From 6ab58918eb29d0c18df8ac19565c3a8ca39f9aa7 Mon Sep 17 00:00:00 2001 From: lazynina Date: Mon, 12 Sep 2022 15:57:26 -0400 Subject: [PATCH 07/11] [stable] Release 2.2.7 From 9cd60876e3e7a47b1fa0bf3e47f4a44408b515df Mon Sep 17 00:00:00 2001 From: lazynina Date: Mon, 12 Sep 2022 15:58:17 -0400 Subject: [PATCH 08/11] [stable] Release 3.0.0 From def66beb63ac7bd64148c12cef38bd47cbff854a Mon Sep 17 00:00:00 2001 From: Piotr Nojszewski <29924594+AeonSw4n@users.noreply.github.com> Date: Tue, 13 Sep 2022 16:00:39 -0700 Subject: [PATCH 09/11] Additional persisting of state and migration checksums (#402) * Additional persisting of state and migration checksums * make public --- lib/miner.go | 2 +- lib/snapshot.go | 47 ++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/lib/miner.go b/lib/miner.go index 9a0b866a1..b7fa22a85 100644 --- a/lib/miner.go +++ b/lib/miner.go @@ -124,7 +124,7 @@ func (desoMiner *DeSoMiner) _mineSingleBlock(threadIndex uint32) 
(_diffTarget *B // Compute a few hashes before checking if we've solved the block. timeBefore := time.Now() bestHash, bestNonce, err := FindLowestHash(header, desoMiner.params.MiningIterationsPerCycle) - glog.V(2).Infof("DeSoMiner._startThread: Time per iteration: %v", time.Since(timeBefore)) + glog.V(3).Infof("DeSoMiner._startThread: Time per iteration: %v", time.Since(timeBefore)) if err != nil { // If there's an error just log it and break out. glog.Error(errors.Wrapf(err, "DeSoMiner._startThread: Problem while mining: ")) diff --git a/lib/snapshot.go b/lib/snapshot.go index 04d2f9288..7e8bd4905 100644 --- a/lib/snapshot.go +++ b/lib/snapshot.go @@ -181,7 +181,8 @@ func NewSnapshot(mainDb *badger.DB, mainDbDirectory string, snapshotBlockHeightP } operationChannel := &SnapshotOperationChannel{} - if err := operationChannel.Initialize(snapshotDb, &snapshotDbMutex); err != nil { + // Initialize the SnapshotOperationChannel. We don't set any of the handlers yet because we don't have a snapshot instance yet. + if err := operationChannel.Initialize(snapshotDb, &snapshotDbMutex, nil, nil); err != nil { return nil, errors.Wrapf(err, "NewSnapshot: Problem reading SnapshotOperationChannel"), true } @@ -242,6 +243,8 @@ func NewSnapshot(mainDb *badger.DB, mainDbDirectory string, snapshotBlockHeightP timer: timer, ExitChannel: make(chan bool), } + // Now we will set the handler for finishing all operations in the operation channel. + snap.OperationChannel.SetFinishAllOperationsHandler(snap.PersistChecksumAndMigration) // Run the snapshot main loop. 
go snap.Run() @@ -443,6 +446,16 @@ func (snap *Snapshot) StartAncestralRecordsFlush(shouldIncrement bool) { }) } +func (snap *Snapshot) PersistChecksumAndMigration() error { + if err := snap.Checksum.SaveChecksum(); err != nil { + return errors.Wrapf(err, "PersistChecksumAndMigration: Problem saving checksum") + } + if err := snap.Migrations.SaveMigrations(); err != nil { + return errors.Wrapf(err, "PersistChecksumAndMigration: Problem saving migrations") + } + return nil +} + func (snap *Snapshot) PrintChecksum(text string) { snap.OperationChannel.EnqueueOperation(&SnapshotOperation{ operationType: SnapshotOperationChecksumPrint, @@ -1723,15 +1736,22 @@ type SnapshotOperationChannel struct { snapshotDb *badger.DB snapshotDbMutex *sync.Mutex + + startOperationHandler func(op *SnapshotOperation) error + finishAllOperationsHandler func() error } -func (opChan *SnapshotOperationChannel) Initialize(snapshotDb *badger.DB, snapshotDbMutex *sync.Mutex) error { +func (opChan *SnapshotOperationChannel) Initialize(snapshotDb *badger.DB, snapshotDbMutex *sync.Mutex, + startOperationHandler func(op *SnapshotOperation) error, finishAllOperationsHandler func() error) error { opChan.OperationChannel = make(chan *SnapshotOperation, 100000) opChan.StateSemaphore = 0 opChan.snapshotDb = snapshotDb opChan.snapshotDbMutex = snapshotDbMutex + opChan.startOperationHandler = startOperationHandler + opChan.finishAllOperationsHandler = finishAllOperationsHandler + if snapshotDb == nil || snapshotDbMutex == nil { opChan.snapshotDbMutex = &sync.Mutex{} return nil @@ -1762,6 +1782,14 @@ func (opChan *SnapshotOperationChannel) Initialize(snapshotDb *badger.DB, snapsh return nil } +func (opChan *SnapshotOperationChannel) SetStartOperationHandler(handler func(op *SnapshotOperation) error) { + opChan.startOperationHandler = handler +} + +func (opChan *SnapshotOperationChannel) SetFinishAllOperationsHandler(handler func() error) { + opChan.finishAllOperationsHandler = handler +} + func (opChan 
*SnapshotOperationChannel) SaveOperationChannel() error { opChan.snapshotDbMutex.Lock() defer opChan.snapshotDbMutex.Unlock() @@ -1785,7 +1813,14 @@ func (opChan *SnapshotOperationChannel) EnqueueOperation(op *SnapshotOperation) } func (opChan *SnapshotOperationChannel) DequeueOperationStateless() *SnapshotOperation { - return <-opChan.OperationChannel + op := <-opChan.OperationChannel + if opChan.startOperationHandler != nil { + if err := opChan.startOperationHandler(op); err != nil { + glog.Errorf("SnapshotOperationChannel.DequeueOperationStateless: Problem executing startOperationHandler "+ + "on operation (%v), error (%v)", op, err) + } + } + return op } func (opChan *SnapshotOperationChannel) FinishOperation() { @@ -1797,6 +1832,12 @@ func (opChan *SnapshotOperationChannel) FinishOperation() { if err := opChan.SaveOperationChannel(); err != nil { glog.Errorf("SnapshotOperationChannel.FinishOperation: Problem saving StateSemaphore to db, error (%v)", err) } + // We will invoke the external finishAllOperationsHandler if it is set. 
+ if opChan.finishAllOperationsHandler != nil { + if err := opChan.finishAllOperationsHandler(); err != nil { + glog.Errorf("SnapshotOperationChannel.FinishOperation: Problem executing finishAllOperationsHandler, error (%v)", err) + } + } } } From fe9413705936af25fe91dc100e1a94a0922dd83f Mon Sep 17 00:00:00 2001 From: lazynina Date: Tue, 13 Sep 2022 19:14:29 -0400 Subject: [PATCH 10/11] [stable] Release 3.0.1 From c41575424b21548595ebbf1c57f0a2e587df9d72 Mon Sep 17 00:00:00 2001 From: MichelMajdalani Date: Sun, 21 Aug 2022 19:06:37 -0700 Subject: [PATCH 11/11] WIP: add DBAdapter logic --- go.sum | 2 + lib/block_view.go | 24 ++ lib/block_view_flush.go | 50 +++ lib/block_view_reaction.go | 287 +++++++++++++ lib/block_view_reaction_test.go | 720 ++++++++++++++++++++++++++++++++ lib/block_view_types.go | 70 +++- lib/blockchain.go | 65 ++- lib/db_adapter.go | 37 ++ lib/db_utils.go | 227 +++++++++- lib/errors.go | 4 + lib/mempool.go | 28 ++ lib/network.go | 99 ++++- lib/network_test.go | 58 +++ lib/notifier.go | 15 + lib/postgres.go | 106 +++++ 15 files changed, 1783 insertions(+), 9 deletions(-) create mode 100644 lib/block_view_reaction.go create mode 100644 lib/block_view_reaction_test.go diff --git a/go.sum b/go.sum index 0a34a3666..76adb6f21 100644 --- a/go.sum +++ b/go.sum @@ -642,6 +642,8 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 
golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 h1:Hir2P/De0WpUhtrKGGjvSb2YxUgyZ7EFOSLIcSSpiwE= diff --git a/lib/block_view.go b/lib/block_view.go index 147840688..a4f062a78 100644 --- a/lib/block_view.go +++ b/lib/block_view.go @@ -64,6 +64,9 @@ type UtxoView struct { // Like data LikeKeyToLikeEntry map[LikeKey]*LikeEntry + // React data + ReactionKeyToReactionEntry map[ReactionKey]*ReactionEntry + // Repost data RepostKeyToRepostEntry map[RepostKey]*RepostEntry @@ -145,6 +148,9 @@ func (bav *UtxoView) _ResetViewMappingsAfterFlush() { // Like data bav.LikeKeyToLikeEntry = make(map[LikeKey]*LikeEntry) + // React data + bav.ReactionKeyToReactionEntry = make(map[ReactionKey]*ReactionEntry) + // Repost data bav.RepostKeyToRepostEntry = make(map[RepostKey]*RepostEntry) @@ -281,6 +287,16 @@ func (bav *UtxoView) CopyUtxoView() (*UtxoView, error) { newView.LikeKeyToLikeEntry[likeKey] = &newLikeEntry } + // Copy the react data + newView.ReactionKeyToReactionEntry = make(map[ReactionKey]*ReactionEntry, len(bav.ReactionKeyToReactionEntry)) + for reactKey, reactEntry := range bav.ReactionKeyToReactionEntry { + if reactEntry == nil { + continue + } + newReactEntry := *reactEntry + newView.ReactionKeyToReactionEntry[reactKey] = &newReactEntry + } + // Copy the repost data newView.RepostKeyToRepostEntry = make(map[RepostKey]*RepostEntry, len(bav.RepostKeyToRepostEntry)) for repostKey, repostEntry := range bav.RepostKeyToRepostEntry { @@ -948,6 +964,10 @@ func (bav *UtxoView) DisconnectTransaction(currentTxn *MsgDeSoTxn, txnHash *Bloc return bav._disconnectLike( OperationTypeLike, currentTxn, txnHash, utxoOpsForTxn, blockHeight) + } else if currentTxn.TxnMeta.GetTxnType() == TxnTypeReact { + return bav._disconnectReact( + OperationTypeReact, currentTxn, txnHash, utxoOpsForTxn, blockHeight) + } else if currentTxn.TxnMeta.GetTxnType() == TxnTypeCreatorCoin { return bav._disconnectCreatorCoin( OperationTypeCreatorCoin, currentTxn, txnHash, utxoOpsForTxn, blockHeight) @@ -2310,6 
+2330,10 @@ func (bav *UtxoView) _connectTransaction(txn *MsgDeSoTxn, txHash *BlockHash, totalInput, totalOutput, utxoOpsForTxn, err = bav._connectLike(txn, txHash, blockHeight, verifySignatures) + } else if txn.TxnMeta.GetTxnType() == TxnTypeReact { + totalInput, totalOutput, utxoOpsForTxn, err = + bav._connectReact(txn, txHash, blockHeight, verifySignatures) + } else if txn.TxnMeta.GetTxnType() == TxnTypeCreatorCoin { totalInput, totalOutput, utxoOpsForTxn, err = bav._connectCreatorCoin( diff --git a/lib/block_view_flush.go b/lib/block_view_flush.go index bf804cfc9..194aae826 100644 --- a/lib/block_view_flush.go +++ b/lib/block_view_flush.go @@ -66,6 +66,9 @@ func (bav *UtxoView) FlushToDbWithTxn(txn *badger.Txn, blockHeight uint64) error if err := bav._flushLikeEntriesToDbWithTxn(txn); err != nil { return err } + if err := bav._flushReactEntriesToDbWithTxn(txn); err != nil { + return err + } if err := bav._flushFollowEntriesToDbWithTxn(txn); err != nil { return err } @@ -411,6 +414,53 @@ func (bav *UtxoView) _flushLikeEntriesToDbWithTxn(txn *badger.Txn) error { return nil } +func (bav *UtxoView) _flushReactEntriesToDbWithTxn(txn *badger.Txn) error { + + // Go through all the entries in the ReactionKeyToReactionEntry map. + for reactKeyIter, reactEntry := range bav.ReactionKeyToReactionEntry { + // Make a copy of the iterator since we make references to it below. + reactKey := reactKeyIter + + // Sanity-check that the ReactKey computed from the ReactEntry is + // equal to the ReactKey that maps to that entry. + reactKeyInEntry := MakeReactionKey(reactEntry.ReactorPubKey, *reactEntry.ReactedPostHash, reactEntry.ReactEmoji) + if reactKeyInEntry != reactKey { + return fmt.Errorf("_flushReactEntriesToDbWithTxn: ReactEntry has "+ + "ReactKey: %v, which doesn't match the ReactKeyToReactEntry map key %v", + &reactKeyInEntry, &reactKey) + } + + // Delete the existing mappings in the db for this ReactKey. 
They will be re-added + if the corresponding entry in memory has isDeleted=false. + if err := DbDeleteReactMappingsWithTxn( + txn, reactKey.ReactorPubKey[:], reactKey.ReactedPostHash, reactKey.ReactEmoji); err != nil { + + return errors.Wrapf( + err, "_flushReactEntriesToDbWithTxn: Problem deleting mappings "+ + "for ReactKey: %v: ", &reactKey) + } + } + + // Go through all the entries in the ReactionKeyToReactionEntry map. + for _, reactEntry := range bav.ReactionKeyToReactionEntry { + + if reactEntry.isDeleted { + // If the ReactEntry has isDeleted=true then there's nothing to do because + // we already deleted the entry above. + } else { + // If the ReactEntry has (isDeleted = false) then we put the corresponding + // mappings for it into the db. + if err := DbPutReactMappingsWithTxn( + txn, reactEntry.ReactorPubKey, *reactEntry.ReactedPostHash, reactEntry.ReactEmoji); err != nil { + + return err + } + } + } + + return nil +} + func (bav *UtxoView) _flushFollowEntriesToDbWithTxn(txn *badger.Txn) error { // Go through all the entries in the FollowKeyToFollowEntry map. diff --git a/lib/block_view_reaction.go b/lib/block_view_reaction.go new file mode 100644 index 000000000..b38a1f063 --- /dev/null +++ b/lib/block_view_reaction.go @@ -0,0 +1,287 @@ +package lib + +import ( + "bytes" + "fmt" + "github.com/golang/glog" + "github.com/pkg/errors" + "reflect" +) + +func (bav *UtxoView) _getReactionEntryForReactionKey(reactionKey *ReactionKey) *ReactionEntry { + // If an entry exists in the in-memory map, return the value of that mapping. + mapValue, existsMapValue := bav.ReactionKeyToReactionEntry[*reactionKey] + if existsMapValue { + return mapValue + } + + adapter := bav.GetDbAdapter() + + // If we get here it means no value exists in our in-memory map. In this case, + // defer to the db. If a mapping exists in the db, return it. If not, return + // nil. Either way, save the value to the in-memory view mapping so it can be reused later.
+ reactionExists := false + if adapter.postgresDb != nil { + reactionExists = adapter.postgresDb.GetReaction(reactionKey.ReactorPubKey[:], &reactionKey.ReactedPostHash, reactionKey.ReactEmoji) != nil + } else { + reactionExists = DbGetReactorPubKeyToPostHashMapping(adapter.badgerDb, reactionKey.ReactorPubKey[:], reactionKey.ReactedPostHash, reactionKey.ReactEmoji) != nil + } + + if reactionExists { + reactionEntry := ReactionEntry{ + ReactorPubKey: reactionKey.ReactorPubKey[:], + ReactedPostHash: &reactionKey.ReactedPostHash, + ReactEmoji: reactionKey.ReactEmoji, + } + bav._setReactionEntryMappings(&reactionEntry) + return &reactionEntry + } + + return nil +} + +func (bav *UtxoView) _setReactionEntryMappings(reactionEntry *ReactionEntry) { + // This function shouldn't be called with nil. + if reactionEntry == nil { + glog.Errorf("_setReactionEntryMappings: Called with nil ReactionEntry; " + + "this should never happen.") + return + } + + reactionKey := MakeReactionKey(reactionEntry.ReactorPubKey, *reactionEntry.ReactedPostHash, reactionEntry.ReactEmoji) + bav.ReactionKeyToReactionEntry[reactionKey] = reactionEntry +} + +func (bav *UtxoView) _deleteReactionEntryMappings(reactionEntry *ReactionEntry) { + + // Create a tombstone entry. + tombstoneReactionEntry := *reactionEntry + tombstoneReactionEntry.isDeleted = true + + // Set the mappings to point to the tombstone entry. + bav._setReactionEntryMappings(&tombstoneReactionEntry) +} + +func (bav *UtxoView) GetReactionByReader(readerPK []byte, postHash *BlockHash, reactEmoji rune) bool { + // Get react state. 
+ reactionKey := MakeReactionKey(readerPK, *postHash, reactEmoji) + reactionEntry := bav._getReactionEntryForReactionKey(&reactionKey) + return reactionEntry != nil && !reactionEntry.isDeleted +} + +func (bav *UtxoView) GetReactorsForPostHash(postHash *BlockHash, reactionEmoji rune) (_ReactorPubKeys [][]byte, _err error) { + // Returns the public key of the users that reacted to a post with a specific emoji reaction + adapter := bav.GetDbAdapter() + + reactions, _ := adapter.GetReactionsForPost(postHash, reactionEmoji) + + for _, reaction := range reactions { + reactKey := MakeReactionKey(reaction.ReactorPubKey, *reaction.ReactedPostHash, reactionEmoji) + if _, exists := bav.ReactionKeyToReactionEntry[reactKey]; !exists { + bav._setReactionEntryMappings(reaction) + } + } + + // Iterate over the view and create the final list to return + // because there might be values in the view that weren't propagated to the db. + var reactorPubKeys [][]byte + for _, reactionEntry := range bav.ReactionKeyToReactionEntry { + if !reactionEntry.isDeleted && bytes.Equal(reactionEntry.ReactedPostHash[:], postHash[:]) { + reactorPubKeys = append(reactorPubKeys, reactionEntry.ReactorPubKey) + } + } + + return reactorPubKeys, nil +} + +func (bav *UtxoView) _connectReact( + txn *MsgDeSoTxn, txHash *BlockHash, blockHeight uint32, verifySignatures bool) ( + _totalInput uint64, _totalOutput uint64, _utxoOps []*UtxoOperation, _err error) { + //TODO (Michel) Add a block height restriction before doing anything else in this function. You can see a fork height in constants.go + + // Check that the transaction has the right TxnType. + if txn.TxnMeta.GetTxnType() != TxnTypeReact { + return 0, 0, nil, fmt.Errorf("_connectReact: called with bad TxnType %s", + txn.TxnMeta.GetTxnType().String()) + } + txMeta := txn.TxnMeta.(*ReactMetadata) + + // Connect basic txn to get the total input and the total output without + // considering the transaction metadata. 
+ totalInput, totalOutput, utxoOpsForTxn, err := bav._connectBasicTransfer( + txn, txHash, blockHeight, verifySignatures) + if err != nil { + return 0, 0, nil, errors.Wrapf(err, "_connectReact: ") + } + + if verifySignatures { + // _connectBasicTransfer has already checked that the transaction is + // signed by the top-level public key, which we take to be the sender's + // public key so there is no need to verify anything further. + } + + // At this point the inputs and outputs have been processed. Now we need to handle + // the metadata. + + // There are two main checks that need to be done before allowing a reaction: + // - Check that the post exists + // - Check that the person hasn't already reacted with the same emoji + + //TODO (Michel) Validate that EmojiReaction is a valid rune for an emoji before proceeding. + + // Check that the post to react actually exists. + existingPostEntry := bav.GetPostEntryForPostHash(txMeta.PostHash) + if existingPostEntry == nil || existingPostEntry.isDeleted { + return 0, 0, nil, errors.Wrapf( + RuleErrorCannotReactNonexistentPost, + "_connectReact: Post hash: %v", txMeta.PostHash) + } + + // At this point the code diverges and considers the react flows differently + // since the presence of an existing react entry has a different effect in either case. + + reactionKey := MakeReactionKey(txn.PublicKey, *txMeta.PostHash, txMeta.EmojiReaction) + existingReactEntry := bav._getReactionEntryForReactionKey(&reactionKey) + // We don't need to make a copy of the post entry because all we're modifying is the emoji counts, + // which isn't stored in any of our mappings. But we make a copy here just because it's a little bit + // more foolproof. + updatedPostEntry := *existingPostEntry + + if txMeta.IsRemove { + // Ensure that there *is* an existing emoji entry to delete. 
+ if existingReactEntry == nil || existingReactEntry.isDeleted { + return 0, 0, nil, errors.Wrapf( + RuleErrorCannotRemoveReactionWithoutAnExistingReaction, + "_connectReact: React key: %v", &reactionKey) + } + + // Now that we know there is a react entry, we delete it and decrement the emoji count. + bav._deleteReactionEntryMappings(existingReactEntry) + updatedPostEntry.EmojiCount[txMeta.EmojiReaction] -= 1 + } else { + // Ensure that there *is not* an existing react entry. + if existingReactEntry != nil && !existingReactEntry.isDeleted { + return 0, 0, nil, errors.Wrapf( + RuleErrorReactEntryAlreadyExists, + "_connectReact: React key: %v", &reactionKey) + } + + // Now that we know there is no pre-existing react entry, we can create one and + // increment the reaction count on the reacted post. + reactEntry := &ReactionEntry{ + ReactorPubKey: txn.PublicKey, + ReactedPostHash: txMeta.PostHash, + ReactEmoji: txMeta.EmojiReaction, + } + bav._setReactionEntryMappings(reactEntry) + if updatedPostEntry.EmojiCount == nil { + updatedPostEntry.EmojiCount = make(map[rune]uint64) + } + updatedPostEntry.EmojiCount[txMeta.EmojiReaction] += 1 + } + + // Set the updated post entry so it has the new emoji count. + bav._setPostEntryMappings(&updatedPostEntry) + + // Add an operation to the list at the end indicating we've added a reaction.
+ utxoOpsForTxn = append(utxoOpsForTxn, &UtxoOperation{ + Type: OperationTypeReact, + PrevReactEntry: existingReactEntry, + PrevEmojiCount: existingPostEntry.EmojiCount, + }) + + return totalInput, totalOutput, utxoOpsForTxn, nil +} + +func (bav *UtxoView) _disconnectReact( + operationType OperationType, currentTxn *MsgDeSoTxn, txnHash *BlockHash, + utxoOpsForTxn []*UtxoOperation, blockHeight uint32) error { + + //TODO (Michel) Add block height check + + // Verify that the last operation is a Reaction operation + if len(utxoOpsForTxn) == 0 { + return fmt.Errorf("_disconnectReact: utxoOperations are missing") + } + operationIndex := len(utxoOpsForTxn) - 1 + if utxoOpsForTxn[operationIndex].Type != OperationTypeReact { + return fmt.Errorf("_disconnectReact: Trying to revert "+ + "OperationTypeReact but found type %v", + utxoOpsForTxn[operationIndex].Type) + } + + // Now we know the txMeta is a React + txMeta := currentTxn.TxnMeta.(*ReactMetadata) + + //TODO (Michel) Check that the post isn't deleted. + + // Before we do anything, let's get the post so we can adjust the emoji map counter later. + reactedPostEntry := bav.GetPostEntryForPostHash(txMeta.PostHash) + if reactedPostEntry == nil { + return fmt.Errorf("_disconnectReact: Error getting post: %v", txMeta.PostHash) + } + + // Here we diverge and consider the react and unreact cases separately. + if txMeta.IsRemove { + // If this is an remove we just need to add back the previous react entry and react + // count. We do some sanity checks first though to be extra safe. + + prevReactEntry := utxoOpsForTxn[operationIndex].PrevReactEntry + // Sanity check: verify that the user on the reactEntry matches the transaction sender. 
+ if !reflect.DeepEqual(prevReactEntry.ReactorPubKey, currentTxn.PublicKey) { + return fmt.Errorf("_disconnectReact: User public key on "+ + "ReactionEntry was %s but the PublicKey on the txn was %s", + PkToStringBoth(prevReactEntry.ReactorPubKey), + PkToStringBoth(currentTxn.PublicKey)) + } + + // Sanity check: verify that the post hash on the prevReactEntry matches the transaction's. + if !reflect.DeepEqual(prevReactEntry.ReactedPostHash, txMeta.PostHash) { + return fmt.Errorf("_disconnectReact: Reacted post hash on "+ + "ReactionEntry was %s but the ReactedPostHash on the txn was %s", + prevReactEntry.ReactedPostHash, txMeta.PostHash) + } + + // Set the react entry and react count to their previous state. + bav._setReactionEntryMappings(prevReactEntry) + reactedPostEntry.EmojiCount = utxoOpsForTxn[operationIndex].PrevEmojiCount + bav._setPostEntryMappings(reactedPostEntry) + } else { + // If this is a normal "react," we do some sanity checks and then delete the entry. + + // Get the ReactionEntry. If we don't find it or isDeleted=true, that's an error. + reactKey := MakeReactionKey(currentTxn.PublicKey, *txMeta.PostHash, txMeta.EmojiReaction) + reactEntry := bav._getReactionEntryForReactionKey(&reactKey) + if reactEntry == nil || reactEntry.isDeleted { + return fmt.Errorf("_disconnectReact: ReactionEntry for "+ + "reactKey %v was found to be nil or isDeleted not set appropriately: %v", + &reactKey, reactEntry) + } + + // Sanity check: verify that the user on the reactEntry matches the transaction sender. + if !reflect.DeepEqual(reactEntry.ReactorPubKey, currentTxn.PublicKey) { + return fmt.Errorf("_disconnectReact: User public key on "+ + "ReactionEntry was %s but the PublicKey on the txn was %s", + PkToStringBoth(reactEntry.ReactorPubKey), + PkToStringBoth(currentTxn.PublicKey)) + } + + // Sanity check: verify that the post hash on the reactEntry matches the transaction's.
+ if !reflect.DeepEqual(reactEntry.ReactedPostHash, txMeta.PostHash) { + return fmt.Errorf("_disconnectReact: Reacted post hash on "+ + "ReactionEntry was %s but the ReactedPostHash on the txn was %s", + reactEntry.ReactedPostHash, txMeta.PostHash) + } + + // Now that we're confident the ReactionEntry lines up with the transaction we're + // rolling back, delete the mappings and set the reaction counter to its previous value. + bav._deleteReactionEntryMappings(reactEntry) + reactedPostEntry.EmojiCount = utxoOpsForTxn[operationIndex].PrevEmojiCount + bav._setPostEntryMappings(reactedPostEntry) + } + + // Now revert the basic transfer with the remaining operations. Cut off + // the React operation at the end since we just reverted it. + return bav._disconnectBasicTransfer( + currentTxn, txnHash, utxoOpsForTxn[:operationIndex], blockHeight) +} diff --git a/lib/block_view_reaction_test.go b/lib/block_view_reaction_test.go new file mode 100644 index 000000000..aecb2dc97 --- /dev/null +++ b/lib/block_view_reaction_test.go @@ -0,0 +1,720 @@ +package lib + +import ( + "fmt" + "github.com/stretchr/testify/require" + "golang.org/x/text/unicode/norm" + "testing" +) + +var ( + HappyReaction = []rune(norm.NFC.String(string('😊')))[0] + SadReaction = []rune(norm.NFC.String(string('😥')))[0] + AngryReaction = []rune(norm.NFC.String(string('😠')))[0] + SurprisedReaction = []rune(norm.NFC.String(string('😮')))[0] +) + +func _doReactTxn(testMeta *TestMeta, feeRateNanosPerKB uint64, senderPkBase58Check string, + postHash BlockHash, senderPrivBase58Check string, isRemove bool, emojiReaction rune) ( + _utxoOps []*UtxoOperation, _txn *MsgDeSoTxn, _height uint32, _err error) { + + require := require.New(testMeta.t) + + senderPkBytes, _, err := Base58CheckDecode(senderPkBase58Check) + require.NoError(err) + + utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(err) + + txn, totalInputMake, changeAmountMake, feesMake, err =
testMeta.chain.CreateReactTxn( + senderPkBytes, postHash, isRemove, emojiReaction, feeRateNanosPerKB, nil, []*DeSoOutput{}) + if err != nil { + return nil, nil, 0, err + } + + require.Equal(totalInputMake, changeAmountMake+feesMake) + + // Sign the transaction now that its inputs are set up. + _signTxn(testMeta.t, txn, senderPrivBase58Check) + + txHash := txn.Hash() + // Always use height+1 for validation since it's assumed the transaction will + // get mined into the next block. + blockHeight := testMeta.chain.blockTip().Height + 1 + utxoOps, totalInput, totalOutput, fees, err := + utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, true, /*verifySignature*/ + false /*ignoreUtxos*/) + // ConnectTransaction should treat the amount locked as contributing to the + // output. + if err != nil { + return nil, nil, 0, err + } + require.Equal(totalInput, totalOutput+fees) + require.Equal(totalInput, totalInputMake) + + // We should have one SPEND UtxoOperation for each input, one ADD operation + // for each output, and one OperationTypeReact operation at the end. + require.Equal(len(txn.TxInputs)+len(txn.TxOutputs)+1, len(utxoOps)) + for ii := 0; ii < len(txn.TxInputs); ii++ { + require.Equal(OperationTypeSpendUtxo, utxoOps[ii].Type) + } + require.Equal(OperationTypeReact, utxoOps[len(utxoOps)-1].Type) + + require.NoError(utxoView.FlushToDb(0)) + + return utxoOps, txn, blockHeight, nil +} + +func TestReactTxns(t *testing.T) { + // Test constants + const feeRateNanosPerKb = uint64(101) + var err error + + //Initialize test chain and miner + chain, params, db := NewLowDifficultyBlockchain() + mempool, miner := NewTestMiner(t, chain, params, true /*isSender*/) + + // Mine a few blocks to give the senderPkString some money. + for ii := 0; ii < 20; ii++ { + _, err := miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) + require.NoError(t, err) + } + + // We build the testMeta obj after mining blocks so that we save the correct block height. 
+ testMeta := &TestMeta{ + t: t, + chain: chain, + params: params, + db: db, + mempool: mempool, + miner: miner, + savedHeight: chain.blockTip().Height + 1, + } + + // Helpers + type User struct { + Pub string + Priv string + PkBytes []byte + PublicKey *PublicKey + Pkid *PKID + } + + //TODO Use this correctly + //deso := User{ + // PublicKey: &ZeroPublicKey, + // Pkid: &ZeroPKID, + //} + + m0 := User{ + Pub: m0Pub, + Priv: m0Priv, + PkBytes: m0PkBytes, + PublicKey: NewPublicKey(m0PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m0PkBytes).PKID, + } + + m1 := User{ + Pub: m1Pub, + Priv: m1Priv, + PkBytes: m1PkBytes, + PublicKey: NewPublicKey(m1PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m1PkBytes).PKID, + } + + m2 := User{ + Pub: m2Pub, + Priv: m2Priv, + PkBytes: m2PkBytes, + PublicKey: NewPublicKey(m2PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m2PkBytes).PKID, + } + + m3 := User{ + Pub: m3Pub, + Priv: m3Priv, + PkBytes: m3PkBytes, + PublicKey: NewPublicKey(m3PkBytes), + Pkid: DBGetPKIDEntryForPublicKey(db, chain.snapshot, m3PkBytes).PKID, + } + + // Setup some convenience functions for the test. + var txnOps [][]*UtxoOperation + var txns []*MsgDeSoTxn + var expectedSenderBalances []uint64 + var expectedRecipientBalances []uint64 + + // We take the block tip to be the blockchain height rather than the + // header chain height. + savedHeight := chain.blockTip().Height + 1 + + // Fund all the keys. 
+ for ii := 0; ii < 5; ii++ { + _registerOrTransferWithTestMeta(testMeta, "m0", senderPkString, m0.Pub, senderPrivString, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1", senderPkString, m1.Pub, senderPrivString, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m2", senderPkString, m2.Pub, senderPrivString, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m3", senderPkString, m3.Pub, senderPrivString, 7e6) + } + + doReactTxn := func( + senderPkBase58Check string, postHash BlockHash, + senderPrivBase58Check string, isRemove bool, emojiReaction rune, feeRateNanosPerKB uint64) { + + expectedSenderBalances = append( + expectedSenderBalances, _getBalance(t, chain, nil, senderPkString)) + expectedRecipientBalances = append( + expectedRecipientBalances, _getBalance(t, chain, nil, recipientPkString)) + + currentOps, currentTxn, _, err := _doReactTxn( + testMeta, feeRateNanosPerKB, senderPkBase58Check, + postHash, senderPrivBase58Check, isRemove, emojiReaction) + require.NoError(t, err) + + txnOps = append(txnOps, currentOps) + txns = append(txns, currentTxn) + } + + submitPost := func( + feeRateNanosPerKB uint64, updaterPkBase58Check string, + updaterPrivBase58Check string, + postHashToModify []byte, + parentStakeID []byte, + bodyObj *DeSoBodySchema, + repostedPostHash []byte, + tstampNanos uint64, + isHidden bool) { + + expectedSenderBalances = append( + expectedSenderBalances, _getBalance(t, chain, nil, senderPkString)) + expectedRecipientBalances = append( + expectedRecipientBalances, _getBalance(t, chain, nil, recipientPkString)) + + currentOps, currentTxn, _, err := _submitPost( + t, chain, db, params, feeRateNanosPerKB, + updaterPkBase58Check, + updaterPrivBase58Check, + postHashToModify, + parentStakeID, + bodyObj, + repostedPostHash, + tstampNanos, + isHidden) + + require.NoError(t, err) + + txnOps = append(txnOps, currentOps) + txns = append(txns, currentTxn) + } + + fakePostHash := BlockHash{ + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, + 
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, + 0x30, 0x31, + } + // Attempting "m0 -> fakePostHash" should fail since the post doesn't exist. + _, _, _, err = _doReactTxn( + testMeta, 10 /*feeRateNanosPerKB*/, m0Pub, + fakePostHash, m0Priv, false /*isRemove*/, HappyReaction) + require.Error(t, err) + require.Contains(t, err.Error(), RuleErrorCannotReactNonexistentPost) + + // p1 + submitPost( + 10, /*feeRateNanosPerKB*/ + m0Pub, /*updaterPkBase58Check*/ + m0Priv, /*updaterPrivBase58Check*/ + []byte{}, /*postHashToModify*/ + []byte{}, /*parentStakeID*/ + &DeSoBodySchema{Body: "m0 post body 1 no profile"}, /*body*/ + []byte{}, + 1602947011*1e9, /*tstampNanos*/ + false /*isHidden*/) + post1Txn := txns[len(txns)-1] + post1Hash := *post1Txn.Hash() + + // p2 + { + submitPost( + 10, /*feeRateNanosPerKB*/ + m0Pub, /*updaterPkBase58Check*/ + m0Priv, /*updaterPrivBase58Check*/ + []byte{}, /*postHashToModify*/ + []byte{}, /*parentStakeID*/ + &DeSoBodySchema{Body: "m0 post body 2 no profile"}, /*body*/ + []byte{}, + 1502947012*1e9, /*tstampNanos*/ + false /*isHidden*/) + } + post2Txn := txns[len(txns)-1] + post2Hash := *post2Txn.Hash() + + // p3 + { + submitPost( + 10, /*feeRateNanosPerKB*/ + m1Pub, /*updaterPkBase58Check*/ + m1Priv, /*updaterPrivBase58Check*/ + []byte{}, /*postHashToModify*/ + []byte{}, /*parentStakeID*/ + &DeSoBodySchema{Body: "m1 post body 1 no profile"}, /*body*/ + []byte{}, + 1502947013*1e9, /*tstampNanos*/ + false /*isHidden*/) + } + post3Txn := txns[len(txns)-1] + post3Hash := *post3Txn.Hash() + + // m0 -> p1 (happy) + doReactTxn(m0Pub, post1Hash, m0Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // Duplicating "m0 -> p1" should fail. 
+ _, _, _, err = _doReactTxn( + testMeta, 10 /*feeRateNanosPerKB*/, m0Pub, + post1Hash, m0Priv, false /*isRemove*/, HappyReaction) + require.Error(t, err) + require.Contains(t, err.Error(), RuleErrorReactEntryAlreadyExists) + + // m2 -> p1 (happy) + doReactTxn(m2Pub, post1Hash, m2Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m3 -> p1 (surprised) + doReactTxn(m3Pub, post1Hash, m3Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m3 -> p2 (sad) + doReactTxn(m3Pub, post2Hash, m3Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m1 -> p2 (angry) + doReactTxn(m1Pub, post2Hash, m1Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m2 -> p3 (surprised) + doReactTxn(m2Pub, post3Hash, m2Priv, false /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + reactingP1 := [][]byte{ + _strToPk(t, m0Pub), + _strToPk(t, m2Pub), + _strToPk(t, m3Pub), + } + + reactingP2 := [][]byte{ + _strToPk(t, m1Pub), + _strToPk(t, m3Pub), + } + + reactingP3 := [][]byte{ + _strToPk(t, m2Pub), + } + + // Verify pks reacting p1 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post1Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP1), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP1, reactingPks[ii]) + } + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(len(reactingP1)), post1.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p2 and check reactcount. 
+ { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post2Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP2), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP2, reactingPks[ii]) + } + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(len(reactingP2)), post2.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p3 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post3Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP3), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP3, reactingPks[ii]) + } + post3 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post3Hash) + require.Equal(t, uint64(len(reactingP3)), post3.EmojiCount[HappyReaction]) + } + + m0Reacts := []BlockHash{ + post1Hash, + } + + m1Reacts := []BlockHash{ + post2Hash, + } + + m2Reacts := []BlockHash{ + post1Hash, + post3Hash, + } + + m3Reacts := []BlockHash{ + post1Hash, + post2Hash, + } + + // Verify m0's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m0Pub)) + require.NoError(t, err) + require.Equal(t, len(m0Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m0Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m1's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m1Pub)) + require.NoError(t, err) + require.Equal(t, len(m1Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m1Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m2's reactions. 
+ { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m2Pub)) + require.NoError(t, err) + require.Equal(t, len(m2Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m2Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m3's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m3Pub)) + require.NoError(t, err) + require.Equal(t, len(m3Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m3Reacts, *reactedPostHashes[ii]) + } + } + + // Try an removing a reaction. + // + // m0 -> p1 (remove, happy) + doReactTxn(m0Pub, post1Hash, m0Priv, true /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // m3 -> p2 (remove, happy) + doReactTxn(m3Pub, post2Hash, m3Priv, true /*isRemove*/, HappyReaction, 10 /*feeRateNanosPerKB*/) + + // Duplicating "m0 -> p1" (unfollow) should fail. + _, _, _, err = _doReactTxn( + testMeta, 10 /*feeRateNanosPerKB*/, m0Pub, + post1Hash, m0Priv, true /*isRemove*/, HappyReaction) + require.Error(t, err) + require.Contains(t, err.Error(), RuleErrorCannotRemoveReactionWithoutAnExistingReaction) + + reactingP1 = [][]byte{ + _strToPk(t, m2Pub), + _strToPk(t, m3Pub), + } + + reactingP2 = [][]byte{ + _strToPk(t, m1Pub), + } + + // Verify pks reacting p1 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post1Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP1), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP1, reactingPks[ii]) + } + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(len(reactingP1)), post1.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p2 and check reactcount. 
+ { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post2Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP2), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP2, reactingPks[ii]) + } + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(len(reactingP2)), post2.EmojiCount[HappyReaction]) + } + + m3Reacts = []BlockHash{ + post1Hash, + } + + // Verify m0 has no reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m0Pub)) + require.NoError(t, err) + require.Equal(t, 0, len(reactedPostHashes)) + } + + // Verify m3's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m3Pub)) + require.NoError(t, err) + require.Equal(t, len(m3Reacts), len(reactedPostHashes)) + for i := 0; i < len(reactedPostHashes); i++ { + require.Contains(t, m3Reacts, *reactedPostHashes[i]) + } + } + + // =================================================================================== + // Finish it off with some transactions + // =================================================================================== + _registerOrTransferWithTestMeta(testMeta, "m0", senderPkString, m0.Pub, senderPrivString, 42e6) + _registerOrTransferWithTestMeta(testMeta, "m1", senderPkString, m1.Pub, senderPrivString, 42e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + 
_registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m1 -> m0", m1Pub, m0Pub, m1Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + _registerOrTransferWithTestMeta(testMeta, "m0 -> m1", m0Pub, m1Pub, m0Priv, 7e6) + + // Roll back all of the above using the utxoOps from each. + for ii := 0; ii < len(txnOps); ii++ { + backwardIter := len(txnOps) - 1 - ii + currentOps := txnOps[backwardIter] + currentTxn := txns[backwardIter] + fmt.Printf( + "Disconnecting transaction with type %v index %d (going backwards)\n", + currentTxn.TxnMeta.GetTxnType(), backwardIter) + + utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(t, err) + + currentHash := currentTxn.Hash() + err = utxoView.DisconnectTransaction(currentTxn, currentHash, currentOps, savedHeight) + require.NoError(t, err) + + require.NoError(t, utxoView.FlushToDb(0)) + + // After disconnecting, the balances should be restored to what they + // were before this transaction was applied. + require.Equal(t, + int64(expectedSenderBalances[backwardIter]), + int64(_getBalance(t, chain, nil, senderPkString))) + require.Equal(t, + expectedRecipientBalances[backwardIter], + _getBalance(t, chain, nil, recipientPkString)) + + // Here we check the reactcounts after all the reactentries have been disconnected. 
+		if backwardIter == 19 {
+			post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash)
+			require.Equal(t, uint64(0), post1.EmojiCount[HappyReaction])
+			post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash)
+			require.Equal(t, uint64(0), post2.EmojiCount[HappyReaction])
+			post3 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post3Hash)
+			require.Equal(t, uint64(0), post3.EmojiCount[HappyReaction])
+		}
+	}
+
+	_executeAllTestRollbackAndFlush(testMeta)
+
+	// TODO (Michel) Everything below is unnecessary since we call _executeAllTestRollbackAndFlush
+
+	// Apply all the transactions to a mempool object and make sure we don't get any
+	// errors. Verify the balances align as we go.
+	for ii, tx := range txns {
+		// See comment above on this transaction.
+		fmt.Printf("Adding txn %d of type %v to mempool\n", ii, tx.TxnMeta.GetTxnType())
+
+		require.Equal(t, expectedSenderBalances[ii], _getBalance(t, chain, mempool, senderPkString))
+		require.Equal(t, expectedRecipientBalances[ii], _getBalance(t, chain, mempool, recipientPkString))
+
+		_, err := mempool.ProcessTransaction(tx, false, false, 0, true)
+		require.NoError(t, err, "Problem adding transaction %d to mempool: %v", ii, tx)
+	}
+
+	// Apply all the transactions to a view and flush the view to the db.
+	utxoView, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot)
+	require.NoError(t, err)
+	for ii, txn := range txns {
+		fmt.Printf("Adding txn %v of type %v to UtxoView\n", ii, txn.TxnMeta.GetTxnType())
+
+		// Always use height+1 for validation since it's assumed the transaction will
+		// get mined into the next block.
+ txHash := txn.Hash() + blockHeight := chain.blockTip().Height + 1 + _, _, _, _, err := + utxoView.ConnectTransaction(txn, txHash, getTxnSize(*txn), blockHeight, true /*verifySignature*/, false /*ignoreUtxos*/) + require.NoError(t, err) + } + // Flush the utxoView after having added all the transactions. + require.NoError(t, utxoView.FlushToDb(0)) + + testConnectedState := func() { + reactingP1 = [][]byte{ + _strToPk(t, m2Pub), + _strToPk(t, m3Pub), + } + + reactingP2 = [][]byte{ + _strToPk(t, m1Pub), + } + + reactingP3 := [][]byte{ + _strToPk(t, m2Pub), + } + + // Verify pks reacting p1 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post1Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP1), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP1, reactingPks[ii]) + } + post1 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post1Hash) + require.Equal(t, uint64(len(reactingP1)), post1.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p2 and check reactcount. + { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post2Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP2), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP2, reactingPks[ii]) + } + post2 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post2Hash) + require.Equal(t, uint64(len(reactingP2)), post2.EmojiCount[HappyReaction]) + } + + // Verify pks reacting p3 and check reactcount. 
+ { + reactingPks, err := DbGetReactorPubKeysReactingToPostHash(db, post3Hash) + require.NoError(t, err) + require.Equal(t, len(reactingP3), len(reactingPks)) + for ii := 0; ii < len(reactingPks); ii++ { + require.Contains(t, reactingP3, reactingPks[ii]) + } + post3 := DBGetPostEntryByPostHash(testMeta.db, testMeta.chain.snapshot, &post3Hash) + require.Equal(t, uint64(len(reactingP3)), post3.EmojiCount[HappyReaction]) + } + + m1Reacts := []BlockHash{ + post2Hash, + } + + m2Reacts := []BlockHash{ + post1Hash, + post3Hash, + } + + m3Reacts = []BlockHash{ + post1Hash, + } + + // Verify m0 has no reactions. + { + followPks, err := DbGetPostHashesYouReact(db, _strToPk(t, m0Pub)) + require.NoError(t, err) + require.Equal(t, 0, len(followPks)) + } + + // Verify m1's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m1Pub)) + require.NoError(t, err) + require.Equal(t, len(m1Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m1Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m2's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m2Pub)) + require.NoError(t, err) + require.Equal(t, len(m2Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m2Reacts, *reactedPostHashes[ii]) + } + } + + // Verify m3's reactions. + { + reactedPostHashes, err := DbGetPostHashesYouReact(db, _strToPk(t, m3Pub)) + require.NoError(t, err) + require.Equal(t, len(m3Reacts), len(reactedPostHashes)) + for ii := 0; ii < len(reactedPostHashes); ii++ { + require.Contains(t, m3Reacts, *reactedPostHashes[ii]) + } + } + } + testConnectedState() + + // Disconnect the transactions from a single view in the same way as above + // i.e. without flushing each time. 
+ utxoView2, err := NewUtxoView(testMeta.db, testMeta.params, testMeta.chain.postgres, testMeta.chain.snapshot) + require.NoError(t, err) + for ii := 0; ii < len(txnOps); ii++ { + backwardIter := len(txnOps) - 1 - ii + fmt.Printf("Disconnecting transaction with index %d (going backwards)\n", backwardIter) + currentOps := txnOps[backwardIter] + currentTxn := txns[backwardIter] + + currentHash := currentTxn.Hash() + err = utxoView2.DisconnectTransaction(currentTxn, currentHash, currentOps, savedHeight) + require.NoError(t, err) + } + require.NoError(t, utxoView2.FlushToDb(0)) + require.Equal(t, expectedSenderBalances[0], _getBalance(t, chain, nil, senderPkString)) + require.Equal(t, expectedRecipientBalances[0], _getBalance(t, chain, nil, recipientPkString)) + + _executeAllTestRollbackAndFlush(testMeta) + + // All the txns should be in the mempool already so mining a block should put + // all those transactions in it. + block, err := miner.MineAndProcessSingleBlock(0 /*threadIndex*/, mempool) + require.NoError(t, err) + // Add one for the block reward. Now we have a meaty block. + require.Equal(t, len(txnOps)+1, len(block.Txns)) + // Estimate the transaction fees of the tip block in various ways. + { + // Threshold above what's in the block should return the default fee at all times. + require.Equal(t, int64(0), int64(chain.EstimateDefaultFeeRateNanosPerKB(.1, 0))) + require.Equal(t, int64(7), int64(chain.EstimateDefaultFeeRateNanosPerKB(.1, 7))) + // Threshold below what's in the block should return the max of the median + // and the minfee. This means with a low minfee the value returned should be + // higher. And with a high minfee the value returned should be equal to the + // fee. 
+ require.Equal(t, int64(7), int64(chain.EstimateDefaultFeeRateNanosPerKB(0, 7))) + require.Equal(t, int64(4), int64(chain.EstimateDefaultFeeRateNanosPerKB(0, 0))) + require.Equal(t, int64(7), int64(chain.EstimateDefaultFeeRateNanosPerKB(.01, 7))) + require.Equal(t, int64(4), int64(chain.EstimateDefaultFeeRateNanosPerKB(.01, 1))) + } + + testConnectedState() + + _executeAllTestRollbackAndFlush(testMeta) +} + +// func TestReactTxns +// - one successful happy, sad, angry, confused +// - one failure (invalid character?, not amongst the other characters) + +// func _createReactTxn +// func _connectReactTxn +// func _doReactTxnWithTestMeta +// func _doReactRxnErrorToBeDefined +// func Eq +// func ToEntry +// func TestFlushingReactTxn diff --git a/lib/block_view_types.go b/lib/block_view_types.go index 1fc251b2e..ffd7b49d4 100644 --- a/lib/block_view_types.go +++ b/lib/block_view_types.go @@ -79,6 +79,7 @@ const ( EncoderTypeMessagingGroupMember EncoderTypeForbiddenPubKeyEntry EncoderTypeLikeEntry + EncoderTypeReactEntry EncoderTypeNFTEntry EncoderTypeNFTBidEntry EncoderTypeNFTBidEntryBundle @@ -118,6 +119,7 @@ const ( EncoderTypeUpdateProfileTxindexMetadata EncoderTypeSubmitPostTxindexMetadata EncoderTypeLikeTxindexMetadata + EncoderTypeReactTxindexMetadata EncoderTypeFollowTxindexMetadata EncoderTypePrivateMessageTxindexMetadata EncoderTypeSwapIdentityTxindexMetadata @@ -225,6 +227,8 @@ func (encoderType EncoderType) New() DeSoEncoder { return &SubmitPostTxindexMetadata{} case EncoderTypeLikeTxindexMetadata: return &LikeTxindexMetadata{} + case EncoderTypeReactTxindexMetadata: + return &ReactTxindexMetadata{} case EncoderTypeFollowTxindexMetadata: return &FollowTxindexMetadata{} case EncoderTypePrivateMessageTxindexMetadata: @@ -517,8 +521,9 @@ const ( OperationTypeDAOCoinTransfer OperationType = 26 OperationTypeSpendingLimitAccounting OperationType = 27 OperationTypeDAOCoinLimitOrder OperationType = 28 + OperationTypeReact OperationType = 29 - // NEXT_TAG = 29 + // 
NEXT_TAG = 30 ) func (op OperationType) String() string { @@ -688,6 +693,10 @@ type UtxoOperation struct { PrevLikeEntry *LikeEntry PrevLikeCount uint64 + // Save the previous emoji reactions + PrevReactEntry *ReactionEntry + PrevEmojiCount map[rune]uint64 + // For disconnecting diamonds. PrevDiamondEntry *DiamondEntry @@ -2267,6 +2276,62 @@ func (likeEntry *LikeEntry) GetEncoderType() EncoderType { return EncoderTypeLikeEntry } +func MakeReactionKey(userPk []byte, ReactPostHash BlockHash, ReactEmoji rune) ReactionKey { + return ReactionKey{ + ReactorPubKey: *NewPublicKey(userPk), + ReactedPostHash: ReactPostHash, + ReactEmoji: ReactEmoji, + } +} + +type ReactionKey struct { + ReactorPubKey PublicKey + ReactedPostHash BlockHash + ReactEmoji rune +} + +type ReactionEntry struct { + ReactorPubKey []byte + ReactedPostHash *BlockHash + ReactEmoji rune + // Whether this entry is deleted in the view + isDeleted bool +} + +func (reactEntry *ReactionEntry) RawEncodeWithoutMetadata(blockHeight uint64, skipMetadata ...bool) []byte { + var data []byte + + data = append(data, EncodeByteArray(reactEntry.ReactorPubKey)...) + data = append(data, EncodeToBytes(blockHeight, reactEntry.ReactedPostHash, skipMetadata...)...) + data = append(data, EncodeByteArray([]byte(string(reactEntry.ReactEmoji)))...) 
+ return data +} + +func (reactEntry *ReactionEntry) RawDecodeWithoutMetadata(blockHeight uint64, rr *bytes.Reader) error { + var err error + + reactEntry.ReactorPubKey, err = DecodeByteArray(rr) + if err != nil { + return errors.Wrapf(err, "ReactionEntry.Decode: problem reading ReactorPubKey") + } + reactedPostHash := &BlockHash{} + if exist, err := DecodeFromBytes(reactedPostHash, rr); exist && err == nil { + reactEntry.ReactedPostHash = reactedPostHash + } else if err != nil { + return errors.Wrapf(err, "ReactionEntry.Decode: problem reading ReactedPostHash") + } + //TODO (Michel) we need to decode ReactEmoji here + return nil +} + +func (reactEntry *ReactionEntry) GetVersionByte(blockHeight uint64) byte { + return 0 +} + +func (reactEntry *ReactionEntry) GetEncoderType() EncoderType { + return EncoderTypeReactEntry +} + func MakeNFTKey(nftPostHash *BlockHash, serialNumber uint64) NFTKey { return NFTKey{ NFTPostHash: *nftPostHash, @@ -3000,6 +3065,9 @@ type PostEntry struct { // Counter of users that have liked this post. LikeCount uint64 + // Counter of emoji reactions that this post has. + EmojiCount map[rune]uint64 + // Counter of users that have reposted this post. RepostCount uint64 diff --git a/lib/blockchain.go b/lib/blockchain.go index 0df49703c..e9138a159 100644 --- a/lib/blockchain.go +++ b/lib/blockchain.go @@ -6,6 +6,7 @@ import ( "encoding/hex" "fmt" "github.com/holiman/uint256" + "golang.org/x/text/unicode/norm" "math" "math/big" "reflect" @@ -1492,8 +1493,8 @@ func CheckTransactionSanity(txn *MsgDeSoTxn) error { // // TODO: The above is easily fixed by requiring something like block height to // be present in the ExtraNonce field. 
- canHaveZeroInputs := (txn.TxnMeta.GetTxnType() == TxnTypeBitcoinExchange || - txn.TxnMeta.GetTxnType() == TxnTypePrivateMessage) + canHaveZeroInputs := txn.TxnMeta.GetTxnType() == TxnTypeBitcoinExchange || + txn.TxnMeta.GetTxnType() == TxnTypePrivateMessage if len(txn.TxInputs) == 0 && !canHaveZeroInputs { glog.V(2).Infof("CheckTransactionSanity: Txn needs at least one input: %v", spew.Sdump(txn)) return RuleErrorTxnMustHaveAtLeastOneInput @@ -3056,6 +3057,66 @@ func (bc *Blockchain) CreateLikeTxn( return txn, totalInput, changeAmount, fees, nil } +func (bc *Blockchain) CreateReactTxn( + userPublicKey []byte, postHash BlockHash, isRemove bool, emojiReaction rune, + minFeeRateNanosPerKB uint64, mempool *DeSoMempool, additionalOutputs []*DeSoOutput) ( + _txn *MsgDeSoTxn, _totalInput uint64, _changeAmount uint64, _fees uint64, + _err error) { + + //TODO Where would be the best place to place the validation function? + // At the moment, only support happy, sad, angry and surprised + //TODO (Michel) Validation should at a minimum live in the connect + // react logic. I recommend defining a regex or a validation function that + // can be called in the connect logic and then re-using that validation elsewhere. + AcceptedReactions := [4]rune{'😊', '😥', '😠', '😮'} + // Validate emoji reaction + var isValidEmoji = func(emoji rune) bool { + for _, acceptedReaction := range AcceptedReactions { + if emoji == acceptedReaction { + return true + } + } + return false + } + + // TODO Fix bug returning invalid for valid inputs + //TODO (Michel) We need to ensure that the rune is normalized + // in the connect logic. Anybody could construct the transaction + // on their own without using our API, so we need to make sure every + // react emoji is consistent when we're connecting. 
+ normalizedReaction := norm.NFC.String(string(emojiReaction)) + if !isValidEmoji(rune(normalizedReaction[0])) { + return nil, 0, 0, 0, errors.New("CreateReactTxn: Invalid emoji input: ") + } + + // A React transaction doesn't need any inputs or outputs (except additionalOutputs provided). + txn := &MsgDeSoTxn{ + PublicKey: userPublicKey, + TxnMeta: &ReactMetadata{ + PostHash: &postHash, + EmojiReaction: rune(normalizedReaction[0]), + IsRemove: isRemove, + }, + TxOutputs: additionalOutputs, + // We wait to compute the signature until we've added all the + // inputs and change. + } + + totalInput, spendAmount, changeAmount, fees, err := + bc.AddInputsAndChangeToTransaction(txn, minFeeRateNanosPerKB, mempool) + if err != nil { + return nil, 0, 0, 0, errors.Wrapf( + err, "CreateReactTxn: Problem adding inputs: ") + } + + // Sanity-check that the spendAmount is zero. + if err = amountEqualsAdditionalOutputs(spendAmount, additionalOutputs); err != nil { + return nil, 0, 0, 0, fmt.Errorf("CreateReactTxn: %v", err) + } + + return txn, totalInput, changeAmount, fees, nil +} + func (bc *Blockchain) CreateFollowTxn( senderPublicKey []byte, followedPublicKey []byte, isUnfollow bool, minFeeRateNanosPerKB uint64, mempool *DeSoMempool, additionalOutputs []*DeSoOutput) ( diff --git a/lib/db_adapter.go b/lib/db_adapter.go index 1e955af9a..aead4fe87 100644 --- a/lib/db_adapter.go +++ b/lib/db_adapter.go @@ -1,6 +1,8 @@ package lib import ( + "fmt" + "github.com/btcsuite/btcd/btcec" "github.com/dgraph-io/badger/v3" ) @@ -131,3 +133,38 @@ func (adapter *DbAdapter) GetPKIDForPublicKey(pkBytes []byte) *PKID { return DBGetPKIDEntryForPublicKey(adapter.badgerDb, adapter.snapshot, pkBytes).PKID } + +// +// Reactions +// +func (adapter *DbAdapter) GetReactionsForPost(postHash *BlockHash, reactionEmoji rune) ([]*ReactionEntry, error) { + var reactionEntries []*ReactionEntry + if adapter.postgresDb != nil { + reactions := adapter.postgresDb.GetReactionsForPost(postHash, reactionEmoji) + for 
_, reaction := range reactions { + reactionEntries = append(reactionEntries, reaction.NewReactionEntry()) + } + } else { + handle := adapter.badgerDb + dbPrefix := append([]byte{}, Prefixes.PrefixPostHashToReactorPubKey...) + dbPrefix = append(dbPrefix, postHash[:]...) + keysFound, _ := EnumerateKeysForPrefix(handle, dbPrefix) + + // Iterate over all the db keys & values and load them into the view. + expectedKeyLength := 1 + HashSizeBytes + btcec.PubKeyBytesLenCompressed + for _, key := range keysFound { + // Sanity check that this is a reasonable key. + if len(key) != expectedKeyLength { + return nil, fmt.Errorf("DbAdapter.GetReactionsForPost: Invalid key length found: %d", len(key)) + } + reactorPubKey := key[1+HashSizeBytes:] + reactionEntry := &ReactionEntry{ + ReactorPubKey: reactorPubKey, + ReactedPostHash: postHash, + ReactEmoji: reactionEmoji, + } + reactionEntries = append(reactionEntries, reactionEntry) + } + } + return reactionEntries, nil +} diff --git a/lib/db_utils.go b/lib/db_utils.go index f823f8b01..0d46f2bd3 100644 --- a/lib/db_utils.go +++ b/lib/db_utils.go @@ -174,6 +174,17 @@ type DBPrefixes struct { PrefixLikerPubKeyToLikedPostHash []byte `prefix_id:"[30]" is_state:"true"` PrefixLikedPostHashToLikerPubKey []byte `prefix_id:"[31]" is_state:"true"` + //TODO (Michel) We need to include react emoji in these indices. I think the correct indices would be + // 1. PrefixReactorPubKeyToPostHashReactEmojiL + // 2. 
PrefixPostHashReactEmojiToReactorPubKey + // this allows us to get all the different reactions a user gave to a single post easily (index 1) as well as get a list of all users who gave a "happy" react to a given post (index 2) + + // Prefixes for reactions: + // -> <> + // -> <> + PrefixReactorPubKeyToPostHash []byte `prefix_id:"[63]" is_state:"true"` + PrefixPostHashToReactorPubKey []byte `prefix_id:"[64]" is_state:"true"` + // Prefixes for creator coin fields: // -> // -> @@ -322,7 +333,7 @@ type DBPrefixes struct { PrefixDAOCoinLimitOrder []byte `prefix_id:"[60]" is_state:"true"` PrefixDAOCoinLimitOrderByTransactorPKID []byte `prefix_id:"[61]" is_state:"true"` PrefixDAOCoinLimitOrderByOrderID []byte `prefix_id:"[62]" is_state:"true"` - // NEXT_TAG: 63 + // NEXT_TAG: 65 } // StatePrefixToDeSoEncoder maps each state prefix to a DeSoEncoder type that is stored under that prefix. @@ -479,8 +490,13 @@ func StatePrefixToDeSoEncoder(prefix []byte) (_isEncoder bool, _encoder DeSoEnco } else if bytes.Equal(prefix, Prefixes.PrefixDAOCoinLimitOrderByOrderID) { // prefix_id:"[62]" return true, &DAOCoinLimitOrderEntry{} + } else if bytes.Equal(prefix, Prefixes.PrefixReactorPubKeyToPostHash) { + // prefix_id:"[63]" + return false, nil + } else if bytes.Equal(prefix, Prefixes.PrefixPostHashToReactorPubKey) { + // prefix_id:"[64]" + return false, nil } - return true, nil } @@ -2032,6 +2048,165 @@ func DbGetLikerPubKeysLikingAPostHash(handle *badger.DB, likedPostHash BlockHash return userPubKeys, nil } +// ------------------------------------------------------------------------------------- +// React mapping functions +// -> <> +// -> <> +// ------------------------------------------------------------------------------------- +// +//TODO (Michel) "reacted" instead of "liked" in all these DB functions +//TODO these will probably need to change slightly to accommodate multiple reactions with different emojis per post +func _dbKeyForReactorPubKeyToPostHashMapping( + userPubKey 
[]byte, postHash BlockHash, reactionEmoji rune) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixReactorPubKeyToPostHash...) + key := append(prefixCopy, userPubKey...) + key = append(key, postHash[:]...) + key = append(key, []byte(string(reactionEmoji))...) + return key +} + +func _dbKeyForPostHashToReactorPubKeyMapping( + postHash BlockHash, userPubKey []byte) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixPostHashToReactorPubKey...) + key := append(prefixCopy, postHash[:]...) + key = append(key, userPubKey...) + return key +} + +func _dbSeekPrefixForPostHashesYouReact(yourPubKey []byte) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixReactorPubKeyToPostHash...) + return append(prefixCopy, yourPubKey...) +} + +func _dbSeekPrefixForReactorPubKeysReactingToPostHash(likedPostHash BlockHash) []byte { + // Make a copy to avoid multiple calls to this function re-using the same slice. + prefixCopy := append([]byte{}, Prefixes.PrefixPostHashToReactorPubKey...) + return append(prefixCopy, likedPostHash[:]...) +} + +// Note that this adds a mapping for the user *and* the liked post. 
+func DbPutReactMappingsWithTxn( + txn *badger.Txn, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) error { + + if len(userPubKey) != btcec.PubKeyBytesLenCompressed { + return fmt.Errorf("DbPutReactMappingsWithTxn: User public key "+ + "length %d != %d", len(userPubKey), btcec.PubKeyBytesLenCompressed) + } + + if err := txn.Set(_dbKeyForReactorPubKeyToPostHashMapping(userPubKey, likedPostHash, reactionEmoji), []byte{}); err != nil { + return errors.Wrapf(err, "DbPutReactMappingsWithTxn: Problem adding user to reacted post mapping: ") + } + + if err := txn.Set(_dbKeyForPostHashToReactorPubKeyMapping(likedPostHash, userPubKey), []byte{}); err != nil { + return errors.Wrapf(err, "DbPutReactMappingsWithTxn: Problem adding reacted post to user mapping: ") + } + + return nil +} + +func DbPutReactMappings( + handle *badger.DB, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) error { + + return handle.Update(func(txn *badger.Txn) error { + return DbPutReactMappingsWithTxn(txn, userPubKey, likedPostHash, reactionEmoji) + }) +} + +func DbGetReactorPubKeyToPostHashMappingWithTxn( + txn *badger.Txn, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) []byte { + + key := _dbKeyForReactorPubKeyToPostHashMapping(userPubKey, likedPostHash, reactionEmoji) + _, err := txn.Get(key) + if err != nil { + return nil + } + + // Typically we return a DB entry here but we don't store anything for like mappings. + // We use this function instead of one returning true / false for feature consistency. + return []byte{} +} + +func DbGetReactorPubKeyToPostHashMapping( + db *badger.DB, userPubKey []byte, likedPostHash BlockHash, reactionEmoji rune) []byte { + var ret []byte + db.View(func(txn *badger.Txn) error { + ret = DbGetReactorPubKeyToPostHashMappingWithTxn(txn, userPubKey, likedPostHash, reactionEmoji) + return nil + }) + return ret +} + +// Note this deletes the like for the user *and* the liked post since a mapping +// should exist for each. 
+func DbDeleteReactMappingsWithTxn( + txn *badger.Txn, userPubKey []byte, postHash BlockHash, reactionEmoji rune) error { + + // First check that a mapping exists. If one doesn't exist then there's nothing to do. + existingMapping := DbGetReactorPubKeyToPostHashMappingWithTxn(txn, userPubKey, postHash, reactionEmoji) + if existingMapping == nil { + return nil + } + + // When a message exists, delete the mapping for the sender and receiver. + if err := txn.Delete( + _dbKeyForReactorPubKeyToPostHashMapping(userPubKey, postHash, reactionEmoji)); err != nil { + return errors.Wrapf(err, "DbDeleteLikeMappingsWithTxn: Deleting "+ + "userPubKey %s and postHash %s failed", + PkToStringBoth(userPubKey), postHash) + } + if err := txn.Delete( + _dbKeyForPostHashToReactorPubKeyMapping(postHash, userPubKey)); err != nil { + return errors.Wrapf(err, "DbDeleteLikeMappingsWithTxn: Deleting "+ + "postHash %s and userPubKey %s failed", + PkToStringBoth(postHash[:]), PkToStringBoth(userPubKey)) + } + + return nil +} + +func DbDeleteReactMappings( + handle *badger.DB, userPubKey []byte, postHash BlockHash, reactionEmoji rune) error { + return handle.Update(func(txn *badger.Txn) error { + return DbDeleteReactMappingsWithTxn(txn, userPubKey, postHash, reactionEmoji) + }) +} + +func DbGetPostHashesYouReact(handle *badger.DB, yourPublicKey []byte) ( + _postHashes []*BlockHash, _err error) { + + prefix := _dbSeekPrefixForPostHashesYouReact(yourPublicKey) + keysFound, _ := _enumerateKeysForPrefix(handle, prefix) + + var postHashesYouReact []*BlockHash + for _, keyBytes := range keysFound { + // We must slice off the first byte and userPubKey to get the postHash. 
+ postHash := &BlockHash{} + copy(postHash[:], keyBytes[1+btcec.PubKeyBytesLenCompressed:]) + postHashesYouReact = append(postHashesYouReact, postHash) + } + + return postHashesYouReact, nil +} + +func DbGetReactorPubKeysReactingToPostHash(handle *badger.DB, postHash BlockHash) ( + _pubKeys [][]byte, _err error) { + + prefix := _dbSeekPrefixForReactorPubKeysReactingToPostHash(postHash) + keysFound, _ := _enumerateKeysForPrefix(handle, prefix) + + var userPubKeys [][]byte + for _, keyBytes := range keysFound { + // We must slice off the first byte and postHash to get the userPubKey. + userPubKey := keyBytes[1+HashSizeBytes:] + userPubKeys = append(userPubKeys, userPubKey) + } + + return userPubKeys, nil +} + // ------------------------------------------------------------------------------------- // Reposts mapping functions // -> <> @@ -4878,6 +5053,53 @@ func (txnMeta *LikeTxindexMetadata) GetEncoderType() EncoderType { return EncoderTypeLikeTxindexMetadata } +type ReactTxindexMetadata struct { + // ReactorPublicKeyBase58Check = TransactorPublicKeyBase58Check + IsRemove bool + EmojiReaction rune + + PostHashHex string + // PosterPublicKeyBase58Check in AffectedPublicKeys +} + +func (txnMeta *ReactTxindexMetadata) RawEncodeWithoutMetadata(blockHeight uint64, skipMetadata ...bool) []byte { + var data []byte + + data = append(data, BoolToByte(txnMeta.IsRemove)) + data = append(data, []byte(string(txnMeta.EmojiReaction))...) + data = append(data, EncodeByteArray([]byte(txnMeta.PostHashHex))...) + return data +} + +func (txnMeta *ReactTxindexMetadata) RawDecodeWithoutMetadata(blockHeight uint64, rr *bytes.Reader) error { + var err error + + //TODO (Michel) I usually put isRemove as the last one. 
I think logically we would expect PostHashHex then EmojiReaction then IsRemove + txnMeta.IsRemove, err = ReadBoolByte(rr) + if err != nil { + return errors.Wrapf(err, "ReactTxindexMetadata.Decode: Empty IsRemove") + } + txnMeta.EmojiReaction, _, err = rr.ReadRune() + if err != nil { + return errors.Wrapf(err, "ReactTxindexMetadata.Decode: Empty EmojiReaction") + } + postHashHexBytes, err := DecodeByteArray(rr) + if err != nil { + return errors.Wrapf(err, "ReactTxindexMetadata.Decode: problem reading PostHashHex") + } + txnMeta.PostHashHex = string(postHashHexBytes) + + return nil +} + +func (txnMeta *ReactTxindexMetadata) GetVersionByte(blockHeight uint64) byte { + return 0 +} + +func (txnMeta *ReactTxindexMetadata) GetEncoderType() EncoderType { + return EncoderTypeReactTxindexMetadata +} + type FollowTxindexMetadata struct { // FollowerPublicKeyBase58Check = TransactorPublicKeyBase58Check // FollowedPublicKeyBase58Check in AffectedPublicKeys @@ -5387,6 +5609,7 @@ type TransactionMetadata struct { UpdateProfileTxindexMetadata *UpdateProfileTxindexMetadata `json:",omitempty"` SubmitPostTxindexMetadata *SubmitPostTxindexMetadata `json:",omitempty"` LikeTxindexMetadata *LikeTxindexMetadata `json:",omitempty"` + ReactTxindexMetadata *ReactTxindexMetadata `json:",omitempty"` FollowTxindexMetadata *FollowTxindexMetadata `json:",omitempty"` PrivateMessageTxindexMetadata *PrivateMessageTxindexMetadata `json:",omitempty"` SwapIdentityTxindexMetadata *SwapIdentityTxindexMetadata `json:",omitempty"` diff --git a/lib/errors.go b/lib/errors.go index afc3fbd5a..eb724659a 100644 --- a/lib/errors.go +++ b/lib/errors.go @@ -114,6 +114,10 @@ const ( RuleErrorCannotLikeNonexistentPost RuleError = "RuleErrorCannotLikeNonexistentPost" RuleErrorCannotUnlikeWithoutAnExistingLike RuleError = "RuleErrorCannotUnlikeWithoutAnExistingLike" + RuleErrorReactEntryAlreadyExists RuleError = "RuleErrorReactEntryAlreadyExists" + RuleErrorCannotReactNonexistentPost RuleError = 
"RuleErrorCannotReactNonexistentPost" + RuleErrorCannotRemoveReactionWithoutAnExistingReaction RuleError = "RuleErrorCannotRemoveReactionWithoutAnExistingReaction" + RuleErrorProfileUsernameTooShort RuleError = "RuleErrorProfileUsernameTooShort" RuleErrorProfileDescriptionTooShort RuleError = "RuleErrorProfileDescriptionTooShort" RuleErrorProfileUsernameTooLong RuleError = "RuleErrorProfileUsernameTooLong" diff --git a/lib/mempool.go b/lib/mempool.go index 597e76c0d..067c2246d 100644 --- a/lib/mempool.go +++ b/lib/mempool.go @@ -1387,6 +1387,34 @@ func ComputeTransactionMetadata(txn *MsgDeSoTxn, utxoView *UtxoView, blockHash * Metadata: "PosterPublicKeyBase58Check", }) } + case TxnTypeReact: + realTxMeta := txn.TxnMeta.(*ReactMetadata) + + // ReactorPublicKeyBase58Check = TransactorPublicKeyBase58Check + + txnMeta.ReactTxindexMetadata = &ReactTxindexMetadata{ + IsRemove: realTxMeta.IsRemove, + PostHashHex: hex.EncodeToString(realTxMeta.PostHash[:]), + EmojiReaction: realTxMeta.EmojiReaction, + } + + // Get the public key of the poster and set it as having been affected + // by this like. 
+ // + // PosterPublicKeyBase58Check in AffectedPublicKeys + postHash := &BlockHash{} + copy(postHash[:], realTxMeta.PostHash[:]) + postEntry := utxoView.GetPostEntryForPostHash(postHash) + if postEntry == nil { + glog.V(2).Infof( + "UpdateTxindex: Error creating ReactTxindexMetadata; "+ + "missing post for hash %v: %v", postHash, err) + } else { + txnMeta.AffectedPublicKeys = append(txnMeta.AffectedPublicKeys, &AffectedPublicKey{ + PublicKeyBase58Check: PkToString(postEntry.PosterPublicKey, utxoView.Params), + Metadata: "PosterPublicKeyBase58Check", + }) + } case TxnTypeFollow: realTxMeta := txn.TxnMeta.(*FollowMetadata) diff --git a/lib/network.go b/lib/network.go index a35a0e7d4..354a596c0 100644 --- a/lib/network.go +++ b/lib/network.go @@ -8,7 +8,7 @@ import ( "encoding/hex" "encoding/json" "fmt" - "github.com/decred/dcrd/dcrec/secp256k1/v4" + "golang.org/x/text/unicode/norm" "io" "math" "math/big" @@ -227,8 +227,9 @@ const ( TxnTypeDAOCoin TxnType = 24 TxnTypeDAOCoinTransfer TxnType = 25 TxnTypeDAOCoinLimitOrder TxnType = 26 + TxnTypeReact TxnType = 27 - // NEXT_ID = 27 + // NEXT_ID = 28 ) type TxnString string @@ -260,6 +261,7 @@ const ( TxnStringDAOCoin TxnString = "DAO_COIN" TxnStringDAOCoinTransfer TxnString = "DAO_COIN_TRANSFER" TxnStringDAOCoinLimitOrder TxnString = "DAO_COIN_LIMIT_ORDER" + TxnStringReact TxnString = "REACT" TxnStringUndefined TxnString = "TXN_UNDEFINED" ) @@ -270,7 +272,7 @@ var ( TxnTypeCreatorCoin, TxnTypeSwapIdentity, TxnTypeUpdateGlobalParams, TxnTypeCreatorCoinTransfer, TxnTypeCreateNFT, TxnTypeUpdateNFT, TxnTypeAcceptNFTBid, TxnTypeNFTBid, TxnTypeNFTTransfer, TxnTypeAcceptNFTTransfer, TxnTypeBurnNFT, TxnTypeAuthorizeDerivedKey, TxnTypeMessagingGroup, - TxnTypeDAOCoin, TxnTypeDAOCoinTransfer, TxnTypeDAOCoinLimitOrder, + TxnTypeDAOCoin, TxnTypeDAOCoinTransfer, TxnTypeDAOCoinLimitOrder, TxnTypeReact, } AllTxnString = []TxnString{ TxnStringUnset, TxnStringBlockReward, TxnStringBasicTransfer, TxnStringBitcoinExchange, 
TxnStringPrivateMessage, @@ -278,7 +280,7 @@ var ( TxnStringCreatorCoin, TxnStringSwapIdentity, TxnStringUpdateGlobalParams, TxnStringCreatorCoinTransfer, TxnStringCreateNFT, TxnStringUpdateNFT, TxnStringAcceptNFTBid, TxnStringNFTBid, TxnStringNFTTransfer, TxnStringAcceptNFTTransfer, TxnStringBurnNFT, TxnStringAuthorizeDerivedKey, TxnStringMessagingGroup, - TxnStringDAOCoin, TxnStringDAOCoinTransfer, TxnStringDAOCoinLimitOrder, + TxnStringDAOCoin, TxnStringDAOCoinTransfer, TxnStringDAOCoinLimitOrder, TxnStringReact, } ) @@ -344,6 +346,8 @@ func (txnType TxnType) GetTxnString() TxnString { return TxnStringDAOCoinTransfer case TxnTypeDAOCoinLimitOrder: return TxnStringDAOCoinLimitOrder + case TxnTypeReact: + return TxnStringReact default: return TxnStringUndefined } @@ -403,6 +407,8 @@ func GetTxnTypeFromString(txnString TxnString) TxnType { return TxnTypeDAOCoinTransfer case TxnStringDAOCoinLimitOrder: return TxnTypeDAOCoinLimitOrder + case TxnStringReact: + return TxnTypeReact default: // TxnTypeUnset means we couldn't find a matching txn type return TxnTypeUnset @@ -470,6 +476,8 @@ func NewTxnMetadata(txType TxnType) (DeSoTxnMetadata, error) { return (&DAOCoinTransferMetadata{}).New(), nil case TxnTypeDAOCoinLimitOrder: return (&DAOCoinLimitOrderMetadata{}).New(), nil + case TxnTypeReact: + return (&ReactMetadata{}).New(), nil default: return nil, fmt.Errorf("NewTxnMetadata: Unrecognized TxnType: %v; make sure you add the new type of transaction to NewTxnMetadata", txType) } @@ -3584,6 +3592,89 @@ func (txnData *LikeMetadata) New() DeSoTxnMetadata { return &LikeMetadata{} } +// ================================================================== +// ReactMetadata +// +// A reaction is an interaction where a user on the platform reacts to a post. +// ================================================================== + +type ReactMetadata struct { + // The user reacting is assumed to be the originator of the + // top-level transaction. 
+ + // The post hash to react to. + PostHash *BlockHash + + // The Unicode for the emoji reaction. + EmojiReaction rune + + // Set to true when a user is requesting to "remove" a reaction. + IsRemove bool +} + +func (txnData *ReactMetadata) GetTxnType() TxnType { + return TxnTypeReact +} + +func (txnData *ReactMetadata) ToBytes(preSignature bool) ([]byte, error) { + // Validate the metadata before encoding it. + // + + var data []byte + + // Add PostHash + // + // We know the post hash is set and has the expected length, so we don't need + // to encode the length here. + data = append(data, txnData.PostHash[:]...) + + // Add IsRemove + data = append(data, BoolToByte(txnData.IsRemove)) + + //TODO (Michel) I would flip these two. Also, will the EmojiReaction always be the same exact length of bytes? if not, we will want to prepend the number of bytes. + // Add EmojiReaction. + // It is possible for a single character to be encoded with different code point sequences. + // By normalizing the Unicode (NFC), we ensure that a character will have a unique code point sequence. + data = append(data, norm.NFC.Bytes([]byte(string(txnData.EmojiReaction)))...) 
+ + return data, nil +} + +func (txnData *ReactMetadata) FromBytes(data []byte) error { + ret := ReactMetadata{} + rr := bytes.NewReader(data) + + // PostHash + ret.PostHash = &BlockHash{} + _, err := io.ReadFull(rr, ret.PostHash[:]) + if err != nil { + return fmt.Errorf( + "ReactMetadata.FromBytes: Error reading PostHash: %v", err) + } + + // IsRemove + ret.IsRemove, err = ReadBoolByte(rr) + if err != nil { + return errors.Wrapf(err, "ReactMetadata.FromBytes: Problem reading IsRemove") + } + + // Emoji reaction + reaction, _, err := rr.ReadRune() + if err != nil { + return fmt.Errorf( + "ReactMetadata.FromBytes: Error reading EmojiReaction: %v", err) + } + + ret.EmojiReaction = reaction + *txnData = ret + + return nil +} + +func (txnData *ReactMetadata) New() DeSoTxnMetadata { + return &ReactMetadata{} +} + // ================================================================== // FollowMetadata // diff --git a/lib/network_test.go b/lib/network_test.go index 3a26f78de..c04169479 100644 --- a/lib/network_test.go +++ b/lib/network_test.go @@ -735,6 +735,64 @@ func TestSerializeUnlike(t *testing.T) { require.Equal(txMeta, testMeta) } +func TestSerializeNoReaction(t *testing.T) { + require := require.New(t) + + txMeta := &ReactMetadata{PostHash: &postHashForTesting1} + + data, err := txMeta.ToBytes(false) + require.NoError(err) + + testMeta, err := NewTxnMetadata(TxnTypeReact) + require.NoError(err) + err = testMeta.FromBytes(data) + require.NoError(err) + require.Equal(txMeta, testMeta) +} + +func TestSerializeRemoveReaction(t *testing.T) { + require := require.New(t) + + txMeta := &ReactMetadata{ + PostHash: &postHashForTesting1, + IsRemove: true, + } + + data, err := txMeta.ToBytes(false) + require.NoError(err) + + testMeta, err := NewTxnMetadata(TxnTypeReact) + require.NoError(err) + err = testMeta.FromBytes(data) + require.NoError(err) + require.Equal(txMeta, testMeta) +} + +func TestSerializeReactions(t *testing.T) { + ValidReactions := []rune{'😊', '😥', '😠', '😮'} 
+ for _, r := range ValidReactions { + _testSerializeSingleReaction(t, r) + } +} + +func _testSerializeSingleReaction(t *testing.T, emoji rune) { + require := require.New(t) + + txMeta := &ReactMetadata{ + PostHash: &postHashForTesting1, + EmojiReaction: emoji, + } + + data, err := txMeta.ToBytes(false) + require.NoError(err) + + testMeta, err := NewTxnMetadata(TxnTypeReact) + require.NoError(err) + err = testMeta.FromBytes(data) + require.NoError(err) + require.Equal(txMeta, testMeta) +} + func TestSerializeFollow(t *testing.T) { assert := assert.New(t) require := require.New(t) diff --git a/lib/notifier.go b/lib/notifier.go index de5959dfb..4c93437c8 100644 --- a/lib/notifier.go +++ b/lib/notifier.go @@ -46,6 +46,7 @@ func (notifier *Notifier) Update() error { var transactions []*PGTransaction err = notifier.db.Model(&transactions).Where("block_hash = ?", block.Hash). Relation("Outputs").Relation("PGMetadataLike").Relation("PGMetadataFollow"). + Relation("PGMetadataReact"). Relation("PGMetadataCreatorCoin").Relation("PGMetadataCreatorCoinTransfer"). 
Relation("PGMetadataSubmitPost").Select() // TODO: Add NFTs @@ -97,6 +98,20 @@ func (notifier *Notifier) Update() error { Timestamp: block.Timestamp, }) } + } else if transaction.Type == TxnTypeReact { + postHash := transaction.MetadataReact.PostHash + post := DBGetPostEntryByPostHash(notifier.badger, nil, postHash) + if post != nil { + notifications = append(notifications, &PGNotification{ + TransactionHash: transaction.Hash, + Mined: true, + ToUser: post.PosterPublicKey, + FromUser: transaction.PublicKey, + Type: NotificationReact, + PostHash: postHash, + Timestamp: block.Timestamp, + }) + } } else if transaction.Type == TxnTypeFollow { if !transaction.MetadataFollow.IsUnfollow { notifications = append(notifications, &PGNotification{ diff --git a/lib/postgres.go b/lib/postgres.go index 29dcf7a34..92e9e33af 100644 --- a/lib/postgres.go +++ b/lib/postgres.go @@ -138,6 +138,7 @@ type PGTransaction struct { MetadataUpdateProfile *PGMetadataUpdateProfile `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataFollow *PGMetadataFollow `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataLike *PGMetadataLike `pg:"rel:belongs-to,join_fk:transaction_hash"` + MetadataReact *PGMetadataReact `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataCreatorCoin *PGMetadataCreatorCoin `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataCreatorCoinTransfer *PGMetadataCreatorCoinTransfer `pg:"rel:belongs-to,join_fk:transaction_hash"` MetadataSwapIdentity *PGMetadataSwapIdentity `pg:"rel:belongs-to,join_fk:transaction_hash"` @@ -262,6 +263,17 @@ type PGMetadataLike struct { IsUnlike bool `pg:",use_zero"` } +// PGMetadataReact represents ReactMetadata +type PGMetadataReact struct { + tableName struct{} `pg:"pg_metadata_reactions"` + + TransactionHash *BlockHash `pg:",pk,type:bytea"` + PostHash *BlockHash `pg:",type:bytea"` + IsRemove bool `pg:",use_zero"` + //TODO (Michel) Are you sure this is an integer? I just frankly don't know off the top of my head. 
+ EmojiReaction rune `pg:",type:integer"` +} + // PGMetadataCreatorCoin represents CreatorCoinMetadataa type PGMetadataCreatorCoin struct { tableName struct{} `pg:"pg_metadata_creator_coins"` @@ -457,6 +469,7 @@ const ( NotificationUnknown NotificationType = iota NotificationSendDESO NotificationLike + NotificationReact NotificationFollow NotificationCoinPurchase NotificationCoinTransfer @@ -600,6 +613,22 @@ func (like *PGLike) NewLikeEntry() *LikeEntry { } } +type PGReact struct { + tableName struct{} `pg:"pg_react"` + + ReactorPublicKey []byte `pg:",pk,type:bytea"` + ReactorPostHash *BlockHash `pg:",pk,type:bytea"` + ReactionEmoji rune `pg:",pk,type:bytea"` +} + +func (react *PGReact) NewReactionEntry() *ReactionEntry { + return &ReactionEntry{ + ReactorPubKey: react.ReactorPublicKey, + ReactedPostHash: react.ReactorPostHash, + ReactEmoji: react.ReactionEmoji, + } +} + type PGFollow struct { tableName struct{} `pg:"pg_follows"` @@ -1579,6 +1608,9 @@ func (postgres *Postgres) FlushView(view *UtxoView, blockHeight uint64) error { if err := postgres.flushLikes(tx, view); err != nil { return err } + if err := postgres.flushReacts(tx, view); err != nil { + return err + } if err := postgres.flushFollows(tx, view); err != nil { return err } @@ -1807,6 +1839,44 @@ func (postgres *Postgres) flushLikes(tx *pg.Tx, view *UtxoView) error { return nil } +func (postgres *Postgres) flushReacts(tx *pg.Tx, view *UtxoView) error { + var insertReacts []*PGReact + var deleteReacts []*PGReact + for _, reactionEntry := range view.ReactionKeyToReactionEntry { + if reactionEntry == nil { + continue + } + + react := &PGReact{ + ReactorPublicKey: reactionEntry.ReactorPubKey, + ReactorPostHash: reactionEntry.ReactedPostHash, + ReactionEmoji: reactionEntry.ReactEmoji, + } + + if reactionEntry.isDeleted { + deleteReacts = append(deleteReacts, react) + } else { + insertReacts = append(insertReacts, react) + } + } + + if len(insertReacts) > 0 { + _, err := 
tx.Model(&insertReacts).WherePK().OnConflict("DO NOTHING").Returning("NULL").Insert() + if err != nil { + return err + } + } + + if len(deleteReacts) > 0 { + _, err := tx.Model(&deleteReacts).Returning("NULL").Delete() + if err != nil { + return err + } + } + + return nil +} + func (postgres *Postgres) flushFollows(tx *pg.Tx, view *UtxoView) error { var insertFollows []*PGFollow var deleteFollows []*PGFollow @@ -2464,6 +2534,42 @@ func (postgres *Postgres) GetLikesForPost(postHash *BlockHash) []*PGLike { return likes } +// +// Reacts +// +func (postgres *Postgres) GetReaction(reactorPublicKey []byte, reactedPostHash *BlockHash, reactionEmoji rune) *PGReact { + react := PGReact{ + ReactorPublicKey: reactorPublicKey, + ReactorPostHash: reactedPostHash, + ReactionEmoji: reactionEmoji, + } + err := postgres.db.Model(&react).WherePK().First() + if err != nil { + return nil + } + return &react +} +func (postgres *Postgres) GetReacts(reacts []*PGReact) []*PGReact { + err := postgres.db.Model(&reacts).WherePK().Select() + if err != nil { + return nil + } + return reacts +} + +func (postgres *Postgres) GetReactionsForPost(postHash *BlockHash, reactionEmoji rune) []*PGReact { + var reacts []*PGReact + err := postgres.db.Model(&reacts). + Where("reactor_post_hash = ?", postHash). + Where("reaction_emoji = ?", reactionEmoji). + Select() + if err != nil { + return nil + } + //TODO (Michel) You'll also want to write a migration for postgres in the migrations directory. + return reacts +} + // // Follows //