Merge pull request #146 from lightninglabs/rescueclosed

rescueclosed: make number of keys configurable
Oliver Gugger authored 2 months ago, committed by GitHub
commit 3a0bb3a2d4

@@ -35,7 +35,12 @@ ifneq ($(sys),)
BUILD_SYSTEM = $(sys)
endif
DOCKER_TOOLS = docker run -v $$(pwd):/build chantools-tools
DOCKER_TOOLS = docker run \
--rm \
-v $(shell bash -c "go env GOCACHE || (mkdir -p /tmp/go-cache; echo /tmp/go-cache)"):/tmp/build/.cache \
-v $(shell bash -c "go env GOMODCACHE || (mkdir -p /tmp/go-modcache; echo /tmp/go-modcache)"):/tmp/build/.modcache \
-v $(shell bash -c "mkdir -p /tmp/go-lint-cache; echo /tmp/go-lint-cache"):/root/.cache/golangci-lint \
-v $$(pwd):/build chantools-tools
TEST_FLAGS = -test.timeout=20m
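
(Context, inferred from the hunk above: the tools container now runs with --rm and mounts the host's Go build cache, Go module cache, and a golangci-lint cache into the container, falling back to freshly created /tmp directories when go env reports no cache, so repeated containerized builds and lint runs can reuse downloaded modules and compiled packages instead of starting cold.)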

@@ -168,10 +168,10 @@ func (a *ExplorerAPI) Unspent(addr string) ([]*Vout, error) {
// Now filter those that are really unspent, because above we get all
// outputs that are sent to the address.
var unspent []*Vout
for idx, vout := range outputs {
for _, vout := range outputs {
url := fmt.Sprintf(
"%s/tx/%s/outspend/%d", a.BaseURL, vout.Outspend.Txid,
idx,
vout.Outspend.Vin,
)
outspend := Outspend{}
err := fetchJSON(url, &outspend)
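
(Note on the fix above, read from the hunk rather than the PR description: idx was the entry's position in the locally filtered outputs slice, which is not in general the index the explorer expects for the outspend lookup; the corrected call uses the index that was recorded for the output in vout.Outspend.Vin, alongside its transaction ID in vout.Outspend.Txid.)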

@@ -5,8 +5,8 @@ import (
"encoding/hex"
"errors"
"fmt"
"io/ioutil"
"net"
"os"
"strconv"
"strings"
"time"
@@ -133,7 +133,7 @@ func (c *fakeChanBackupCommand) Execute(_ *cobra.Command, _ []string) error {
}
if c.FromChannelGraph != "" {
graphBytes, err := ioutil.ReadFile(c.FromChannelGraph)
graphBytes, err := os.ReadFile(c.FromChannelGraph)
if err != nil {
return fmt.Errorf("error reading graph JSON file %s: "+
"%v", c.FromChannelGraph, err)

@@ -7,7 +7,7 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"time"
"github.com/btcsuite/btcd/btcutil/hdkeychain"
@@ -230,5 +230,5 @@ func forceCloseChannels(apiURL string, extendedKey *hdkeychain.ExtendedKey,
fileName := fmt.Sprintf("results/forceclose-%s.json",
time.Now().Format("2006-01-02-15-04-05"))
log.Infof("Writing result to %s", fileName)
return ioutil.WriteFile(fileName, summaryBytes, 0644)
return os.WriteFile(fileName, summaryBytes, 0644)
}

@@ -6,7 +6,7 @@ import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"regexp"
"time"
@@ -22,8 +22,8 @@ import (
)
var (
cacheSize = 5000
cache []*cacheEntry
defaultNumKeys uint32 = 5000
cache []*cacheEntry
errAddrNotFound = errors.New("addr not found")
@@ -40,6 +40,7 @@ type rescueClosedCommand struct {
Addr string
CommitPoint string
LndLog string
NumKeys uint32
rootKey *rootKey
inputs *inputFlags
@@ -103,7 +104,12 @@ chantools rescueclosed --fromsummary results/summary-xxxxxx.json \
cc.cmd.Flags().StringVar(
&cc.LndLog, "lnd_log", "", "the lnd log file to read to get "+
"the commit_point values when rescuing multiple "+
"channels at the same time")
"channels at the same time",
)
cc.cmd.Flags().Uint32Var(
&cc.NumKeys, "num_keys", defaultNumKeys, "the number of keys "+
"to derive for the brute force attack",
)
cc.rootKey = newRootKey(cc.cmd, "decrypting the backup")
cc.inputs = newInputFlags(cc.cmd)
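
(Usage note, not part of the diff: the brute force previously always derived exactly 5,000 keys; with the new flag a wallet whose payment base point sits at a higher index can widen the search, e.g. chantools rescueclosed --fromsummary results/summary-xxxxxx.json --num_keys 20000, where 20000 is just an illustrative value. The default of defaultNumKeys keeps existing invocations unchanged at 5,000 keys.)
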
@@ -136,7 +142,9 @@ func (c *rescueClosedCommand) Execute(_ *cobra.Command, _ []string) error {
return fmt.Errorf("error reading commit points from "+
"db: %w", err)
}
return rescueClosedChannels(extendedKey, entries, commitPoints)
return rescueClosedChannels(
c.NumKeys, extendedKey, entries, commitPoints,
)
case c.Addr != "":
// First parse address to get targetPubKeyHash from it later.
@@ -156,7 +164,9 @@ func (c *rescueClosedCommand) Execute(_ *cobra.Command, _ []string) error {
return fmt.Errorf("error parsing commit point: %w", err)
}
return rescueClosedChannel(extendedKey, targetAddr, commitPoint)
return rescueClosedChannel(
c.NumKeys, extendedKey, targetAddr, commitPoint,
)
case c.LndLog != "":
// Parse channel entries from any of the possible input files.
@@ -170,7 +180,9 @@ func (c *rescueClosedCommand) Execute(_ *cobra.Command, _ []string) error {
return fmt.Errorf("error parsing commit points from "+
"log file: %w", err)
}
return rescueClosedChannels(extendedKey, entries, commitPoints)
return rescueClosedChannels(
c.NumKeys, extendedKey, entries, commitPoints,
)
default:
return errors.New("you either need to specify --channeldb and " +
@@ -204,7 +216,7 @@ func commitPointsFromDB(chanDb *channeldb.ChannelStateDB) ([]*btcec.PublicKey,
}
func commitPointsFromLogFile(lndLog string) ([]*btcec.PublicKey, error) {
logFileBytes, err := ioutil.ReadFile(lndLog)
logFileBytes, err := os.ReadFile(lndLog)
if err != nil {
return nil, fmt.Errorf("error reading log file %s: %w", lndLog,
err)
@@ -241,11 +253,11 @@ func commitPointsFromLogFile(lndLog string) ([]*btcec.PublicKey, error) {
return result, nil
}
func rescueClosedChannels(extendedKey *hdkeychain.ExtendedKey,
func rescueClosedChannels(numKeys uint32, extendedKey *hdkeychain.ExtendedKey,
entries []*dataformat.SummaryEntry,
possibleCommitPoints []*btcec.PublicKey) error {
err := fillCache(extendedKey)
err := fillCache(numKeys, extendedKey)
if err != nil {
return err
}
@@ -279,7 +291,7 @@ outer:
addr = entry.ClosingTX.ToRemoteAddr
}
wif, err := addrInCache(addr, commitPoint)
wif, err := addrInCache(numKeys, addr, commitPoint)
switch {
case err == nil:
entry.ClosingTX.SweepPrivkey = wif
@@ -313,10 +325,10 @@ outer:
fileName := fmt.Sprintf("results/rescueclosed-%s.json",
time.Now().Format("2006-01-02-15-04-05"))
log.Infof("Writing result to %s", fileName)
return ioutil.WriteFile(fileName, summaryBytes, 0644)
return os.WriteFile(fileName, summaryBytes, 0644)
}
func rescueClosedChannel(extendedKey *hdkeychain.ExtendedKey,
func rescueClosedChannel(numKeys uint32, extendedKey *hdkeychain.ExtendedKey,
addr btcutil.Address, commitPoint *btcec.PublicKey) error {
// Make the check on the decoded address according to the active
@@ -336,12 +348,12 @@ func rescueClosedChannel(extendedKey *hdkeychain.ExtendedKey,
return errors.New("address: must be a bech32 P2WPKH address")
}
err := fillCache(extendedKey)
err := fillCache(numKeys, extendedKey)
if err != nil {
return err
}
wif, err := addrInCache(addr.String(), commitPoint)
wif, err := addrInCache(numKeys, addr.String(), commitPoint)
switch {
case err == nil:
log.Infof("Found private key %s for address %v!", wif, addr)
@@ -356,7 +368,7 @@ func rescueClosedChannel(extendedKey *hdkeychain.ExtendedKey,
}
// Try again as a static_remote_key address.
wif, err = addrInCache(addr.String(), nil)
wif, err = addrInCache(numKeys, addr.String(), nil)
switch {
case err == nil:
log.Infof("Found private key %s for address %v!", wif, addr)
@@ -372,7 +384,9 @@ func rescueClosedChannel(extendedKey *hdkeychain.ExtendedKey,
}
}
func addrInCache(addr string, perCommitPoint *btcec.PublicKey) (string, error) {
func addrInCache(numKeys uint32, addr string,
perCommitPoint *btcec.PublicKey) (string, error) {
targetPubKeyHash, scriptHash, err := lnd.DecodeAddressHash(
addr, chainParams,
)
@@ -386,7 +400,7 @@ func addrInCache(addr string, perCommitPoint *btcec.PublicKey) (string, error) {
// If the commit point is nil, we try with plain private keys to match
// static_remote_key outputs.
if perCommitPoint == nil {
for i := range cacheSize {
for i := range numKeys {
cacheEntry := cache[i]
hashedPubKey := btcutil.Hash160(
cacheEntry.pubKey.SerializeCompressed(),
@@ -415,7 +429,7 @@ func addrInCache(addr string, perCommitPoint *btcec.PublicKey) (string, error) {
// Loop through all cached payment base point keys, tweak each of it
// with the per_commit_point and see if the hashed public key
// corresponds to the target pubKeyHash of the given address.
for i := range cacheSize {
for i := range numKeys {
cacheEntry := cache[i]
basePoint := cacheEntry.pubKey
tweakedPubKey := input.TweakPubKey(basePoint, perCommitPoint)
@@ -446,17 +460,16 @@ func addrInCache(addr string, perCommitPoint *btcec.PublicKey) (string, error) {
return "", errAddrNotFound
}
func fillCache(extendedKey *hdkeychain.ExtendedKey) error {
cache = make([]*cacheEntry, cacheSize)
func fillCache(numKeys uint32, extendedKey *hdkeychain.ExtendedKey) error {
cache = make([]*cacheEntry, numKeys)
for i := range cacheSize {
for i := range numKeys {
key, err := lnd.DeriveChildren(extendedKey, []uint32{
lnd.HardenedKeyStart + uint32(keychain.BIP0043Purpose),
lnd.HardenedKeyStart + chainParams.HDCoinType,
lnd.HardenedKeyStart +
uint32(keychain.KeyFamilyPaymentBase),
0,
uint32(i),
lnd.HardenedKeyStart + uint32(
keychain.KeyFamilyPaymentBase,
), 0, i,
})
if err != nil {
return err
@@ -476,7 +489,7 @@ func fillCache(extendedKey *hdkeychain.ExtendedKey) error {
if i > 0 && i%10000 == 0 {
fmt.Printf("Filled cache with %d of %d keys.\n",
i, cacheSize)
i, numKeys)
}
}
return nil
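
Taken together, fillCache and addrInCache implement a brute-force search: derive the first numKeys payment base point keys along lnd's BIP43 path (purpose 1017', the active network's coin type, key family KeyFamilyPaymentBase, branch 0), hash each candidate public key, optionally tweaked by the per-commitment point, and compare it against the target pubkey hash of the address. The following is a condensed, self-contained sketch of the untweaked variant of that search, assembled from the hunks above rather than copied from chantools; hdkeychain.HardenedKeyStart stands in for chantools' lnd.HardenedKeyStart (same 0x80000000 value), and the xprv string and target hash in main are placeholders:

package main

import (
	"bytes"
	"encoding/hex"
	"fmt"
	"log"

	"github.com/btcsuite/btcd/btcutil"
	"github.com/btcsuite/btcd/btcutil/hdkeychain"
	"github.com/btcsuite/btcd/chaincfg"
	"github.com/lightningnetwork/lnd/keychain"
)

// findKeyIndex derives the first numKeys payment base point keys below the
// given root key and returns the index whose Hash160 matches targetPubKeyHash.
func findKeyIndex(rootKey *hdkeychain.ExtendedKey, params *chaincfg.Params,
	numKeys uint32, targetPubKeyHash []byte) (uint32, error) {

	// m/1017'/coin_type'/key_family'/0/index, the path used by fillCache.
	path := []uint32{
		hdkeychain.HardenedKeyStart + uint32(keychain.BIP0043Purpose),
		hdkeychain.HardenedKeyStart + params.HDCoinType,
		hdkeychain.HardenedKeyStart + uint32(keychain.KeyFamilyPaymentBase),
		0,
	}

	for i := uint32(0); i < numKeys; i++ {
		key := rootKey
		var err error
		for _, child := range append(path, i) {
			if key, err = key.Derive(child); err != nil {
				return 0, err
			}
		}

		pubKey, err := key.ECPubKey()
		if err != nil {
			return 0, err
		}

		// Compare the hashed candidate key against the target hash,
		// like the static_remote_key branch of addrInCache does.
		hashedPubKey := btcutil.Hash160(pubKey.SerializeCompressed())
		if bytes.Equal(hashedPubKey, targetPubKeyHash) {
			return i, nil
		}
	}

	return 0, fmt.Errorf("no matching key within the first %d keys",
		numKeys)
}

func main() {
	// Placeholder xprv; a real extended private key is required here.
	rootKey, err := hdkeychain.NewKeyFromString("xprv-placeholder")
	if err != nil {
		log.Fatalf("need a real xprv here: %v", err)
	}

	// Placeholder 20-byte pubkey hash of the address being rescued.
	target, _ := hex.DecodeString(
		"00112233445566778899aabbccddeeff00112233",
	)

	idx, err := findKeyIndex(rootKey, &chaincfg.MainNetParams, 5000, target)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("found key at index %d\n", idx)
}

The real command additionally tries each cached key tweaked with input.TweakPubKey(basePoint, perCommitPoint) before giving up, which covers channels negotiated without option_static_remotekey.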

@@ -5,7 +5,7 @@ import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"io"
"os"
"strings"
"time"
@@ -299,9 +299,9 @@ func (f *inputFlags) parseInputType() ([]*dataformat.SummaryEntry, error) {
func readInput(input string) ([]byte, error) {
if strings.TrimSpace(input) == "-" {
return ioutil.ReadAll(os.Stdin)
return io.ReadAll(os.Stdin)
}
return ioutil.ReadFile(input)
return os.ReadFile(input)
}
func setupLogging() {

@@ -3,7 +3,6 @@ package main
import (
"bytes"
"io"
"io/ioutil"
"os"
"path"
"regexp"
@@ -53,14 +52,12 @@ func newHarness(t *testing.T) *harness {
buf := &bytes.Buffer{}
logBackend := btclog.NewBackend(buf)
tempDir, err := ioutil.TempDir("", "chantools")
require.NoError(t, err)
h := &harness{
t: t,
logBuffer: buf,
logger: logBackend.Logger("CHAN"),
tempDir: tempDir,
tempDir: t.TempDir(),
}
h.logger.SetLevel(btclog.LevelTrace)
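
(Note: t.TempDir creates a per-test directory that the testing package removes automatically when the test finishes, which is why the explicit ioutil.TempDir call and its require.NoError check can be dropped from the harness.)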

@@ -3,7 +3,7 @@ package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"time"
"github.com/lightninglabs/chantools/btc"
@@ -88,5 +88,5 @@ func summarizeChannels(apiURL string,
fileName := fmt.Sprintf("results/summary-%s.json",
time.Now().Format("2006-01-02-15-04-05"))
log.Infof("Writing result to %s", fileName)
return ioutil.WriteFile(fileName, summaryBytes, 0644)
return os.WriteFile(fileName, summaryBytes, 0644)
}

@@ -6,7 +6,6 @@ import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"time"
@@ -76,7 +75,7 @@ func (c *zombieRecoveryPrepareKeysCommand) Execute(_ *cobra.Command,
return errors.New("invalid payout address, must be P2WPKH")
}
matchFileBytes, err := ioutil.ReadFile(c.MatchFile)
matchFileBytes, err := os.ReadFile(c.MatchFile)
if err != nil {
return fmt.Errorf("error reading match file %s: %w",
c.MatchFile, err)
