
Commit f0c6b63

dbbackup: fixup, update README.md, add requirements.txt
1 parent ffb9d6b commit f0c6b63

4 files changed (+67 -43 lines)


dbbackup/README.md (+44 -7)
@@ -1,10 +1,47 @@
-# dbbackup plugin
+## Summary
 
-STILL WORK-IN-PROGRESS ** EXPERIMENTAL **
+This plugin keeps a synchronized backup of c-lightning's (CL) sqlite3 database.
+It uses the db_write hook so that every commit (write) to CL's database is first
+written to the backup. This allows recovery of any committed-to channel state,
+including HTLCs. This plugin does not back up the seed and is not a complete
+node backup.
 
-# testing
-The tests utilize c-lightning's pytest framework. To run the tests, you should symlink
-this repositories (dbbackup) directory into c-lightning repo's /test directory.
+## Installation
 
-Then go into c-lighting repo directory and to run dbbackup's two tests:
-`DEVELOPER=1 py.test tests/ -s -v -k test_dbbackup_`
+**DO NOT** copy or link this repository's `/dbbackup` directory into
+`~/.lightning/plugins`, because it also contains executable python scripts
+in its `/tests` sub-directory.
+
+Instead, copy or link the single `dbbackup.py` file into
+`~/.lightning/plugins/dbbackup` or start CL with the option:
+
+`--plugin=/path/to/this/repo/plugins/dbbackup/dbbackup.py`
+
+## Options
+
+* `--db-backup-file`: path of the backup file
+
+## Usage
+
+If the given `db-backup-file` doesn't exist yet, it will be created from a
+copy of CL's database.
+
+During startup, any existing backup file is checked to match CL's current
+database. If that check fails, or initialization fails for other reasons, the
+plugin will shut down CL and log `**BROKEN**`. If the plugin fails to write to
+the backup file, it will trigger CL to crash.
+
+The backup file is created with read/write permission for the owner only; it
+contains sensitive information, so handle it with care. If the plugin complains
+about a mismatch between the backup and the original db, investigate what
+caused it before recovering.
+
+To recover: shut down CL and copy the backup to `~/.lightning/lightningd.sqlite3`.
+File permissions may need to be restored.
+
+## Testing
+
+The tests use c-lightning's pytest framework. To run them, link or copy this
+repository's `/dbbackup` directory into the c-lightning repo's `/tests`
+directory. Then cd into the c-lightning repo directory and run the
+test_dbbackup_* tests with: `DEVELOPER=1 py.test tests/ -s -v -k test_dbbackup_`
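
For readers unfamiliar with the db_write hook the README describes, here is a minimal, illustrative sketch of the general shape of such a plugin using pylightning. This is not the code in this commit: it assumes the hook payload exposes the SQL statements as a `writes` list and that returning True lets lightningd proceed, and it omits the real plugin's handling of writes that arrive before `init`, its consistency check, and the owner-only file permissions.

```python
#!/usr/bin/env python3
"""Minimal sketch of a db_write-hook backup plugin (illustration only)."""
import os
import shutil
import sqlite3

from lightning import Plugin

plugin = Plugin()


@plugin.init()
def init(configuration, options, plugin):
    db = os.path.join(configuration['lightning-dir'], 'lightningd.sqlite3')
    backup = plugin.get_option('db-backup-file')
    if not os.path.isfile(backup):
        # First run: seed the backup with a copy of CL's current database.
        shutil.copy(db, backup)
    plugin.backup_conn = sqlite3.connect(backup)


@plugin.hook('db_write')
def db_write(writes, plugin, **kwargs):
    # Replay every statement on the backup before CL commits it itself.
    # Assumption: the payload field is `writes` and a True result means "go on".
    for stmt in writes:
        plugin.backup_conn.execute(stmt)
    plugin.backup_conn.commit()
    return True


plugin.add_option('db-backup-file', None, 'Path of the backup file')
plugin.run()
```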

dbbackup/dbbackup.py (+7 -7)
@@ -1,12 +1,11 @@
 #!/usr/bin/env python3
-"""This plugin creates and updates a backup of c-lightnings .sqlite3 database.
-"""
 from lightning import Plugin
 import os
 import shutil
 import sqlite3
 from stat import S_IRUSR, S_IWUSR
 
+
 plugin = Plugin()
 plugin.sqlite_pre_init_cmds = []
 plugin.initted = False
@@ -38,8 +37,7 @@ def init(configuration, options, plugin):
     db = os.path.join(configuration['lightning-dir'], 'lightningd.sqlite3')
     backup = plugin.get_option('db-backup-file')
 
-    # If backup exist, replay pre_init_cmds on a temporary copy and compare that
-    # with the original database
+    # If backup exists, replay pre_init_cmds on a temporary copy
     if os.path.isfile(backup):
         plugin.log('Found existing db-backup-file: {} comparing...'.format(backup))
         backup_copy = shutil.copy(backup, backup + '.tmp')
@@ -48,7 +46,11 @@ def init(configuration, options, plugin):
         for c in plugin.sqlite_pre_init_cmds:
             db1.execute(c)
 
-        if [x for x in db1.iterdump()] == [x for x in db2.iterdump()]:
+        # If it then matches the original db, replace backup with copy ... else abort
+        dbs_match = [x for x in db1.iterdump()] == [x for x in db2.iterdump()]
+        db1.close()
+        db2.close()
+        if dbs_match:
             os.rename(backup_copy, backup)
             plugin.log("Existing db-backup-file OK and successfully synced")
         else:
@@ -57,8 +59,6 @@ def init(configuration, options, plugin):
             os.remove(backup_copy)
             plugin.rpc.stop()  # stop lightningd
 
-        db1.close()
-        db2.close()
     else:
         new_backup_file(db, backup)
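
The last two hunks move the `db1.close()` / `db2.close()` calls so that they run before `os.rename()` / `os.remove()` touch the backup copy, presumably so no open connection remains while the files are shuffled. A standalone sketch of the resulting compare-then-swap idiom, with hypothetical file names rather than the plugin's actual paths:

```python
# Standalone sketch of the compare-then-swap idiom; file names are hypothetical.
import os
import sqlite3

db1 = sqlite3.connect('backup.sqlite3.tmp')  # temporary copy with replayed writes
db2 = sqlite3.connect('lightningd.sqlite3')  # the live database

# iterdump() yields the SQL text of the whole database, so comparing the two
# dumps checks logical equality of schema and contents.
dbs_match = list(db1.iterdump()) == list(db2.iterdump())

# Close both connections before renaming or removing: moving a database file
# that still has an open handle is unreliable on some platforms.
db1.close()
db2.close()

if dbs_match:
    os.rename('backup.sqlite3.tmp', 'backup.sqlite3')
else:
    os.remove('backup.sqlite3.tmp')
```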

dbbackup/requirements.txt (+1 -0)
@@ -0,0 +1 @@
+pylightning>=0.0.7.3

dbbackup/tests/test_dbbackup.py (+15 -29)
@@ -1,10 +1,15 @@
 from fixtures import *  # noqa: F401,F403
 from flaky import flaky  # noqa: F401
 from lightning import RpcError
+from utils import DEVELOPER, wait_for
+
+import os
+import pytest
+import re
+import shutil
 import signal
 import time
 import unittest
-from utils import wait_for
 
 
 # Crashing or shutting-down a node raises unpredictable errors/exceptions, thus @flaky
@@ -62,12 +67,12 @@ def test_dbbackup_recover(node_factory, executor):
     # l3 is our unfortunate, may_reconnect=False prevents reconnect-attempts,
     # but incoming or manual connections still work
     db_backup = os.path.join(node_factory.directory, "l3_lightningd.sqlite3-backup")
-    opts=[{'may_reconnect': True},
-          {'may_reconnect': False, 'may_fail': True},
-          {'may_reconnect': False, 'may_fail': True,
-           'plugin': 'tests/dbbackup/dbbackup.py',
-           'db-backup-file': db_backup,
-           'disconnect': ['@WIRE_UPDATE_FULFILL_HTLC']}]
+    opts = [{'may_reconnect': True},
+            {'may_reconnect': False, 'may_fail': True},
+            {'may_reconnect': False, 'may_fail': True,
+             'plugin': 'tests/dbbackup/dbbackup.py',
+             'db-backup-file': db_backup,
+             'disconnect': ['@WIRE_UPDATE_FULFILL_HTLC']}]
 
     # l3 looses its database with a beneficial HTLC in flight
     l1, l2, l3 = node_factory.line_graph(3, opts=opts, wait_for_announce=True)
@@ -85,10 +90,10 @@ def test_dbbackup_recover(node_factory, executor):
     os.rename(db_backup, db_orig)
     l3.daemon.opts.pop('dev-disconnect')
     l3.daemon.opts.pop('dev-no-reconnect')
-    assert l1.rpc.listsendpays(payment_hash = phash)['payments'][0]['status'] == 'pending'
+    assert l1.rpc.listsendpays(payment_hash=phash)['payments'][0]['status'] == 'pending'
     l3.start()
     l2.rpc.connect(l3.info['id'], 'localhost', l3.port)['id']
-    wait_for(lambda: l1.rpc.listsendpays(payment_hash = phash)['payments'][0]['status'] == 'complete')
+    wait_for(lambda: l1.rpc.listsendpays(payment_hash=phash)['payments'][0]['status'] == 'complete')
 
     # a HACK to get around `ValueError: 2 nodes had unexpected reconnections`
     l3.daemon.logs = [re.sub('Peer has reconnected', 'MODDED_PeerHasReconnected', l) for l in l3.daemon.logs]
@@ -150,26 +155,7 @@ def test_dbbackup_plugin_kill(node_factory, executor):
     # kill the plugin, be a bit careful extracting pid from log
     logline = l1.daemon.is_in_log('plugin-manager started\(\d+\).*dbbackup.py')
     assert logline is not None
-    pid = int(re.search(r'plugin-manager started\((\d+)\).*dbbackup.py',logline ).group(1))
+    pid = int(re.search(r'plugin-manager started\((\d+)\).*dbbackup.py', logline).group(1))
     os.kill(pid, signal.SIGTERM)
     time.sleep(1)
     assert l1.daemon.is_in_log(r'\*\*BROKEN\*\*')
-
-
-# @unittest.skipIf(not DEVELOPER, "needs DEVELOPER=1")
-# def test_dummy(node_factory, executor):
-#     # l1, l2 = node_factory.line_graph(2, wait_for_announce=True,
-#     #                                  opts=[{'may_reconnect': False},
-#     #                                        {'may_reconnect': False,
-#     #                                         'disconnect': ['@WIRE_COMMITMENT_SIGNED']}])
-#     l1 = node_factory.get_node(allow_broken_log=True, random_hsm=True, start=False,
-#                                options={'plugin': 'tests/dbbackup/dbbackup.py',
-#                                         'db-backup-file': None})
-#
-#     # Now with an invalid path, should error and shutdown
-#     bad_path = node_factory.directory
-#     l1.daemon.opts['db-backup-file'] = bad_path
-#     with pytest.raises(ConnectionResetError):
-#         l1.start()
-#     import ipdb; ipdb.set_trace()
-#     print("\n".join(l1.daemon.logs))
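
Aside from the added imports and the removal of the commented-out `test_dummy`, the changes in this file are flake8-style cleanups: keyword arguments lose the spaces around `=` (pycodestyle E251) and continuation lines are re-aligned. A trivial, self-contained illustration of the E251 rule (the function below is a stand-in, not the test's RPC call):

```python
def listsendpays(payment_hash=None):
    """Stand-in function, only to illustrate keyword-argument spacing."""
    return payment_hash


listsendpays(payment_hash = "00aa")  # flagged by flake8/pycodestyle as E251
listsendpays(payment_hash="00aa")    # the spelling the commit switches to
```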
