Compare commits
13 Commits
v3.0.1-bet ... v3.0.1-bet
| Author | SHA1 | Date |
|---|---|---|
| | 628cd75941 | |
| | 1da14c5c3b | |
| | c83d0fcff2 | |
| | 573af341b8 | |
| | a64168bee2 | |
| | c678ae5f9a | |
| | e95967db53 | |
| | 29e6c056d1 | |
| | deadbe9383 | |
| | 5cbec2e06f | |
| | 66d284f183 | |
| | ae64fd6e99 | |
| | 305bd3008d | |
24 .github/workflows/python-publish.yml (vendored)

@@ -14,19 +14,33 @@ jobs:

steps:
- uses: actions/checkout@v2
- name: Set up Python

- name: Set up Python 3.x
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies

- name: Set up Python 2.x
uses: actions/setup-python@v2
with:
python-version: '2.x'

- name: Install dependencies 3.x
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
pip3 install setuptools wheel twine

- name: Install dependencies 2.x
run: |
python2 -m pip install --upgrade pip
pip2 install setuptools wheel twine

- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
run: |
python setup.py sdist bdist_wheel
python3 -m twine check dist/*
python3 setup.py sdist bdist_wheel
python2 setup.py sdist bdist_wheel
twine check dist/*
twine upload dist/*
28 .github/workflows/regression.yml (vendored)

@@ -17,7 +17,7 @@ jobs:

- name: Prepare
run: lsmod && sudo apt update && sudo apt install zfsutils-linux && sudo -H pip3 install coverage unittest2 mock==3.0.5 coveralls
run: sudo apt update && sudo apt install zfsutils-linux && sudo -H pip3 install coverage unittest2 mock==3.0.5 coveralls

- name: Regression test

@@ -39,7 +39,7 @@ jobs:

- name: Prepare
run: lsmod && sudo apt update && sudo apt install zfsutils-linux python3-setuptools && sudo -H pip3 install coverage unittest2 mock==3.0.5 coveralls
run: sudo apt update && sudo apt install zfsutils-linux python3-setuptools && sudo -H pip3 install coverage unittest2 mock==3.0.5 coveralls

- name: Regression test

@@ -50,3 +50,27 @@ jobs:

env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: coveralls --service=github

ubuntu18_python2:
runs-on: ubuntu-18.04

steps:
- name: Checkout
uses: actions/checkout@v2.3.4

- name: Set up Python 2.x
uses: actions/setup-python@v2
with:
python-version: '2.x'

- name: Prepare
run: sudo apt update && sudo apt install zfsutils-linux python-setuptools && sudo -H pip install coverage unittest2 mock==3.0.5 coveralls

- name: Regression test
run: sudo -E ./tests/run_tests

- name: Coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: coveralls --service=github
1 .gitignore (vendored)

@@ -11,4 +11,3 @@ __pycache__

python2.env
venv
.idea
OQ
11 README.md

@@ -148,6 +148,8 @@ rpool/swap autobackup:offsite1 true

...
```

ZFS properties are ```inherited``` by child datasets. Since we've set the property on the highest dataset, we're essentially backing up the whole pool.

Because we don't want to back up everything, we can exclude certain filesystems by setting the property to false:

```console

@@ -163,6 +165,13 @@ rpool/swap autobackup:offsite1 false

...
```

The autobackup property can have 3 values:
* ```true```: Back up the dataset and all its children.
* ```false```: Don't back up the dataset or any of its children. (used to exclude certain datasets)
* ```child```: Only back up the children of the dataset, not the dataset itself.

Only use the zfs command to set these properties, not the zpool command.

### Running zfs-autobackup

Run the script on the backup server and pull the data from the server specified by --ssh-source.

@@ -654,7 +663,7 @@ for HOST in $HOSTS; do

done
```

This script will also send the backup status to Zabbix. (if you've installed my zabbix-job-status script)
This script will also send the backup status to Zabbix. (if you've installed my zabbix-job-status script https://github.com/psy0rz/stuff/tree/master/zabbix-jobs)

# Sponsor list
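
The true/false/child semantics described in the README hunk above can be illustrated with a small standalone sketch. This is not code from the patch: the helper function, the dict-based property layout and the dataset names are made up for illustration, and the real tool reads the autobackup property from zfs itself rather than from a Python dict.

```python
# Illustrative sketch only (not code from this patch): how autobackup:NAME property
# values are meant to select datasets, including inheritance from parent datasets.
def selected_datasets(properties):
    """properties maps dataset path -> 'true', 'false', 'child', or None when
    the property is not set locally (hypothetical stand-in for zfs get)."""
    selected = []
    for dataset in properties:
        # walk up to the closest ancestor that sets the property explicitly
        origin = dataset
        while properties.get(origin) is None and "/" in origin:
            origin = origin.rsplit("/", 1)[0]
        value = properties.get(origin)

        if value == "true":
            selected.append(dataset)
        elif value == "child" and dataset != origin:
            # 'child' backs up only the children, not the dataset it is set on
            selected.append(dataset)
        # 'false', or no value anywhere up the tree, excludes the dataset
    return selected


# Hypothetical layout mirroring the README example: back up the whole pool,
# but exclude swap.
props = {
    "rpool": "true",
    "rpool/ROOT": None,
    "rpool/ROOT/ubuntu": None,
    "rpool/swap": "false",
}
print(selected_datasets(props))   # ['rpool', 'rpool/ROOT', 'rpool/ROOT/ubuntu']
```
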
@@ -1,7 +1,7 @@

from basetest import *


class TestZfsNode(unittest2.TestCase):
class TestExternalFailures(unittest2.TestCase):

def setUp(self):
prepare_zpools()

@@ -259,8 +259,28 @@ test_target1/test_source2/fs2/sub@test-20101111000002

with patch('time.strftime', return_value="20101111000001"):
self.assertTrue(ZfsAutobackup("test test_target1 --verbose --allow-empty".split(" ")).run())

############# TODO:
#UPDATE: offcourse the one thing that wasn't tested had a bug :( (in ExecuteNode.run()).
def test_ignoretransfererrors(self):

self.skipTest(
"todo: create some kind of situation where zfs recv exits with an error but transfer is still ok (happens in practice with acltype)")
self.skipTest("Not sure how to implement a test for this without some serious hacking and patching.")

# #recreate target pool without any features
# # shelltest("zfs set compress=on test_source1; zpool destroy test_target1; zpool create test_target1 -o feature@project_quota=disabled /dev/ram2")
#
# with patch('time.strftime', return_value="20101111000000"):
# self.assertFalse(ZfsAutobackup("test test_target1 --verbose --allow-empty --no-progress".split(" ")).run())
#
# r = shelltest("zfs list -H -o name -r -t all test_target1")
#
# self.assertMultiLineEqual(r, """
# test_target1
# test_target1/test_source1
# test_target1/test_source1/fs1
# test_target1/test_source1/fs1@test-20101111000002
# test_target1/test_source1/fs1/sub
# test_target1/test_source1/fs1/sub@test-20101111000002
# test_target1/test_source2
# test_target1/test_source2/fs2
# test_target1/test_source2/fs2/sub
# test_target1/test_source2/fs2/sub@test-20101111000002
# """)
@@ -4,6 +4,7 @@ import subprocess

from zfs_autobackup.LogStub import LogStub


class ExecuteNode(LogStub):
"""an endpoint to execute local or remote commands via ssh"""

@@ -46,17 +47,23 @@ class ExecuteNode(LogStub):

def run(self, cmd, inp=None, tab_split=False, valid_exitcodes=None, readonly=False, hide_errors=False, pipe=False,
return_stderr=False):
"""run a command on the node cmd: the actual command, should be a list, where the first item is the command
and the rest are parameters. input: Can be None, a string or a pipe-handle you got from another run()
tab_split: split tabbed files in output into a list valid_exitcodes: list of valid exit codes for this
command (checks exit code of both sides of a pipe) readonly: make this True if the command doesn't make any
changes and is safe to execute in testmode hide_errors: don't show stderr output as error, instead show it as
debugging output (use to hide expected errors) pipe: Instead of executing, return a pipe-handle to be used to
input to another run() command. (just like a | in linux) return_stderr: return both stdout and stderr as a
tuple. (only returns stderr from this side of the pipe)
"""run a command on the node.

:param cmd: the actual command, should be a list, where the first item is the command
and the rest are parameters.
:param inp: Can be None, a string or a pipe-handle you got from another run()
:param tab_split: split tabbed files in output into a list
:param valid_exitcodes: list of valid exit codes for this command (checks exit code of both sides of a pipe)
Use [] to accept all exit codes.
:param readonly: make this True if the command doesn't make any changes and is safe to execute in testmode
:param hide_errors: don't show stderr output as error, instead show it as debugging output (use to hide expected errors)
:param pipe: Instead of executing, return a pipe-handle to be used to
input to another run() command. (just like a | in linux)
:param return_stderr: return both stdout and stderr as a tuple. (normally only returns stdout)

"""

if not valid_exitcodes:
if valid_exitcodes is None:
valid_exitcodes = [0]

encoded_cmd = []

@@ -196,4 +203,4 @@ class ExecuteNode(LogStub):

if return_stderr:
return output_lines, error_lines
else:
return output_lines
return output_lines
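
The rewritten run() docstring above spells out how cmd, inp, pipe, tab_split, valid_exitcodes and return_stderr interact. A rough usage sketch follows; the import path and the assumption that ExecuteNode() with no arguments executes commands locally are mine, not something this diff states.

```python
# Rough usage sketch based on the documented run() parameters; not code from the patch.
# Assumes ExecuteNode() without arguments executes commands locally.
from zfs_autobackup.ExecuteNode import ExecuteNode

node = ExecuteNode()

# Default: stdout is returned as a list of lines and the exit code must be 0.
lines = node.run(["echo", "hello"])

# tab_split=True: each output line becomes a list of tab-separated fields
# (the same shape you would get when parsing "zfs list -H" output).
fields = node.run(["printf", r"name\tused\n"], tab_split=True)

# valid_exitcodes=[] accepts any exit code (per the new docstring);
# leaving it at None keeps the default of [0].
node.run(["false"], valid_exitcodes=[])

# pipe=True returns a pipe-handle instead of running to completion;
# feed it into another run() via inp=, like "echo piped | cat" in a shell.
handle = node.run(["echo", "piped"], pipe=True)
out = node.run(["cat"], inp=handle)

# return_stderr=True returns a (stdout_lines, stderr_lines) tuple.
out_lines, err_lines = node.run(["ls", "/nonexistent"], return_stderr=True,
                                hide_errors=True, valid_exitcodes=[])
```
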
@@ -1,5 +1,9 @@

# python 2 compatibility
from __future__ import print_function

import sys


colorama = False
if sys.stdout.isatty():
try:

@@ -39,4 +43,4 @@ class LogConsole:

print(colorama.Fore.GREEN + "# " + txt + colorama.Style.RESET_ALL)
else:
print("# " + txt)
sys.stdout.flush()
sys.stdout.flush()
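
The LogConsole hunks above add a flush after every message and keep the existing optional-colorama setup. A self-contained sketch of that pattern follows; the log_verbose name is illustrative, not taken from the diff.

```python
# Self-contained sketch of the pattern used above: colorize only when stdout is a
# real terminal and colorama is importable, and flush after every message.
import sys

colorama = False
if sys.stdout.isatty():
    try:
        import colorama  # rebinds the name only when the import succeeds
    except ImportError:
        colorama = False


def log_verbose(txt):  # function name is illustrative, not from the diff
    if colorama:
        print(colorama.Fore.GREEN + "# " + txt + colorama.Style.RESET_ALL)
    else:
        print("# " + txt)
    sys.stdout.flush()  # the patch adds this flush so output appears immediately


log_verbose("hello from the log sketch")
```
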
@@ -12,7 +12,7 @@ from zfs_autobackup.ThinnerRule import ThinnerRule

class ZfsAutobackup:
"""main class"""

VERSION = "3.0.1-beta4"
VERSION = "3.0.1-beta8"
HEADER = "zfs-autobackup v{} - Copyright 2020 E.H.Eefting (edwin@datux.nl)".format(VERSION)

def __init__(self, argv, print_arguments=True):

@@ -23,8 +23,7 @@ class ZfsAutobackup:

parser = argparse.ArgumentParser(
description=self.HEADER,
epilog='When a filesystem fails, zfs_backup will continue and report the number of failures at that end. '
'Also the exit code will indicate the number of failures. Full manual at: https://github.com/psy0rz/zfs_autobackup')
epilog='Full manual at: https://github.com/psy0rz/zfs_autobackup')
parser.add_argument('--ssh-config', default=None, help='Custom ssh client config')
parser.add_argument('--ssh-source', default=None,
help='Source host to get backup from. (user@hostname) Default %(default)s.')
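
The removed epilog text above states that zfs_autobackup keeps going when a dataset fails and uses the exit code to report the number of failures. Below is a hedged sketch of driving that from Python, modeled on the test invocations earlier in this diff; the argument string is only an example, and treating run()'s return value as the failure count is my reading of that epilog, not something the diff guarantees.

```python
# Sketch only: the argument list is an example; run() returning the failure count is
# inferred from the removed epilog and the assertTrue/assertFalse calls in the tests.
import sys
from zfs_autobackup.ZfsAutobackup import ZfsAutobackup

failures = ZfsAutobackup("offsite1 backuppool/offsite1 --verbose".split(" ")).run()
if failures:
    print("{} dataset(s) failed".format(failures))
sys.exit(failures)
```
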
@@ -489,8 +489,8 @@ class ZfsDataset:

if self.zfs_node.readonly:
self.force_exists = True

# check if transfer was really ok (exit codes have been wrong before due to bugs in zfs-utils and can be
# ignored by some parameters)
# check if transfer was really ok (exit codes have been wrong before due to bugs in zfs-utils and some
# errors should be ignored, thats where the ignore_exitcodes is for.)
if not self.exists:
self.error("error during transfer")
raise (Exception("Target doesn't exist after transfer, something went wrong."))
@@ -1,3 +1,5 @@

# python 2 compatibility
from __future__ import print_function
import re
import subprocess
import sys
@@ -1,4 +1,6 @@


def cli():
import sys
from zfs_autobackup.ZfsAutobackup import ZfsAutobackup