update repository references and improve script handling

2025-03-19 20:36:36 +01:00
parent 51b0252b0e
commit 1d90749486
160 changed files with 14361 additions and 18 deletions

View File

@@ -0,0 +1,427 @@
import json
import logging
import os
import tempfile
import asyncio
import platform
import aiohttp
from yarl import URL
import pytest
from aiohttp import ClientSession
from injector import (ClassAssistedBuilder, Injector, Module, inject, provider,
singleton)
from backup.config import Config, Setting
from backup.model import Coordinator
from dev.simulationserver import SimulationServer
from backup.drive import DriveRequests, DriveSource, FolderFinder, AuthCodeQuery
from backup.util import GlobalInfo, Estimator, Resolver, DataCache
from backup.ha import HaRequests, HaSource, HaUpdater
from backup.logger import reset
from backup.model import DummyBackup, DestinationPrecache, Model
from backup.time import Time
from backup.module import BaseModule
from backup.debugworker import DebugWorker
from backup.creds import Creds, DriveRequester
from backup.server import ErrorStore
from backup.ha import AddonStopper
from backup.ui import UiServer
from backup.watcher import Watcher
from .faketime import FakeTime
from .helpers import Uploader, createBackupTar
from dev.ports import Ports
from dev.simulated_google import SimulatedGoogle
from dev.request_interceptor import RequestInterceptor
from dev.simulated_supervisor import SimulatedSupervisor
@singleton
class FsFaker():
@inject
def __init__(self):
self.bytes_free = 1024 * 1024 * 1024
self.bytes_total = 1024 * 1024 * 1024
self.old_method = None
def start(self):
if platform.system() != "Windows":
self.old_method = os.statvfs
os.statvfs = self._hijack
def stop(self):
if platform.system() != "Windows":
os.statvfs = self.old_method
def _hijack(self, path):
return os.statvfs_result((0, 1, int(self.bytes_total), int(self.bytes_free), int(self.bytes_free), 0, 0, 0, 0, 255))
def setFreeBytes(self, bytes_free, bytes_total=1):
self.bytes_free = bytes_free
self.bytes_total = bytes_total
if self.bytes_free > self.bytes_total:
self.bytes_total = self.bytes_free
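# The tuple above follows os.statvfs_result's field order: f_bsize, f_frsize,
# f_blocks, f_bfree, f_bavail, f_files, f_ffree, f_favail, f_flag, f_namemax.
# With f_frsize=1 the faked free space is simply f_bavail bytes. A minimal
# usage sketch (hypothetical path, not one of the fixtures below):
def _example_free_bytes(path="/data"):
    stats = os.statvfs(path)  # routed through FsFaker._hijack while started
    return stats.f_frsize * stats.f_bavail  # == bytes_free under this fake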
class ReaderHelper:
def __init__(self, session, ui_port, ingress_port):
self.session = session
self.ui_port = ui_port
self.ingress_port = ingress_port
self.timeout = aiohttp.ClientTimeout(total=20)
def getUrl(self, ingress=True, ssl=False):
if ssl:
protocol = "https"
else:
protocol = "http"
if ingress:
return protocol + "://localhost:" + str(self.ingress_port) + "/"
else:
return protocol + "://localhost:" + str(self.ui_port) + "/"
async def getjson(self, path, status=200, json=None, auth=None, ingress=True, ssl=False, sslcontext=None):
async with self.session.get(self.getUrl(ingress, ssl) + path, json=json, auth=auth, ssl=sslcontext, timeout=self.timeout) as resp:
assert resp.status == status
return await resp.json()
async def get(self, path, status=200, json=None, auth=None, ingress=True, ssl=False):
async with self.session.get(self.getUrl(ingress, ssl) + path, json=json, auth=auth, timeout=self.timeout) as resp:
if resp.status != status:
logging.getLogger().error(await resp.text())
assert resp.status == status
return await resp.text()
async def postjson(self, path, status=200, json=None, ingress=True):
async with self.session.post(self.getUrl(ingress) + path, json=json, timeout=self.timeout) as resp:
assert resp.status == status
return await resp.json()
async def assertError(self, path, error_type="generic_error", status=500, ingress=True, json=None):
logging.getLogger().info("Requesting " + path)
data = await self.getjson(path, status=status, ingress=ingress, json=json)
assert data['error_type'] == error_type
# This module should only ever have bindings that can also be satisfied by MainModule
class TestModule(Module):
def __init__(self, config: Config, ports: Ports):
self.ports = ports
self.config = config
@provider
@singleton
def getDriveCreds(self, time: Time) -> Creds:
return Creds(time, "test_client_id", time.now(), "test_access_token", "test_refresh_token", "test_client_secret")
@provider
@singleton
def getTime(self) -> Time:
return FakeTime()
@provider
@singleton
def getPorts(self) -> Ports:
return self.ports
@provider
@singleton
def getConfig(self) -> Config:
return self.config
@pytest.fixture
def event_loop():
if platform.system() == "Windows":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
return asyncio.new_event_loop()
@pytest.fixture
async def generate_config(server_url: URL, ports, cleandir):
return Config.withOverrides({
Setting.DRIVE_URL: str(server_url),
Setting.SUPERVISOR_URL: str(server_url) + "/",
Setting.AUTHORIZATION_HOST: str(server_url),
Setting.TOKEN_SERVER_HOSTS: str(server_url),
Setting.DRIVE_REFRESH_URL: str(server_url.with_path("/oauth2/v4/token")),
Setting.DRIVE_AUTHORIZE_URL: str(server_url.with_path("/o/oauth2/v2/auth")),
Setting.DRIVE_TOKEN_URL: str(server_url.with_path("/token")),
Setting.DRIVE_DEVICE_CODE_URL: str(server_url.with_path("/device/code")),
Setting.SUPERVISOR_TOKEN: "test_header",
Setting.SECRETS_FILE_PATH: "secrets.yaml",
Setting.CREDENTIALS_FILE_PATH: "credentials.dat",
Setting.FOLDER_FILE_PATH: "folder.dat",
Setting.RETAINED_FILE_PATH: "retained.json",
Setting.ID_FILE_PATH: "id.json",
Setting.DATA_CACHE_FILE_PATH: "data_cache.json",
Setting.STOP_ADDON_STATE_PATH: "stop_addon.json",
Setting.INGRESS_TOKEN_FILE_PATH: "ingress.dat",
Setting.DEFAULT_DRIVE_CLIENT_ID: "test_client_id",
Setting.DEFAULT_DRIVE_CLIENT_SECRET: "test_client_secret",
Setting.BACKUP_DIRECTORY_PATH: os.path.join(cleandir, "backups"),
Setting.PORT: ports.ui,
Setting.INGRESS_PORT: ports.ingress,
Setting.BACKUP_STARTUP_DELAY_MINUTES: 0,
Setting.PING_TIMEOUT: 0.1,
})
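# Tests tune this baseline at runtime rather than rebuilding the fixture. A
# sketch of the pattern used throughout this commit (any Setting member works
# the same way; override() also returns the config, so calls can be chained):
def _example_override(config: Config) -> None:
    config.override(Setting.PING_TIMEOUT, 0.5)  # takes effect immediately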
@pytest.fixture
async def injector(cleandir, ports, generate_config):
drive_creds = Creds(FakeTime(), "test_client_id", None, "test_access_token", "test_refresh_token")
os.mkdir(os.path.join(cleandir, "backups"))
with open(os.path.join(cleandir, "secrets.yaml"), "w") as f:
f.write("for_unit_tests: \"password value\"\n")
with open(os.path.join(cleandir, "credentials.dat"), "w") as f:
f.write(json.dumps(drive_creds.serialize()))
return Injector([BaseModule(), TestModule(generate_config, ports)])
@pytest.fixture
async def ui_server(injector, server):
os.mkdir("static")
server = injector.get(UiServer)
await server.run()
yield server
await server.shutdown()
@pytest.fixture
def reader(server, ui_server, session, ui_port, ingress_port):
return ReaderHelper(session, ui_port, ingress_port)
@pytest.fixture
async def uploader(injector: Injector, server_url):
return injector.get(ClassAssistedBuilder[Uploader]).build(host=str(server_url))
@pytest.fixture
async def google(injector: Injector):
return injector.get(SimulatedGoogle)
@pytest.fixture
async def interceptor(injector: Injector):
return injector.get(RequestInterceptor)
@pytest.fixture
async def supervisor(injector: Injector, server, session):
return injector.get(SimulatedSupervisor)
@pytest.fixture
async def addon_stopper(injector: Injector):
return injector.get(AddonStopper)
@pytest.fixture
async def server(injector, port, drive_creds: Creds, session):
server = injector.get(SimulationServer)
# start the server
logging.getLogger().info("Starting SimulationServer on port " + str(port))
await server.start(port)
yield server
await server.stop()
@pytest.fixture
async def data_cache(injector):
return injector.get(DataCache)
@pytest.fixture
async def session(injector):
async with injector.get(ClientSession) as session:
yield session
@pytest.fixture
async def precache(injector):
return injector.get(DestinationPrecache)
@pytest.fixture
async def backup(coord, source, dest):
await coord.sync()
assert len(coord.backups()) == 1
return coord.backups()[0]
@pytest.fixture
async def fs(injector):
faker = injector.get(FsFaker)
faker.start()
yield faker
faker.stop()
@pytest.fixture
async def estimator(injector, fs):
return injector.get(Estimator)
@pytest.fixture
async def device_code(injector):
return injector.get(AuthCodeQuery)
@pytest.fixture
async def error_store(injector):
return injector.get(ErrorStore)
@pytest.fixture
async def model(injector):
return injector.get(Model)
@pytest.fixture
async def global_info(injector):
return injector.get(GlobalInfo)
@pytest.fixture
async def server_url(port):
return URL("http://localhost:").with_port(port)
@pytest.fixture
async def ports(unused_tcp_port_factory):
return Ports(unused_tcp_port_factory(), unused_tcp_port_factory(), unused_tcp_port_factory())
@pytest.fixture
async def port(ports: Ports):
return ports.server
@pytest.fixture
async def ui_url(ports: Ports):
return URL("http://localhost").with_port(ports.ingress)
@pytest.fixture
async def ui_port(ports: Ports):
return ports.ui
@pytest.fixture
async def ingress_port(ports: Ports):
return ports.ingress
@pytest.fixture
async def coord(injector):
return injector.get(Coordinator)
@pytest.fixture()
async def updater(injector):
return injector.get(HaUpdater)
@pytest.fixture()
async def cleandir():
newpath = tempfile.mkdtemp()
os.chdir(newpath)
return newpath
@pytest.fixture
async def time(injector):
reset()
return injector.get(Time)
@pytest.fixture
async def config(injector):
return injector.get(Config)
@pytest.fixture
async def drive_creds(injector):
return injector.get(Creds)
@pytest.fixture
async def drive(injector, server, session):
return injector.get(DriveSource)
@pytest.fixture
async def ha(injector, server, session):
return injector.get(HaSource)
@pytest.fixture
async def ha_requests(injector, server):
return injector.get(HaRequests)
@pytest.fixture
async def drive_requests(injector, server):
return injector.get(DriveRequests)
@pytest.fixture
async def drive_requester(injector, server):
return injector.get(DriveRequester)
@pytest.fixture(autouse=True)
def verify_closed_responses(drive_requester: DriveRequester):
yield "unused"
for resp in drive_requester.all_responses:
assert resp.closed
@pytest.fixture
async def resolver(injector):
return injector.get(Resolver)
@pytest.fixture
async def client_identifier(injector):
return injector.get(Config).clientIdentifier()
@pytest.fixture
async def debug_worker(injector):
return injector.get(DebugWorker)
@pytest.fixture()
async def folder_finder(injector):
return injector.get(FolderFinder)
@pytest.fixture()
async def watcher(injector):
watcher = injector.get(Watcher)
yield watcher
await watcher.stop()
class BackupHelper():
def __init__(self, uploader, time):
self.time = time
self.uploader = uploader
async def createFile(self, size=1024 * 1024 * 2, slug="testslug", name="Test Name"):
from_backup: DummyBackup = DummyBackup(
name, self.time.toUtc(self.time.local(1985, 12, 6)), "fake source", slug)
data = await self.uploader.upload(createBackupTar(slug, name, self.time.now(), size))
return from_backup, data
@pytest.fixture
def backup_helper(uploader, time):
return BackupHelper(uploader, time)
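# A sketch of how the upload tests later in this commit consume this fixture:
# createFile() yields a DummyBackup plus an async stream of the generated tar,
# which can be fed straight into DriveRequests.create (hypothetical test,
# mirroring test_minimum_chunk_size):
async def _example_roundtrip(backup_helper, drive_requests):
    from_backup, data = await backup_helper.createFile(size=1024)
    async with data:
        async for _progress in drive_requests.create(data, {}, "unused"):
            pass  # one iteration per uploaded chunk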

View File

@@ -0,0 +1,71 @@
import os
import json
from time import sleep
import pytest
import asyncio
from yarl import URL
from aiohttp.client_exceptions import ClientResponseError
from backup.config import Config, Setting
from dev.simulationserver import SimulationServer
from dev.simulated_google import SimulatedGoogle, URL_MATCH_UPLOAD_PROGRESS, URL_MATCH_FILE
from dev.request_interceptor import RequestInterceptor
from backup.drive import DriveSource, FolderFinder, DriveRequests, RETRY_SESSION_ATTEMPTS, UPLOAD_SESSION_EXPIRATION_DURATION, URL_START_UPLOAD
from backup.drive.driverequests import (BASE_CHUNK_SIZE, CHUNK_UPLOAD_TARGET_SECONDS)
from backup.drive.drivesource import FOLDER_MIME_TYPE
from backup.exceptions import (BackupFolderInaccessible, BackupFolderMissingError,
DriveQuotaExceeded, ExistingBackupFolderError,
GoogleCantConnect, GoogleCredentialsExpired,
GoogleInternalError, GoogleUnexpectedError,
GoogleSessionError, GoogleTimeoutError, CredRefreshMyError, CredRefreshGoogleError)
from backup.creds import Creds
from backup.model import DriveBackup, DummyBackup
from ..faketime import FakeTime
from ..helpers import compareStreams, createBackupTar
class BackupHelper():
def __init__(self, uploader, time):
self.time = time
self.uploader = uploader
async def createFile(self, size=1024 * 1024 * 2, slug="testslug", name="Test Name", note=None):
from_backup: DummyBackup = DummyBackup(
name, self.time.toUtc(self.time.local(1985, 12, 6)), "fake source", slug, note=note, size=size)
data = await self.uploader.upload(createBackupTar(slug, name, self.time.now(), size))
return from_backup, data
@pytest.mark.asyncio
async def test_minimum_chunk_size(drive_requests: DriveRequests, time: FakeTime, backup_helper: BackupHelper, config: Config):
config.override(Setting.UPLOAD_LIMIT_BYTES_PER_SECOND, BASE_CHUNK_SIZE)
from_backup, data = await backup_helper.createFile(BASE_CHUNK_SIZE * 10)
async with data:
async for progress in drive_requests.create(data, {}, "unused"):
assert time.sleeps[-1] == 1
assert len(time.sleeps) == 11
@pytest.mark.asyncio
async def test_lower_chunk_size(drive_requests: DriveRequests, time: FakeTime, backup_helper: BackupHelper, config: Config):
config.override(Setting.UPLOAD_LIMIT_BYTES_PER_SECOND, BASE_CHUNK_SIZE / 2)
from_backup, data = await backup_helper.createFile(BASE_CHUNK_SIZE * 10)
# It should still upload in 256 kb chunks, just with more delay
async with data:
async for progress in drive_requests.create(data, {}, "unused"):
assert time.sleeps[-1] == 2
assert len(time.sleeps) == 11
@pytest.mark.asyncio
async def test_higher_speed_limit(drive_requests: DriveRequests, time: FakeTime, backup_helper: BackupHelper, config: Config):
config.override(Setting.UPLOAD_LIMIT_BYTES_PER_SECOND, BASE_CHUNK_SIZE * 2)
from_backup, data = await backup_helper.createFile(BASE_CHUNK_SIZE * 10)
# It should still upload in 256 kb chunks, just with less delay
async with data:
async for progress in drive_requests.create(data, {}, "unused"):
assert time.sleeps[-1] == 0.5
assert len(time.sleeps) == 11
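# The arithmetic behind the three tests above, as a sketch: uploads proceed in
# fixed BASE_CHUNK_SIZE chunks and the limiter sleeps so that chunk / sleep
# equals the configured limit; a 10-chunk file then records 11 sleeps
# (assumption: one extra bookkeeping sleep around the session).
def _expected_sleep(limit_bytes_per_second: float) -> float:
    return BASE_CHUNK_SIZE / limit_bytes_per_second  # 1s, 2s and 0.5s above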

View File

@@ -0,0 +1,54 @@
import asyncio
from datetime import datetime, timedelta
from backup.time import Time
from pytz import timezone
class FakeTime(Time):
def __init__(self, now: datetime = None):
super().__init__(local_tz=timezone('EST'))
if now:
self._now = now
else:
self._now = self.toUtc(
datetime(1985, 12, 6, 0, 0, 0, tzinfo=timezone('EST')))
self._start = self._now
self.sleeps = []
def setTimeZone(self, tz):
if isinstance(tz, str):
self.local_tz = timezone(tz)
else:
self.local_tz = tz
def monotonic(self):
return (self._now - self._start).total_seconds()
def setNow(self, now: datetime):
self._now = now
return self
def advanceDay(self, days=1):
return self.advance(days=days)
def advance(self, days=0, hours=0, minutes=0, seconds=0, duration=None):
self._now = self._now + \
timedelta(days=days, hours=hours, seconds=seconds, minutes=minutes)
if duration is not None:
self._now = self._now + duration
return self
def now(self) -> datetime:
return self._now
def nowLocal(self) -> datetime:
return self.toLocal(self._now)
async def sleepAsync(self, seconds: float, _exit_early: asyncio.Event = None):
self.sleeps.append(seconds)
self._now = self._now + timedelta(seconds=seconds)
# allow the task to be interrupted if such a thing is requested.
await asyncio.sleep(0)
def clearSleeps(self):
self.sleeps = []
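# A minimal sketch of the contract tests rely on: sleepAsync() records the
# request and jumps the clock instead of waiting, and advance() moves time
# forward deterministically.
async def _example_faketime():
    time = FakeTime()
    start = time.now()
    await time.sleepAsync(30)
    assert time.sleeps == [30]
    assert (time.now() - start).total_seconds() == 30
    time.advance(minutes=5)
    assert time.monotonic() == 330  # 30s of sleep plus 300s of advance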

View File

@@ -0,0 +1,219 @@
import json
import tarfile
import pytest
import platform
import os
from datetime import datetime
from io import BytesIO, IOBase
from aiohttp import ClientSession
from injector import inject, singleton
from backup.util import AsyncHttpGetter
from backup.model import SimulatedSource
from backup.time import Time
from backup.config import CreateOptions
all_folders = [
"share",
"ssl",
"addons/local"
]
all_addons = [
{
"name": "Sexy Robots",
"slug": "sexy_robots",
"description": "The robots you already know, but sexier. See what they don't want you to see.",
"version": "0.69",
"size": 1,
"logo": True,
"state": "started"
},
{
"name": "Particle Accelerator",
"slug": "particla_accel",
"description": "What CAN'T you do with Home Assistant?",
"version": "0.5",
"size": 500.3,
"logo": True,
"state": "started"
},
{
"name": "Empty Addon",
"slug": "addon_empty",
"description": "Explore the meaning of the universe by contemplating whats missing.",
"version": "0.-1",
"size": 1024 * 1024 * 1024 * 21.2,
"logo": False,
"state": "started"
}
]
def skipForWindows():
if platform.system() == "Windows":
pytest.skip("This test can't be run in windows environments")
def skipForRoot():
if os.getuid() == 0:
pytest.skip("This test can't be run as root")
def createBackupTar(slug: str, name: str, date: datetime, padSize: int, included_folders=None, included_addons=None, password=None) -> BytesIO:
backup_type = "full"
haVersion = None
if included_folders is not None:
folders = []
for folder in included_folders:
if folder == "homeassistant":
haVersion = "0.92.2"
else:
folders.append(folder)
else:
folders = all_folders.copy()
haVersion = "0.92.2"
if included_addons is not None:
backup_type = "partial"
addons = []
for addon in all_addons:
if addon['slug'] in included_addons:
addons.append(addon)
else:
addons = all_addons.copy()
backup_info = {
"slug": slug,
"name": name,
"date": date.isoformat(),
"type": backup_type,
"protected": password is not None,
"homeassistant": haVersion,
"folders": folders,
"addons": addons,
"repositories": [
"https://github.com/hassio-addons/repository"
]
}
stream = BytesIO()
tar = tarfile.open(fileobj=stream, mode="w")
add(tar, "backup.json", BytesIO(json.dumps(backup_info).encode()))
add(tar, "padding.dat", getTestStream(padSize))
tar.close()
stream.seek(0)
stream.size = lambda: len(stream.getbuffer())
return stream
def add(tar, name, stream):
info = tarfile.TarInfo(name)
info.size = len(stream.getbuffer())
stream.seek(0)
tar.addfile(info, stream)
def parseBackupInfo(stream: BytesIO):
with tarfile.open(fileobj=stream, mode="r") as tar:
info = tar.getmember("backup.json")
with tar.extractfile(info) as f:
backup_data = json.load(f)
backup_data['size'] = float(
round(len(stream.getbuffer()) / 1024.0 / 1024.0, 2))
backup_data['version'] = 'dev'
return backup_data
def getTestStream(size: int):
"""
Produces a stream of repeating prime sequences to avoid accidental repetition
"""
arr = bytearray()
while True:
for prime in [4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937]:
for x in range(prime):
if len(arr) < size:
arr.append(x % 255)
else:
break
if len(arr) >= size:
break
if len(arr) >= size:
break
return BytesIO(arr)
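# A sketch tying the helpers above together: build a tar, then parse its
# embedded backup.json back out (the date is arbitrary, for illustration):
def _example_tar_roundtrip():
    tar = createBackupTar("slug", "Name", datetime(1985, 12, 6), 1024)
    info = parseBackupInfo(tar)
    assert info["slug"] == "slug" and info["type"] == "full"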
async def compareStreams(left, right):
await left.setup()
await right.setup()
while True:
from_left = await left.read(1024 * 1024)
from_right = await right.read(1024 * 1024)
if len(from_left.getbuffer()) == 0:
assert len(from_right.getbuffer()) == 0
break
if from_left.getbuffer() != from_right.getbuffer():
print("break!")
assert from_left.getbuffer() == from_right.getbuffer()
class IntentionalFailure(Exception):
pass
class HelperTestSource(SimulatedSource):
def __init__(self, name, is_destination=False):
super().__init__(name, is_destination=is_destination)
self.allow_create = True
self.allow_save = True
self.queries = 0
def reset(self):
self.saved = []
self.deleted = []
self.created = []
self.queries = 0
@property
def query_count(self):
return self.queries
async def get(self):
self.queries += 1
return await super().get()
def assertThat(self, created=0, deleted=0, saved=0, current=0):
assert len(self.saved) == saved
assert len(self.deleted) == deleted
assert len(self.created) == created
assert len(self.current) == current
return self
def assertUnchanged(self):
self.assertThat(current=len(self.current))
return self
async def create(self, options: CreateOptions):
if not self.allow_create:
raise IntentionalFailure()
return await super().create(options)
async def save(self, backup, bytes: IOBase = None):
if not self.allow_save:
raise IntentionalFailure()
return await super().save(backup, bytes=bytes)
@singleton
class Uploader():
@inject
def __init__(self, host, session: ClientSession, time: Time):
self.host = host
self.session = session
self.time = time
async def upload(self, data) -> AsyncHttpGetter:
async with await self.session.post(self.host + "/uploadfile", data=data) as resp:
resp.raise_for_status()
source = AsyncHttpGetter(self.host + "/readfile", {}, self.session, time=self.time)
return source

View File

@@ -0,0 +1,355 @@
import json
import pytest
import os
from stat import S_IREAD
from backup.config import Config, Setting
from backup.ha import AddonStopper
from backup.exceptions import SupervisorFileSystemError
from .faketime import FakeTime
from dev.simulated_supervisor import SimulatedSupervisor, URL_MATCH_START_ADDON, URL_MATCH_STOP_ADDON, URL_MATCH_ADDON_INFO
from dev.request_interceptor import RequestInterceptor
from .helpers import skipForRoot
def getSaved(config: Config):
with open(config.get(Setting.STOP_ADDON_STATE_PATH)) as f:
data = json.load(f)
return set(data["start"]), set(data["watchdog"])
def save(config: Config, to_start, to_watchdog_enable):
with open(config.get(Setting.STOP_ADDON_STATE_PATH), "w") as f:
json.dump({"start": list(to_start), "watchdog": list(to_watchdog_enable)}, f)
@pytest.mark.asyncio
async def test_no_stop_config(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None:
slug = "test_slug_1"
supervisor.installAddon(slug, "Test description")
addon_stopper.allowRun()
addon_stopper.isBackingUp(False)
assert supervisor.addon(slug)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug)["state"] == "started"
await addon_stopper.check()
await addon_stopper.startAddons()
assert supervisor.addon(slug)["state"] == "started"
@pytest.mark.asyncio
async def test_load_addons_on_boot(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
slug2 = "test_slug_2"
supervisor.installAddon(slug2, "Test description")
slug3 = "test_slug_3"
supervisor.installAddon(slug3, "Test description")
config.override(Setting.STOP_ADDONS, slug1)
save(config, {slug3}, {slug2})
await addon_stopper.start(False)
assert addon_stopper.must_start == {slug3}
assert addon_stopper.must_enable_watchdog == {slug2}
addon_stopper.allowRun()
assert addon_stopper.must_start == {slug1, slug3}
assert addon_stopper.must_enable_watchdog == {slug2}
@pytest.mark.asyncio
async def test_do_nothing_while_backing_up(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
slug2 = "test_slug_2"
supervisor.installAddon(slug2, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1, slug2]))
await addon_stopper.start(False)
addon_stopper.allowRun()
addon_stopper.isBackingUp(True)
assert addon_stopper.must_start == {slug1, slug2}
await addon_stopper.check()
assert not interceptor.urlWasCalled(URL_MATCH_START_ADDON)
assert not interceptor.urlWasCalled(URL_MATCH_STOP_ADDON)
@pytest.mark.asyncio
async def test_start_and_stop(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
await addon_stopper.check()
assert supervisor.addon(slug1)["state"] == "stopped"
await addon_stopper.startAddons()
assert supervisor.addon(slug1)["state"] == "started"
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_start_and_stop_error(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
await addon_stopper.check()
assert supervisor.addon(slug1)["state"] == "stopped"
supervisor.addon(slug1)["state"] = "error"
assert supervisor.addon(slug1)["state"] == "error"
await addon_stopper.startAddons()
assert supervisor.addon(slug1)["state"] == "started"
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_stop_failure(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, slug1)
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
interceptor.setError(URL_MATCH_STOP_ADDON, 400)
await addon_stopper.stopAddons("ignore")
assert interceptor.urlWasCalled(URL_MATCH_STOP_ADDON)
assert getSaved(config) == (set(), set())
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.check()
await addon_stopper.startAddons()
assert supervisor.addon(slug1)["state"] == "started"
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_start_failure(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor, time: FakeTime) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
assert supervisor.addon(slug1)["state"] == "stopped"
interceptor.setError(URL_MATCH_START_ADDON, 400)
await addon_stopper.startAddons()
assert getSaved(config) == (set(), set())
assert interceptor.urlWasCalled(URL_MATCH_START_ADDON)
assert supervisor.addon(slug1)["state"] == "stopped"
@pytest.mark.asyncio
async def test_delayed_start(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor, time: FakeTime) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
assert getSaved(config) == ({slug1}, set())
# start the addon again, which simulates the supervisor's tendency to report an addon as started right after stopping it.
supervisor.addon(slug1)["state"] = "started"
await addon_stopper.check()
await addon_stopper.startAddons()
assert getSaved(config) == ({slug1}, set())
time.advance(seconds=30)
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
time.advance(seconds=30)
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
time.advance(seconds=30)
supervisor.addon(slug1)["state"] = "stopped"
await addon_stopper.check()
assert supervisor.addon(slug1)["state"] == "started"
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_delayed_start_give_up(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor, time: FakeTime) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
assert getSaved(config) == ({slug1}, set())
# start the addon again, which simulates the supervisor's tendency to report an addon as started right after stopping it.
supervisor.addon(slug1)["state"] = "started"
await addon_stopper.check()
await addon_stopper.startAddons()
assert getSaved(config) == ({slug1}, set())
time.advance(seconds=30)
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
time.advance(seconds=30)
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
# Should clear saved state after this, since it stops checking after 2 minutes.
time.advance(seconds=100)
await addon_stopper.check()
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_disable_watchdog(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
supervisor.addon(slug1)["watchdog"] = True
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
assert supervisor.addon(slug1)["watchdog"] is False
await addon_stopper.check()
assert supervisor.addon(slug1)["state"] == "stopped"
assert supervisor.addon(slug1)["watchdog"] is False
await addon_stopper.startAddons()
assert supervisor.addon(slug1)["state"] == "started"
assert supervisor.addon(slug1)["watchdog"] is True
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_enable_watchdog_on_reboot(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, time: FakeTime) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
supervisor.addon(slug1)["watchdog"] = False
save(config, set(), {slug1})
await addon_stopper.start(False)
addon_stopper.allowRun()
assert addon_stopper.must_enable_watchdog == {slug1}
time.advance(minutes=5)
await addon_stopper.check()
assert supervisor.addon(slug1)["watchdog"] is True
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_enable_watchdog_waits_for_start(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
supervisor.addon(slug1)["watchdog"] = False
save(config, {slug1}, {slug1})
await addon_stopper.start(False)
addon_stopper.allowRun()
assert addon_stopper.must_enable_watchdog == {slug1}
await addon_stopper.check()
assert getSaved(config) == ({slug1}, {slug1})
supervisor.addon(slug1)["state"] = "stopped"
await addon_stopper.check()
assert supervisor.addon(slug1)["state"] == "started"
assert supervisor.addon(slug1)["watchdog"] is True
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_get_info_failure_on_stop(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, slug1)
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
interceptor.setError(URL_MATCH_ADDON_INFO, 400)
await addon_stopper.stopAddons("ignore")
assert interceptor.urlWasCalled(URL_MATCH_ADDON_INFO)
assert getSaved(config) == (set(), set())
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.check()
await addon_stopper.startAddons()
assert supervisor.addon(slug1)["state"] == "started"
assert getSaved(config) == (set(), set())
@pytest.mark.asyncio
async def test_get_info_failure_on_start(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor) -> None:
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
assert supervisor.addon(slug1)["state"] == "stopped"
interceptor.setError(URL_MATCH_ADDON_INFO, 400)
await addon_stopper.startAddons()
assert getSaved(config) == (set(), set())
assert interceptor.urlWasCalled(URL_MATCH_ADDON_INFO)
assert supervisor.addon(slug1)["state"] == "stopped"
@pytest.mark.asyncio
async def test_read_only_fs(supervisor: SimulatedSupervisor, addon_stopper: AddonStopper, config: Config, interceptor: RequestInterceptor) -> None:
# This test can't be run as the root user, since no file is read-only to root.
skipForRoot()
# Stop an addon
slug1 = "test_slug_1"
supervisor.installAddon(slug1, "Test description")
config.override(Setting.STOP_ADDONS, ",".join([slug1]))
addon_stopper.allowRun()
addon_stopper.must_start = set()
assert supervisor.addon(slug1)["state"] == "started"
await addon_stopper.stopAddons("ignore")
assert supervisor.addon(slug1)["state"] == "stopped"
await addon_stopper.check()
assert getSaved(config) == ({slug1}, set())
# make the state file unmodifiable
os.chmod(config.get(Setting.STOP_ADDON_STATE_PATH), S_IREAD)
# verify we raise a known error when trying to save.
with pytest.raises(SupervisorFileSystemError):
await addon_stopper.startAddons()

View File

@@ -0,0 +1,117 @@
from datetime import timedelta
import pytest
from aiohttp import ClientSession
from aiohttp.web import StreamResponse
from backup.exceptions import LogicError
from dev.request_interceptor import RequestInterceptor
from .conftest import FakeTime, Uploader
@pytest.mark.asyncio
async def test_basics(uploader: Uploader, server, session: ClientSession):
getter = await uploader.upload(bytearray([0, 1, 2, 3, 4, 5, 6, 7]))
await getter.setup()
assert (await getter.read(1)).read() == bytearray([0])
assert (await getter.read(2)).read() == bytearray([1, 2])
assert (await getter.read(3)).read() == bytearray([3, 4, 5])
assert (await getter.read(3)).read() == bytearray([6, 7])
assert (await getter.read(3)).read() == bytearray([])
assert (await getter.read(3)).read() == bytearray([])
getter.position(2)
assert (await getter.read(2)).read() == bytearray([2, 3])
assert (await getter.read(3)).read() == bytearray([4, 5, 6])
getter.position(2)
assert (await getter.read(2)).read() == bytearray([2, 3])
getter.position(2)
assert (await getter.read(2)).read() == bytearray([2, 3])
assert (await getter.read(100)).read() == bytearray([4, 5, 6, 7])
assert (await getter.read(3)).read() == bytearray([])
assert (await getter.read(3)).read() == bytearray([])
@pytest.mark.asyncio
async def test_position_error(uploader: Uploader, server):
getter = await uploader.upload(bytearray([0, 1, 2, 3, 4, 5, 6, 7]))
await getter.setup()
assert (await getter.read(1)).read() == bytearray([0])
with pytest.raises(LogicError):
await getter.setup()
@pytest.mark.asyncio
async def test_no_content_length(uploader: Uploader, server, interceptor: RequestInterceptor):
getter = await uploader.upload(bytearray([0, 1, 2, 3, 4, 5, 6, 7]))
intercept = interceptor.setError("/readfile")
intercept.addResponse(StreamResponse(headers={}))
with pytest.raises(LogicError) as e:
await getter.setup()
assert e.value.message() == "Content size must be provided if the webserver doesn't provide it"
@pytest.mark.asyncio
async def test_no_setup_error(uploader: Uploader, server):
getter = await uploader.upload(bytearray([0, 1, 2, 3, 4, 5, 6, 7]))
with pytest.raises(LogicError):
await getter.read(1)
@pytest.mark.asyncio
async def test_progress(uploader: Uploader, server):
getter = await uploader.upload(bytearray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]))
await getter.setup()
assert getter.progress() == 0
assert (await getter.read(1)).read() == bytearray([0])
assert getter.progress() == 10
assert (await getter.read(2)).read() == bytearray([1, 2])
assert getter.progress() == 30
assert (await getter.read(7)).read() == bytearray([3, 4, 5, 6, 7, 8, 9])
assert getter.progress() == 100
assert str.format("{0}", getter) == "100"
@pytest.mark.asyncio
async def test_speed(uploader: Uploader, server, time: FakeTime):
getter = await uploader.upload(bytearray(x for x in range(0, 100)))
assert getter.startTime() == time.now()
await getter.setup()
assert getter.speed(period=timedelta(seconds=10)) is None
time.advance(seconds=1)
await getter.read(1)
assert getter.speed(period=timedelta(seconds=10)) == 1
time.advance(seconds=1)
await getter.read(1)
assert getter.speed(period=timedelta(seconds=10)) == 1
assert getter.speed(period=timedelta(seconds=1)) == 1
assert getter.speed(period=timedelta(seconds=1.5)) == 1
assert getter.speed(period=timedelta(seconds=0.5)) == 1
time.advance(seconds=1)
assert getter.speed(period=timedelta(seconds=10)) == 1
assert getter.speed(period=timedelta(seconds=1)) == 1
assert getter.speed(period=timedelta(seconds=1.5)) == 1
time.advance(seconds=0.5)
assert getter.speed(period=timedelta(seconds=1)) == 0.5
time.advance(seconds=0.5)
assert getter.speed(period=timedelta(seconds=1)) == 0
# Now 4 seconds have passed, and we've transferred 4 bytes
await getter.read(2)
assert getter.speed(period=timedelta(seconds=4)) == 1
assert getter.speed(period=timedelta(seconds=10)) == 1
time.advance(seconds=10)
await getter.read(10)
assert getter.speed(period=timedelta(seconds=10)) == 1
time.advance(seconds=10)
await getter.read(20)
assert getter.speed(period=timedelta(seconds=10)) == 2
time.advance(seconds=10)
assert getter.speed(period=timedelta(seconds=10)) == 2
time.advance(seconds=5)
assert getter.speed(period=timedelta(seconds=10)) == 1
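# A sketch of the sliding-window rate these assertions imply: speed(period) is
# the bytes transferred inside the trailing window divided by its length, so
# old reads age out (inferred from the cases above, e.g. 20 bytes / 10s == 2):
def _example_window_speed(bytes_in_window: int, period_seconds: float) -> float:
    return bytes_in_window / period_seconds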

View File

@@ -0,0 +1,104 @@
import pytest
from backup.drive import AuthCodeQuery
from backup.exceptions import LogicError, GoogleCredGenerateError, ProtocolError
from dev.request_interceptor import RequestInterceptor
from dev.simulated_google import URL_MATCH_TOKEN, SimulatedGoogle, URL_MATCH_DEVICE_CODE
from aiohttp.web_response import json_response
from backup.config import Config, Setting
@pytest.mark.asyncio
async def test_invalid_sequence(device_code: AuthCodeQuery, interceptor: RequestInterceptor) -> None:
with pytest.raises(LogicError):
await device_code.waitForPermission()
@pytest.mark.asyncio
async def test_success(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
google._device_code_accepted = True
assert await device_code.waitForPermission() is not None
@pytest.mark.asyncio
async def test_google_failure_on_request(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
interceptor.setError(URL_MATCH_DEVICE_CODE, 458)
with pytest.raises(GoogleCredGenerateError) as error:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
assert error.value.message() == "Google responded with error status HTTP 458. Please verify your credentials are set up correctly."
@pytest.mark.asyncio
async def test_failure_on_http_unknown(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
interceptor.setError(URL_MATCH_TOKEN, 500)
with pytest.raises(GoogleCredGenerateError) as error:
await device_code.waitForPermission()
assert error.value.message() == "Failed unexpectedly while trying to reach Google. See the add-on logs for details."
@pytest.mark.asyncio
async def test_success_after_wait(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
match = interceptor.setError(URL_MATCH_TOKEN)
match.addResponse(json_response(data={'error': "slow_down"}, status=403))
google._device_code_accepted = True
await device_code.waitForPermission()
assert match.callCount() == 2
@pytest.mark.asyncio
async def test_success_after_428(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
match = interceptor.setError(URL_MATCH_TOKEN)
match.addResponse(json_response(data={}, status=428))
match.addResponse(json_response(data={}, status=428))
match.addResponse(json_response(data={}, status=428))
match.addResponse(json_response(data={}, status=428))
match.addResponse(json_response(data={}, status=428))
google._device_code_accepted = True
await device_code.waitForPermission()
assert match.callCount() == 6
@pytest.mark.asyncio
async def test_permission_failure(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
match = interceptor.setError(URL_MATCH_TOKEN)
match.addResponse(json_response(data={}, status=403))
google._device_code_accepted = False
with pytest.raises(GoogleCredGenerateError) as error:
await device_code.waitForPermission()
assert error.value.message() == "Google refused the request to connect your account, either because you rejected it or they were set up incorrectly."
@pytest.mark.asyncio
async def test_json_parse_failure(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
interceptor.setError(URL_MATCH_TOKEN, 200)
with pytest.raises(ProtocolError):
await device_code.waitForPermission()
@pytest.mark.asyncio
async def test_repeated_failure(device_code: AuthCodeQuery, interceptor: RequestInterceptor, google: SimulatedGoogle, server, config: Config) -> None:
await device_code.requestCredentials(google._custom_drive_client_id, google._custom_drive_client_secret)
config.override(Setting.DRIVE_TOKEN_URL, "http://go.nowhere")
with pytest.raises(GoogleCredGenerateError) as error:
await device_code.waitForPermission()
error.value.message() == "Failed unexpectedly too many times while attempting to reach Google. See the logs for details."

View File

@@ -0,0 +1,104 @@
from pytest import fixture, raises
from backup.util import Backoff
@fixture
def error():
return Exception()
def test_defaults(error):
backoff = Backoff()
assert backoff.backoff(error) == 2
assert backoff.backoff(error) == 4
assert backoff.backoff(error) == 8
assert backoff.backoff(error) == 16
assert backoff.backoff(error) == 32
assert backoff.backoff(error) == 64
assert backoff.backoff(error) == 128
assert backoff.backoff(error) == 256
assert backoff.backoff(error) == 512
assert backoff.backoff(error) == 1024
assert backoff.backoff(error) == 2048
for x in range(10000):
assert backoff.backoff(error) == 3600
def test_max(error):
backoff = Backoff(max=500)
assert backoff.backoff(error) == 2
assert backoff.backoff(error) == 4
assert backoff.backoff(error) == 8
assert backoff.backoff(error) == 16
assert backoff.backoff(error) == 32
assert backoff.backoff(error) == 64
assert backoff.backoff(error) == 128
assert backoff.backoff(error) == 256
for x in range(10000):
assert backoff.backoff(error) == 500
def test_initial(error):
backoff = Backoff(initial=0)
assert backoff.backoff(error) == 0
assert backoff.backoff(error) == 2
assert backoff.backoff(error) == 4
assert backoff.backoff(error) == 8
assert backoff.backoff(error) == 16
assert backoff.backoff(error) == 32
assert backoff.backoff(error) == 64
assert backoff.backoff(error) == 128
assert backoff.backoff(error) == 256
assert backoff.backoff(error) == 512
assert backoff.backoff(error) == 1024
assert backoff.backoff(error) == 2048
for x in range(10000):
assert backoff.backoff(error) == 3600
def test_attempts(error):
backoff = Backoff(attempts=5)
assert backoff.backoff(error) == 2
assert backoff.backoff(error) == 4
assert backoff.backoff(error) == 8
assert backoff.backoff(error) == 16
assert backoff.backoff(error) == 32
for x in range(5):
with raises(type(error)):
backoff.backoff(error)
def test_start(error):
backoff = Backoff(base=10)
assert backoff.backoff(error) == 10
assert backoff.backoff(error) == 20
assert backoff.backoff(error) == 40
assert backoff.backoff(error) == 80
def test_realistic(error):
backoff = Backoff(base=5, initial=0, exp=1.5, attempts=5)
assert backoff.backoff(error) == 0
assert backoff.backoff(error) == 5
assert backoff.backoff(error) == 5 * 1.5
assert backoff.backoff(error) == 5 * (1.5**2)
assert backoff.backoff(error) == 5 * (1.5**3)
for x in range(5):
with raises(type(error)):
backoff.backoff(error)
def test_maxOut(error):
backoff = Backoff(base=10, max=100)
assert backoff.backoff(error) == 10
assert backoff.backoff(error) == 20
backoff.maxOut()
assert backoff.backoff(error) == 100
assert backoff.backoff(error) == 100
backoff.reset()
assert backoff.backoff(error) == 10
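# Taken together, the cases above pin the schedule down to a sketch of
# delay_n = min(max, base * exp**n), applied after an optional fixed `initial`
# value, with inferred defaults base=2, exp=2, max=3600 (one hour):
def _expected_backoff(n: int, base=2, exp=2, cap=3600) -> float:
    return min(cap, base * exp ** n)  # n=0 -> 2, n=10 -> 2048, then capped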

View File

@@ -0,0 +1,129 @@
from backup.config import BytesizeAsStringValidator
from backup.exceptions import InvalidConfigurationValue
import pytest
def test_minimum():
parser = BytesizeAsStringValidator("test", minimum=10)
assert parser.validate("11 bytes") == 11
assert parser.validate(11) == 11
with pytest.raises(InvalidConfigurationValue):
parser.validate("9 bytes")
def test_maximum():
parser = BytesizeAsStringValidator("test", maximum=10)
assert parser.validate("9 bytes") == 9
assert parser.validate(9) == 9
with pytest.raises(InvalidConfigurationValue):
parser.validate("11 bytes")
assert parser.formatForUi(9) == "9 B"
def test_ui_format():
parser = BytesizeAsStringValidator("test")
assert parser.formatForUi(25) == "25 B"
assert parser.formatForUi(25 * 1024) == "25 KB"
assert parser.formatForUi(25 * 1024 * 1024) == "25 MB"
assert parser.formatForUi(25 * 1024 * 1024 * 1024) == "25 GB"
assert parser.formatForUi(25 * 1024 * 1024 * 1024 * 1024) == "25 TB"
assert parser.formatForUi(25 * 1024 * 1024 * 1024 * 1024 * 1024) == "25 PB"
assert parser.formatForUi(25 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024) == "25 EB"
assert parser.formatForUi(25 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024) == "25 ZB"
assert parser.formatForUi(25 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024) == "25 YB"
assert parser.formatForUi(2000 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024) == "2000 YB"
assert parser.formatForUi(2.5 * 1024 * 1024) == "2.5 MB"
assert parser.formatForUi(2.534525 * 1024 * 1024) == "2.534525 MB"
assert parser.formatForUi(98743.1234 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024) == "98743.1234 YB"
assert parser.formatForUi(None) == ""
assert parser.formatForUi("") == ""
assert parser.formatForUi(0) == ""
def test_numbers():
parser = BytesizeAsStringValidator("test")
assert parser.validate(1.2) == 1
assert parser.validate(1024.9) == 1024
assert parser.validate(1024) == 1024
def test_parsing():
parser = BytesizeAsStringValidator("test")
assert parser.validate("1 B") == 1
assert parser.validate("1 b") == 1
assert parser.validate("1 bytes") == 1
assert parser.validate("1 byte") == 1
assert parser.validate("") is None
assert parser.validate(" ") is None
assert parser.validate(" 5. bytes ") == 5
assert parser.validate("10b") == 10
assert parser.validate("1 KB") == 1024
assert parser.validate("1 k") == 1024
assert parser.validate("1 kb") == 1024
assert parser.validate("1 kilobytes") == 1024
assert parser.validate("1 kibibytes") == 1024
assert parser.validate("1 kibi") == 1024
assert parser.validate("2.5 KB") == 1024 * 2.5
assert parser.validate("10k") == 10 * 1024
assert parser.validate("1 MB") == 1024 * 1024
assert parser.validate("1 m") == 1024 * 1024
assert parser.validate("1 mb") == 1024 * 1024
assert parser.validate("1 megs") == 1024 * 1024
assert parser.validate("1 mega") == 1024 * 1024
assert parser.validate("1 megabytes") == 1024 * 1024
assert parser.validate("1 mebibytes") == 1024 * 1024
assert parser.validate("10m") == 10 * 1024 * 1024
assert parser.validate("1 GB") == 1024 * 1024 * 1024
assert parser.validate("1 g") == 1024 * 1024 * 1024
assert parser.validate("1 gb") == 1024 * 1024 * 1024
assert parser.validate("1 gigs") == 1024 * 1024 * 1024
assert parser.validate("1 gig") == 1024 * 1024 * 1024
assert parser.validate("1 giga") == 1024 * 1024 * 1024
assert parser.validate("1 gigabytes") == 1024 * 1024 * 1024
assert parser.validate("1 gibibytes") == 1024 * 1024 * 1024
assert parser.validate("10G") == 10 * 1024 * 1024 * 1024
assert parser.validate("1 TB") == 1024 * 1024 * 1024 * 1024
assert parser.validate("1 t") == 1024 * 1024 * 1024 * 1024
assert parser.validate("1 tb") == 1024 * 1024 * 1024 * 1024
assert parser.validate("1 tera") == 1024 * 1024 * 1024 * 1024
assert parser.validate("1 tebi") == 1024 * 1024 * 1024 * 1024
assert parser.validate("1 terabytes") == 1024 * 1024 * 1024 * 1024
assert parser.validate("10T") == 10 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 PB") == 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 p") == 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 pb") == 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 peta") == 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 pebi") == 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 petabytes") == 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("10P") == 10 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 EB") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 e") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 eb") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 exa") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 exbi") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 exabytes") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("10E") == 10 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 ZB") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 z") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 zb") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 zetta") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 zebi") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 zettabytes") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("10Z") == 10 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 YB") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 y") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 yb") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 yotta") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 yobi") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("1 yottabytes") == 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024
assert parser.validate("10Y") == 10 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024

View File

@@ -0,0 +1,307 @@
import os
from pytest import raises
from backup.model import GenConfig
from backup.config import Config, Setting
from backup.exceptions import InvalidConfigurationValue
def test_validate_empty():
config = Config()
assert config.validate({}) == defaultAnd()
def test_validate_int():
assert Config().validate({'max_backups_in_ha': 5}) == defaultAnd(
{Setting.MAX_BACKUPS_IN_HA: 5})
assert Config().validate({'max_backups_in_ha': 5.0}) == defaultAnd(
{Setting.MAX_BACKUPS_IN_HA: 5})
assert Config().validate({'max_backups_in_ha': "5"}) == defaultAnd(
{Setting.MAX_BACKUPS_IN_HA: 5})
with raises(InvalidConfigurationValue):
Config().validate({'max_backups_in_ha': -2})
def test_validate_float():
setting = Setting.DAYS_BETWEEN_BACKUPS
assert Config().validate({setting: 5}) == defaultAnd({setting: 5})
assert Config().validate({setting.key(): 5}) == defaultAnd({setting: 5})
assert Config().validate({setting: 5.0}) == defaultAnd({setting: 5})
assert Config().validate({setting: "5"}) == defaultAnd({setting: 5})
with raises(InvalidConfigurationValue):
Config().validate({'days_between_backups': -1})
def test_validate_bool():
setting = Setting.SEND_ERROR_REPORTS
assert Config().validate({setting: True}) == defaultAnd({setting: True})
assert Config().validate({setting: False}) == defaultAnd({setting: False})
assert Config().validate({setting: "true"}) == defaultAnd({setting: True})
assert Config().validate({setting: "false"}) == defaultAnd({setting: False})
assert Config().validate({setting: "1"}) == defaultAnd({setting: True})
assert Config().validate({setting: "0"}) == defaultAnd({setting: False})
assert Config().validate({setting: "yes"}) == defaultAnd({setting: True})
assert Config().validate({setting: "no"}) == defaultAnd({setting: False})
assert Config().validate({setting: "on"}) == defaultAnd({setting: True})
assert Config().validate({setting: "off"}) == defaultAnd({setting: False})
def test_validate_string():
assert Config().validate({Setting.BACKUP_NAME: True}) == defaultAnd({Setting.BACKUP_NAME: "True"})
assert Config().validate({Setting.BACKUP_NAME: False}) == defaultAnd({Setting.BACKUP_NAME: "False"})
assert Config().validate({Setting.BACKUP_NAME: "true"}) == defaultAnd({Setting.BACKUP_NAME: "true"})
assert Config().validate({Setting.BACKUP_NAME: "false"}) == defaultAnd({Setting.BACKUP_NAME: "false"})
assert Config().validate({Setting.BACKUP_NAME: "1"}) == defaultAnd({Setting.BACKUP_NAME: "1"})
assert Config().validate({Setting.BACKUP_NAME: "0"}) == defaultAnd({Setting.BACKUP_NAME: "0"})
assert Config().validate({Setting.BACKUP_NAME: "yes"}) == defaultAnd({Setting.BACKUP_NAME: "yes"})
assert Config().validate({Setting.BACKUP_NAME: "no"}) == defaultAnd({Setting.BACKUP_NAME: "no"})
def test_validate_url():
assert Config().validate({Setting.SUPERVISOR_URL: True}) == defaultAnd(
{Setting.SUPERVISOR_URL: "True"})
assert Config().validate({Setting.SUPERVISOR_URL: False}) == defaultAnd(
{Setting.SUPERVISOR_URL: "False"})
assert Config().validate({Setting.SUPERVISOR_URL: "true"}) == defaultAnd(
{Setting.SUPERVISOR_URL: "true"})
assert Config().validate({Setting.SUPERVISOR_URL: "false"}) == defaultAnd(
{Setting.SUPERVISOR_URL: "false"})
assert Config().validate({Setting.SUPERVISOR_URL: "1"}) == defaultAnd(
{Setting.SUPERVISOR_URL: "1"})
assert Config().validate({Setting.SUPERVISOR_URL: "0"}) == defaultAnd(
{Setting.SUPERVISOR_URL: "0"})
assert Config().validate({Setting.SUPERVISOR_URL: "yes"}) == defaultAnd(
{Setting.SUPERVISOR_URL: "yes"})
assert Config().validate({Setting.SUPERVISOR_URL: "no"}) == defaultAnd(
{Setting.SUPERVISOR_URL: "no"})
def test_validate_regex():
assert Config().validate({Setting.DRIVE_IPV4: "192.168.1.1"}) == defaultAnd(
{Setting.DRIVE_IPV4: "192.168.1.1"})
with raises(InvalidConfigurationValue):
Config().validate({Setting.DRIVE_IPV4: -1})
with raises(InvalidConfigurationValue):
Config().validate({Setting.DRIVE_IPV4: "192.168.1"})
def test_remove_ssl():
assert Config().validate({Setting.USE_SSL: True}) == defaultAnd({Setting.USE_SSL: True})
assert Config().validate({Setting.USE_SSL: False}) == defaultAnd()
assert Config().validate({
Setting.USE_SSL: False,
Setting.CERTFILE: "removed",
Setting.KEYFILE: 'removed'
}) == defaultAnd()
assert Config().validate({
Setting.USE_SSL: True,
Setting.CERTFILE: "kept",
Setting.KEYFILE: 'kept'
}) == defaultAnd({
Setting.USE_SSL: True,
Setting.CERTFILE: "kept",
Setting.KEYFILE: 'kept'
})
def test_send_error_reports():
assert Config().validate({Setting.SEND_ERROR_REPORTS: False}) == defaultAnd(
{Setting.SEND_ERROR_REPORTS: False})
assert Config().validate({Setting.SEND_ERROR_REPORTS: True}) == defaultAnd(
{Setting.SEND_ERROR_REPORTS: True})
assert Config().validate(
{Setting.SEND_ERROR_REPORTS: None}) == defaultAnd()
def test_unrecognized_values_filter():
assert Config().validate({'blah': "bloo"}) == defaultAnd()
def test_removes_defaults():
assert Config().validate(
{Setting.BACKUP_TIME_OF_DAY: ""}) == defaultAnd()
def defaultAnd(config={}):
ret = {
Setting.DAYS_BETWEEN_BACKUPS: 3,
Setting.MAX_BACKUPS_IN_HA: 4,
Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE: 4
}
ret.update(config)
return (ret, False)
def test_GenerationalConfig() -> None:
assert Config().getGenerationalConfig() is None
assert Config().override(Setting.GENERATIONAL_DAYS, 5).getGenerationalConfig() == GenConfig(days=5)
assert Config().override(Setting.GENERATIONAL_WEEKS, 3).getGenerationalConfig() == GenConfig(days=1, weeks=3)
assert Config().override(Setting.GENERATIONAL_MONTHS, 3).getGenerationalConfig() == GenConfig(days=1, months=3)
assert Config().override(Setting.GENERATIONAL_YEARS, 3).getGenerationalConfig() == GenConfig(days=1, years=3)
assert Config().override(Setting.GENERATIONAL_DELETE_EARLY, True).override(
Setting.GENERATIONAL_DAYS, 2).getGenerationalConfig() == GenConfig(days=2, aggressive=True)
assert Config().override(Setting.GENERATIONAL_DAYS, 1).override(
Setting.GENERATIONAL_DAY_OF_YEAR, 3).getGenerationalConfig() == GenConfig(days=1, day_of_year=3)
assert Config().override(Setting.GENERATIONAL_DAYS, 1).override(
Setting.GENERATIONAL_DAY_OF_MONTH, 3).getGenerationalConfig() == GenConfig(days=1, day_of_month=3)
assert Config().override(Setting.GENERATIONAL_DAYS, 1).override(
Setting.GENERATIONAL_DAY_OF_WEEK, "tue").getGenerationalConfig() == GenConfig(days=1, day_of_week="tue")
assert Config().override(Setting.GENERATIONAL_DAY_OF_MONTH, 3).override(Setting.GENERATIONAL_DAY_OF_WEEK, "tue").override(Setting.GENERATIONAL_DAY_OF_YEAR, "4").getGenerationalConfig() is None
def test_from_environment():
assert Config.fromEnvironment().get(Setting.PORT) != 1000
os.environ["PORT"] = str(1000)
assert Config.fromEnvironment().get(Setting.PORT) == 1000
del os.environ["PORT"]
assert Config.fromEnvironment().get(Setting.PORT) != 1000
os.environ["port"] = str(1000)
assert Config.fromEnvironment().get(Setting.PORT) == 1000
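# The test above relies on environment lookups being case-insensitive. A minimal
# sketch of that lookup, assuming setting names map directly to environment keys
# (this helper is hypothetical, not the addon's actual code):
def _env_override_sketch(setting_name: str):
    # Check both the upper- and lower-case spellings of the setting's name.
    for key in (setting_name.upper(), setting_name.lower()):
        if key in os.environ:
            return os.environ[key]
    return None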
def test_config_upgrade():
# Test specifying one value
config = Config()
config.update({Setting.DEPRECTAED_BACKUP_TIME_OF_DAY: "00:01"})
assert (config.getAllConfig(), False) == defaultAnd({
Setting.BACKUP_TIME_OF_DAY: "00:01",
Setting.CALL_BACKUP_SNAPSHOT: True
})
assert config.mustSaveUpgradeChanges()
# Test specifying multiple values
config = Config()
config.update({
Setting.DEPRECTAED_MAX_BACKUPS_IN_GOOGLE_DRIVE: 21,
Setting.DEPRECTAED_MAX_BACKUPS_IN_HA: 20,
Setting.DEPRECATED_BACKUP_PASSWORD: "boop"
})
assert config.getAllConfig() == defaultAnd({
Setting.MAX_BACKUPS_IN_HA: 20,
Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE: 21,
Setting.BACKUP_PASSWORD: "boop",
Setting.CALL_BACKUP_SNAPSHOT: True
})[0]
assert config.mustSaveUpgradeChanges()
    # Test specifying values that don't get upgraded
config = Config()
config.update({Setting.EXCLUDE_ADDONS: "test"})
assert config.getAllConfig() == defaultAnd({
Setting.EXCLUDE_ADDONS: "test"
})[0]
assert not config.mustSaveUpgradeChanges()
# Test specifying both
config = Config()
config.update({
Setting.DEPRECTAED_BACKUP_TIME_OF_DAY: "00:01",
Setting.EXCLUDE_ADDONS: "test"
})
assert config.getAllConfig() == defaultAnd({
Setting.BACKUP_TIME_OF_DAY: "00:01",
Setting.EXCLUDE_ADDONS: "test",
Setting.CALL_BACKUP_SNAPSHOT: True
})[0]
assert config.mustSaveUpgradeChanges()
def test_overwrite_on_upgrade():
config = Config()
config.update({
Setting.DEPRECTAED_MAX_BACKUPS_IN_HA: 5,
Setting.MAX_BACKUPS_IN_HA: 6
})
assert (config.getAllConfig(), False) == defaultAnd({
Setting.MAX_BACKUPS_IN_HA: 6,
Setting.CALL_BACKUP_SNAPSHOT: True
})
assert config.mustSaveUpgradeChanges()
config = Config()
config.update({
Setting.MAX_BACKUPS_IN_HA: 6,
Setting.DEPRECTAED_MAX_BACKUPS_IN_HA: 5
})
assert (config.getAllConfig(), False) == defaultAnd({
Setting.MAX_BACKUPS_IN_HA: 6,
Setting.CALL_BACKUP_SNAPSHOT: True
})
assert config.mustSaveUpgradeChanges()
config = Config()
config.update({
Setting.MAX_BACKUPS_IN_HA: 6,
Setting.DEPRECTAED_MAX_BACKUPS_IN_HA: 4
})
assert (config.getAllConfig(), False) == defaultAnd({
Setting.MAX_BACKUPS_IN_HA: 6,
Setting.CALL_BACKUP_SNAPSHOT: True
})
assert config.mustSaveUpgradeChanges()
def test_overwrite_on_upgrade_default_value():
    # The deprecated value should win when the new setting is still at its default
config = Config()
config.update({
Setting.DEPRECTAED_MAX_BACKUPS_IN_HA: Setting.MAX_BACKUPS_IN_HA.default() + 1,
Setting.MAX_BACKUPS_IN_HA: Setting.MAX_BACKUPS_IN_HA.default()
})
assert (config.getAllConfig(), False) == defaultAnd({
Setting.MAX_BACKUPS_IN_HA: Setting.MAX_BACKUPS_IN_HA.default() + 1,
Setting.CALL_BACKUP_SNAPSHOT: True
})
assert config.mustSaveUpgradeChanges()
config = Config()
config.update({
Setting.MAX_BACKUPS_IN_HA: Setting.MAX_BACKUPS_IN_HA.default(),
Setting.DEPRECTAED_MAX_BACKUPS_IN_HA: Setting.MAX_BACKUPS_IN_HA.default() + 1
})
assert (config.getAllConfig(), False) == defaultAnd({
Setting.MAX_BACKUPS_IN_HA: Setting.MAX_BACKUPS_IN_HA.default() + 1,
Setting.CALL_BACKUP_SNAPSHOT: True
})
assert config.mustSaveUpgradeChanges()
def test_empty_colors():
    # Empty color values should fall back to the defaults
config = Config()
config.update({Setting.BACKGROUND_COLOR: "", Setting.ACCENT_COLOR: ""})
assert config.get(Setting.BACKGROUND_COLOR) == Setting.BACKGROUND_COLOR.default()
assert config.get(Setting.ACCENT_COLOR) == Setting.ACCENT_COLOR.default()
def test_ignore_upgrades_default():
    # Upgrade backups are ignored by default; legacy behavior disables that
config = Config()
assert config.get(Setting.IGNORE_UPGRADE_BACKUPS)
config.useLegacyIgnoredBehavior(True)
assert not config.get(Setting.IGNORE_UPGRADE_BACKUPS)
config.useLegacyIgnoredBehavior(False)
assert config.get(Setting.IGNORE_UPGRADE_BACKUPS)
def getGenConfig(update):
base = {
"days": 1,
"weeks": 0,
"months": 0,
"years": 0,
"day_of_week": "mon",
"day_of_year": 1,
"day_of_month": 1
}
base.update(update)
return base

View File

@@ -0,0 +1,552 @@
import asyncio
from datetime import timedelta
import pytest
from pytest import raises
from backup.config import Config, Setting, CreateOptions
from backup.exceptions import LogicError, LowSpaceError, NoBackup, PleaseWait, UserCancelledError
from backup.util import GlobalInfo, DataCache
from backup.model import Coordinator, Model, Backup, DestinationPrecache
from .conftest import FsFaker
from .faketime import FakeTime
from .helpers import HelperTestSource, skipForWindows
@pytest.fixture
def source():
return HelperTestSource("Source")
@pytest.fixture
def dest():
return HelperTestSource("Dest")
@pytest.fixture
def simple_config():
config = Config()
config.override(Setting.BACKUP_STARTUP_DELAY_MINUTES, 0)
return config
@pytest.fixture
def model(source, dest, time, simple_config, global_info, estimator, data_cache: DataCache):
return Model(simple_config, time, source, dest, global_info, estimator, data_cache)
@pytest.fixture
def coord(model, time, simple_config, global_info, estimator):
return Coordinator(model, time, simple_config, global_info, estimator)
@pytest.fixture
def precache(coord, time, dest, simple_config):
return DestinationPrecache(coord, time, dest, simple_config)
@pytest.mark.asyncio
async def test_enabled(coord: Coordinator, dest, time):
dest.setEnabled(True)
assert coord.enabled()
dest.setEnabled(False)
assert not coord.enabled()
@pytest.mark.asyncio
async def test_sync(coord: Coordinator, global_info: GlobalInfo, time: FakeTime):
await coord.sync()
assert global_info._syncs == 1
assert global_info._successes == 1
assert global_info._last_sync_start == time.now()
assert len(coord.backups()) == 1
@pytest.mark.asyncio
async def test_blocking(coord: Coordinator):
# This just makes sure the wait thread is blocked while we do stuff
event_start = asyncio.Event()
event_end = asyncio.Event()
asyncio.create_task(coord._withSoftLock(lambda: sleepHelper(event_start, event_end)))
await event_start.wait()
# Make sure PleaseWait gets called on these
with raises(PleaseWait):
await coord.delete(None, None)
with raises(PleaseWait):
await coord.sync()
with raises(PleaseWait):
await coord.uploadBackups(None)
with raises(PleaseWait):
await coord.startBackup(None)
event_end.set()
async def sleepHelper(event_start: asyncio.Event, event_end: asyncio.Event):
event_start.set()
await event_end.wait()
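# test_blocking above exercises a "soft lock": concurrent operations fail fast
# with PleaseWait instead of queueing. A minimal sketch of that pattern, assuming
# the real Coordinator._withSoftLock behaves like this (names are illustrative):
class _SoftLockSketch:
    def __init__(self):
        self._lock = asyncio.Lock()

    async def run(self, operation):
        if self._lock.locked():
            # Another long-running operation holds the lock; fail fast instead of queueing.
            raise PleaseWait()
        async with self._lock:
            return await operation()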
@pytest.mark.asyncio
async def test_new_backup(coord: Coordinator, time: FakeTime, source, dest):
await coord.startBackup(CreateOptions(time.now(), "Test Name"))
backups = coord.backups()
assert len(backups) == 1
assert backups[0].name() == "Test Name"
assert backups[0].getSource(source.name()) is not None
assert backups[0].getSource(dest.name()) is None
@pytest.mark.asyncio
async def test_sync_error(coord: Coordinator, global_info: GlobalInfo, time: FakeTime, model):
error = Exception("BOOM")
old_sync = model.sync
model.sync = lambda s: doRaise(error)
await coord.sync()
assert global_info._last_error is error
assert global_info._last_failure_time == time.now()
assert global_info._successes == 0
model.sync = old_sync
await coord.sync()
assert global_info._last_error is None
assert global_info._successes == 1
assert global_info._last_success == time.now()
await coord.sync()
def doRaise(error):
raise error
@pytest.mark.asyncio
async def test_delete(coord: Coordinator, backup, source, dest):
assert backup.getSource(source.name()) is not None
assert backup.getSource(dest.name()) is not None
await coord.delete([source.name()], backup.slug())
assert len(coord.backups()) == 1
assert backup.getSource(source.name()) is None
assert backup.getSource(dest.name()) is not None
await coord.delete([dest.name()], backup.slug())
assert backup.getSource(source.name()) is None
assert backup.getSource(dest.name()) is None
assert backup.isDeleted()
assert len(coord.backups()) == 0
await coord.sync()
assert len(coord.backups()) == 1
await coord.delete([source.name(), dest.name()], coord.backups()[0].slug())
assert len(coord.backups()) == 0
@pytest.mark.asyncio
async def test_delete_errors(coord: Coordinator, source, dest, backup):
with raises(NoBackup):
await coord.delete([source.name()], "badslug")
bad_source = HelperTestSource("bad")
with raises(NoBackup):
await coord.delete([bad_source.name()], backup.slug())
@pytest.mark.asyncio
async def test_retain(coord: Coordinator, source, dest, backup):
assert not backup.getSource(source.name()).retained()
assert not backup.getSource(dest.name()).retained()
await coord.retain({
source.name(): True,
dest.name(): True
}, backup.slug())
assert backup.getSource(source.name()).retained()
assert backup.getSource(dest.name()).retained()
@pytest.mark.asyncio
async def test_retain_errors(coord: Coordinator, source, dest, backup):
with raises(NoBackup):
await coord.retain({source.name(): True}, "badslug")
bad_source = HelperTestSource("bad")
with raises(NoBackup):
        await coord.retain({bad_source.name(): True}, backup.slug())
@pytest.mark.asyncio
async def test_freshness(coord: Coordinator, source: HelperTestSource, dest: HelperTestSource, backup: Backup, time: FakeTime):
source.setMax(2)
dest.setMax(2)
await coord.sync()
assert backup.getPurges() == {
source.name(): False,
dest.name(): False
}
source.setMax(1)
dest.setMax(1)
await coord.sync()
assert backup.getPurges() == {
source.name(): True,
dest.name(): True
}
dest.setMax(0)
await coord.sync()
assert backup.getPurges() == {
source.name(): True,
dest.name(): False
}
source.setMax(0)
await coord.sync()
assert backup.getPurges() == {
source.name(): False,
dest.name(): False
}
source.setMax(2)
dest.setMax(2)
time.advance(days=7)
await coord.sync()
assert len(coord.backups()) == 2
assert backup.getPurges() == {
source.name(): True,
dest.name(): True
}
assert coord.backups()[1].getPurges() == {
source.name(): False,
dest.name(): False
}
# should refresh on delete
source.setMax(1)
dest.setMax(1)
await coord.delete([source.name()], backup.slug())
assert coord.backups()[0].getPurges() == {
dest.name(): True
}
assert coord.backups()[1].getPurges() == {
source.name(): True,
dest.name(): False
}
# should update on retain
await coord.retain({dest.name(): True}, backup.slug())
assert coord.backups()[0].getPurges() == {
dest.name(): False
}
assert coord.backups()[1].getPurges() == {
source.name(): True,
dest.name(): True
}
# should update on upload
await coord.uploadBackups(coord.backups()[0].slug())
assert coord.backups()[0].getPurges() == {
dest.name(): False,
source.name(): True
}
assert coord.backups()[1].getPurges() == {
source.name(): False,
dest.name(): True
}
@pytest.mark.asyncio
async def test_upload(coord: Coordinator, source: HelperTestSource, dest: HelperTestSource, backup):
await coord.delete([source.name()], backup.slug())
assert backup.getSource(source.name()) is None
await coord.uploadBackups(backup.slug())
assert backup.getSource(source.name()) is not None
with raises(LogicError):
await coord.uploadBackups(backup.slug())
with raises(NoBackup):
await coord.uploadBackups("bad slug")
await coord.delete([dest.name()], backup.slug())
with raises(NoBackup):
await coord.uploadBackups(backup.slug())
@pytest.mark.asyncio
async def test_download(coord: Coordinator, source, dest, backup):
await coord.download(backup.slug())
await coord.delete([source.name()], backup.slug())
await coord.download(backup.slug())
with raises(NoBackup):
await coord.download("bad slug")
@pytest.mark.asyncio
async def test_backoff(coord: Coordinator, model, source: HelperTestSource, dest: HelperTestSource, backup, time: FakeTime, simple_config: Config):
assert await coord.check()
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
simple_config.override(Setting.MAX_SYNC_INTERVAL_SECONDS, 60 * 60 * 6)
simple_config.override(Setting.DEFAULT_SYNC_INTERVAL_VARIATION, 0)
assert coord.nextSyncAttempt() == time.now() + timedelta(hours=6)
assert not await coord.check()
old_sync = model.sync
model.sync = lambda s: doRaise(Exception("BOOM"))
await coord.sync()
# first backoff should be 0 seconds
assert coord.nextSyncAttempt() == time.now()
assert await coord.check()
# backoff maxes out at 2 hr = 7200 seconds
for seconds in [10, 20, 40, 80, 160, 320, 640, 1280, 2560, 5120, 7200, 7200]:
await coord.sync()
assert coord.nextSyncAttempt() == time.now() + timedelta(seconds=seconds)
assert not await coord.check()
assert not await coord.check()
assert not await coord.check()
# a good sync resets it back to 6 hours from now
model.sync = old_sync
await coord.sync()
assert coord.nextSyncAttempt() == time.now() + timedelta(hours=6)
assert not await coord.check()
    # if the next backup is less than 6 hours from the last one, then that should be when we sync
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1.0 / 24.0)
assert coord.nextSyncAttempt() == time.now() + timedelta(hours=1)
assert not await coord.check()
time.advance(hours=2)
assert coord.nextSyncAttempt() == time.now() - timedelta(hours=1)
assert await coord.check()
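# A sketch of the failure backoff the assertions above imply: retry immediately
# after the first failure, then double from 10 seconds up to a 2 hour ceiling
# (constants inferred from the expected values, not taken from the source).
def _backoff_sketch(failures: int) -> timedelta:
    if failures <= 1:
        return timedelta(seconds=0)  # first failure retries right away
    return timedelta(seconds=min(10 * 2 ** (failures - 2), 60 * 60 * 2))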
def test_save_creds(coord: Coordinator, source, dest):
pass
@pytest.mark.asyncio
async def test_check_size_new_backup(coord: Coordinator, source: HelperTestSource, dest: HelperTestSource, time, fs: FsFaker):
skipForWindows()
fs.setFreeBytes(0)
with raises(LowSpaceError):
await coord.startBackup(CreateOptions(time.now(), "Test Name"))
@pytest.mark.asyncio
async def test_check_size_sync(coord: Coordinator, source: HelperTestSource, dest: HelperTestSource, time, fs: FsFaker, global_info: GlobalInfo):
skipForWindows()
fs.setFreeBytes(0)
await coord.sync()
assert len(coord.backups()) == 0
assert global_info._last_error is not None
await coord.sync()
assert len(coord.backups()) == 0
assert global_info._last_error is not None
    # Verify the one-time skip of the space check lets a sync through, then resets
global_info.setSkipSpaceCheckOnce(True)
await coord.sync()
assert len(coord.backups()) == 1
assert global_info._last_error is None
assert not global_info.isSkipSpaceCheckOnce()
    # Next attempt to backup should fail again.
time.advance(days=7)
await coord.sync()
assert len(coord.backups()) == 1
assert global_info._last_error is not None
@pytest.mark.asyncio
async def test_cancel(coord: Coordinator, global_info: GlobalInfo):
coord._sync_wait.clear()
asyncio.create_task(coord.sync())
await coord._sync_start.wait()
await coord.cancel()
assert isinstance(global_info._last_error, UserCancelledError)
@pytest.mark.asyncio
async def test_working_through_upload(coord: Coordinator, global_info: GlobalInfo, dest):
coord._sync_wait.clear()
assert not coord.isWorkingThroughUpload()
sync_task = asyncio.create_task(coord.sync())
await coord._sync_start.wait()
assert not coord.isWorkingThroughUpload()
dest.working = True
assert coord.isWorkingThroughUpload()
coord._sync_wait.set()
await asyncio.wait([sync_task])
assert not coord.isWorkingThroughUpload()
@pytest.mark.asyncio
async def test_alternate_timezone(coord: Coordinator, time: FakeTime, model: Model, dest, source, simple_config: Config):
time.setTimeZone("Europe/Stockholm")
simple_config.override(Setting.BACKUP_TIME_OF_DAY, "12:00")
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
source.setMax(10)
source.insert("Fri", time.toUtc(time.local(2020, 3, 16, 18, 5)))
time.setNow(time.local(2020, 3, 16, 18, 6))
model.reinitialize()
coord.reset()
await coord.sync()
assert not await coord.check()
assert coord.nextBackupTime() == time.local(2020, 3, 17, 12)
time.setNow(time.local(2020, 3, 17, 11, 59))
await coord.sync()
assert not await coord.check()
time.setNow(time.local(2020, 3, 17, 12))
assert await coord.check()
@pytest.mark.asyncio
async def test_disabled_at_install(coord: Coordinator, dest, time):
"""
Verifies that at install time, if some backups are already present the
addon doesn't try to sync over and over when drive is disabled. This was
a problem at one point.
"""
dest.setEnabled(True)
await coord.sync()
assert len(coord.backups()) == 1
dest.setEnabled(False)
time.advance(days=5)
assert await coord.check()
await coord.sync()
assert not await coord.check()
@pytest.mark.asyncio
async def test_only_source_configured(coord: Coordinator, dest: HelperTestSource, time, source: HelperTestSource):
source.setEnabled(True)
dest.setEnabled(False)
dest.setNeedsConfiguration(False)
await coord.sync()
assert len(coord.backups()) == 1
@pytest.mark.asyncio
async def test_schedule_backup_next_sync_attempt(coord: Coordinator, model, source: HelperTestSource, dest: HelperTestSource, backup, time: FakeTime, simple_config: Config):
"""
Next backup is before max sync interval is reached
"""
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
simple_config.override(Setting.MAX_SYNC_INTERVAL_SECONDS, 60 * 60)
simple_config.override(Setting.DEFAULT_SYNC_INTERVAL_VARIATION, 0)
time.setTimeZone("Europe/Stockholm")
simple_config.override(Setting.BACKUP_TIME_OF_DAY, "03:23")
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
source.setMax(10)
source.insert("Fri", time.toUtc(time.local(2020, 3, 16, 3, 33)))
time.setNow(time.local(2020, 3, 17, 2, 29))
model.reinitialize()
coord.reset()
await coord.sync()
assert coord.nextBackupTime() == time.local(2020, 3, 17, 3, 23)
assert coord.nextBackupTime() == coord.nextSyncAttempt()
@pytest.mark.asyncio
async def test_max_sync_interval_next_sync_attempt(coord: Coordinator, model, source: HelperTestSource, dest: HelperTestSource, backup, time: FakeTime, simple_config: Config):
"""
Next backup is after max sync interval is reached
"""
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
simple_config.override(Setting.MAX_SYNC_INTERVAL_SECONDS, 60 * 60)
simple_config.override(Setting.DEFAULT_SYNC_INTERVAL_VARIATION, 0)
time.setTimeZone("Europe/Stockholm")
simple_config.override(Setting.BACKUP_TIME_OF_DAY, "03:23")
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
source.setMax(10)
source.insert("Fri", time.toUtc(time.local(2020, 3, 16, 3, 33)))
time.setNow(time.local(2020, 3, 17, 1, 29))
model.reinitialize()
coord.reset()
await coord.sync()
assert coord.nextSyncAttempt() == time.local(2020, 3, 17, 2, 29)
assert coord.nextBackupTime() > coord.nextSyncAttempt()
@pytest.mark.asyncio
async def test_generational_only_ignored_snapshots(coord: Coordinator, model, source: HelperTestSource, dest: HelperTestSource, time: FakeTime, simple_config: Config, global_info: GlobalInfo):
"""
Verifies a sync with generational settings and only ignored snapshots doesn't cause an error.
Setup is taken from https://github.com/sabeechen/hassio-google-drive-backup/issues/727
"""
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
simple_config.override(Setting.GENERATIONAL_DAYS, 3)
simple_config.override(Setting.GENERATIONAL_WEEKS, 4)
simple_config.override(Setting.GENERATIONAL_DELETE_EARLY, True)
simple_config.override(Setting.MAX_BACKUPS_IN_HA, 2)
simple_config.override(Setting.MAX_BACKUPS_IN_GOOGLE_DRIVE, 6)
backup = source.insert("Fri", time.toUtc(time.local(2020, 3, 16, 3, 33)))
backup.setIgnore(True)
time.setNow(time.local(2020, 3, 16, 4, 0))
dest.setEnabled(False)
source.setEnabled(True)
await coord.sync()
assert global_info._last_error is None
@pytest.mark.asyncio
async def test_max_sync_interval_randomness(coord: Coordinator, model, source: HelperTestSource, dest: HelperTestSource, backup, time: FakeTime, simple_config: Config):
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
simple_config.override(Setting.MAX_SYNC_INTERVAL_SECONDS, 60 * 60)
simple_config.override(Setting.DEFAULT_SYNC_INTERVAL_VARIATION, 0.5)
time.setTimeZone("Europe/Stockholm")
simple_config.override(Setting.BACKUP_TIME_OF_DAY, "03:23")
simple_config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
source.setMax(10)
source.insert("Fri", time.toUtc(time.local(2020, 3, 16, 3, 33)))
time.setNow(time.local(2020, 3, 17, 1, 29))
model.reinitialize()
coord.reset()
await coord.sync()
next_attempt = coord.nextSyncAttempt()
    # verify it's within the expected range
assert next_attempt - time.now() <= timedelta(hours=1)
assert next_attempt - time.now() >= timedelta(hours=0.5)
# verify it doesn't change
assert coord.nextSyncAttempt() == next_attempt
time.advance(minutes=1)
assert coord.nextSyncAttempt() == next_attempt
# sync, and verify it does change
await coord.sync()
assert coord.nextSyncAttempt() != next_attempt
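# A sketch of the jitter the test above pins down: with a variation of 0.5 the
# next attempt lands uniformly between 50% and 100% of the max sync interval.
# This only illustrates the asserted bounds, not the addon's actual code.
def _next_sync_delay_sketch(max_interval_seconds: float, variation: float) -> timedelta:
    import random
    low = max_interval_seconds * (1 - variation)
    return timedelta(seconds=random.uniform(low, max_interval_seconds))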
@pytest.mark.asyncio
async def test_precaching(coord: Coordinator, precache: DestinationPrecache, dest: HelperTestSource, time: FakeTime, global_info: GlobalInfo):
await coord.sync()
dest.reset()
# Warm the cache
assert precache.getNextWarmDate() < coord.nextSyncAttempt()
assert precache.cached(dest.name(), time.now()) is None
assert dest.query_count == 0
time.setNow(precache.getNextWarmDate())
await precache.checkForSmoothing()
assert precache.cached(dest.name(), time.now()) is not None
assert dest.query_count == 1
# No queries should have been made to dest, and the cache should now be cleared
time.setNow(coord.nextSyncAttempt())
assert precache.cached(dest.name(), time.now()) is not None
await coord.sync()
assert dest.query_count == 1
assert precache.cached(dest.name(), time.now()) is None
assert global_info._last_error is None

View File

@@ -0,0 +1,210 @@
import pytest
import os
import json
from injector import Injector
from datetime import timedelta
from backup.config import Config, Setting, VERSION, Version
from backup.util import DataCache, UpgradeFlags, KEY_CREATED, KEY_LAST_SEEN, CACHE_EXPIRATION_DAYS
from backup.time import Time
from os.path import join
@pytest.mark.asyncio
async def test_read_and_write(config: Config, time: Time) -> None:
cache = DataCache(config, time)
assert len(cache.backups) == 0
cache.backup("test")[KEY_CREATED] = time.now().isoformat()
assert not cache._dirty
cache.makeDirty()
assert cache._dirty
cache.saveIfDirty()
assert not cache._dirty
cache = DataCache(config, time)
assert cache.backup("test")[KEY_CREATED] == time.now().isoformat()
assert not cache._dirty
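# The test above pins down the dirty-flag contract: saveIfDirty() only touches
# disk when something actually changed. A minimal sketch of that pattern
# (hypothetical, shown only to make the contract explicit):
class _DirtyCacheSketch:
    def __init__(self, path):
        self._path = path
        self._data = {}
        self._dirty = False

    def makeDirty(self):
        self._dirty = True

    def saveIfDirty(self):
        if not self._dirty:
            return  # nothing changed; skip the write entirely
        with open(self._path, "w") as f:
            json.dump(self._data, f)
        self._dirty = False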
@pytest.mark.asyncio
async def test_backup_expiration(config: Config, time: Time) -> None:
cache = DataCache(config, time)
assert len(cache.backups) == 0
cache.backup("new")[KEY_LAST_SEEN] = time.now().isoformat()
cache.backup("old")[KEY_LAST_SEEN] = (
time.now() - timedelta(days=CACHE_EXPIRATION_DAYS + 1)) .isoformat()
cache.makeDirty()
cache.saveIfDirty()
assert len(cache.backups) == 1
assert "new" in cache.backups
assert "old" not in cache.backups
@pytest.mark.asyncio
async def test_version_upgrades(time: Time, injector: Injector, config: Config) -> None:
    # Simulate upgrading from an untracked version
assert not os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
cache = injector.get(DataCache)
upgrade_time = time.now()
assert cache.previousVersion == Version.default()
assert cache.currentVersion == Version.parse(VERSION)
assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
data = json.load(f)
assert data["upgrades"] == [{
"prev_version": str(Version.default()),
"new_version": VERSION,
"date": upgrade_time.isoformat()
}]
# Reload the data cache, verify there is no upgrade.
time.advance(days=1)
cache = DataCache(config, time)
assert cache.previousVersion == Version.parse(VERSION)
assert cache.currentVersion == Version.parse(VERSION)
assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
data = json.load(f)
assert data["upgrades"] == [{
"prev_version": str(Version.default()),
"new_version": VERSION,
"date": upgrade_time.isoformat()
}]
# simulate upgrading to a new version, verify an upgrade gets identified.
upgrade_version = Version.parse("200")
class UpgradeCache(DataCache):
def __init__(self):
super().__init__(config, time)
@property
def currentVersion(self):
return upgrade_version
cache = UpgradeCache()
assert cache.previousVersion == Version.parse(VERSION)
assert cache.currentVersion == upgrade_version
assert os.path.exists(config.get(Setting.DATA_CACHE_FILE_PATH))
with open(config.get(Setting.DATA_CACHE_FILE_PATH)) as f:
data = json.load(f)
assert data["upgrades"] == [
{
"prev_version": str(Version.default()),
"new_version": VERSION,
"date": upgrade_time.isoformat()
},
{
"prev_version": VERSION,
"new_version": str(upgrade_version),
"date": time.now().isoformat()
}
]
next_upgrade_time = time.now()
time.advance(days=1)
# Verify version upgrade time queries work as expected
assert cache.getUpgradeTime(Version.parse(VERSION)) == upgrade_time
assert cache.getUpgradeTime(Version.default()) == upgrade_time
assert cache.getUpgradeTime(upgrade_version) == next_upgrade_time
# degenerate case, should never happen but a sensible value needs to be returned
assert cache.getUpgradeTime(Version.parse("201")) == time.now()
@pytest.mark.asyncio
async def test_flag(config: Config, time: Time):
cache = DataCache(config, time)
assert not cache.checkFlag(UpgradeFlags.TESTING_FLAG)
assert not cache.dirty
cache.addFlag(UpgradeFlags.TESTING_FLAG)
assert cache.dirty
assert cache.checkFlag(UpgradeFlags.TESTING_FLAG)
cache.saveIfDirty()
cache = DataCache(config, time)
assert cache.checkFlag(UpgradeFlags.TESTING_FLAG)
@pytest.mark.asyncio
async def test_warn_upgrade_new_install(config: Config, time: Time):
"""A fresh install of the addon should never warn about upgrade snapshots"""
cache = DataCache(config, time)
assert not cache.notifyForIgnoreUpgrades
assert cache._config.get(Setting.IGNORE_UPGRADE_BACKUPS)
@pytest.mark.asyncio
async def test_warn_upgrade_old_install(config: Config, time: Time):
"""An old install of the addon warn about upgrade snapshots"""
with open(config.get(Setting.DATA_CACHE_FILE_PATH), "w") as f:
data = {
"upgrades": [
{
"prev_version": str(Version.default()),
"new_version": "0.108.1",
"date": time.now().isoformat()
}
]
}
json.dump(data, f)
cache = DataCache(config, time)
assert cache.notifyForIgnoreUpgrades
assert not cache._config.get(Setting.IGNORE_UPGRADE_BACKUPS)
@pytest.mark.asyncio
async def test_warn_upgrade_old_install_explicit_ignore_upgrades(config: Config, time: Time, cleandir: str):
"""An old install of the addon should not warn about upgrade snapshots if it explicitly ignores them"""
with open(config.get(Setting.DATA_CACHE_FILE_PATH), "w") as f:
data = {
"upgrades": [
{
"prev_version": str(Version.default()),
"new_version": "0.108.1",
"date": time.now().isoformat()
}
]
}
json.dump(data, f)
config_path = join(cleandir, "config.json")
with open(config_path, "w") as f:
data = {
Setting.IGNORE_UPGRADE_BACKUPS.value: True,
Setting.DATA_CACHE_FILE_PATH.value: config.get(Setting.DATA_CACHE_FILE_PATH)
}
json.dump(data, f)
cache = DataCache(Config.fromFile(config_path), time)
assert not cache.notifyForIgnoreUpgrades
assert cache._config.get(Setting.IGNORE_UPGRADE_BACKUPS)
@pytest.mark.asyncio
async def test_warn_upgrade_old_install_explicit_ignore_others(config: Config, time: Time, cleandir: str):
"""An old install of the addon should not warn about upgrade snapshots if it explicitly ignores them"""
with open(config.get(Setting.DATA_CACHE_FILE_PATH), "w") as f:
data = {
"upgrades": [
{
"prev_version": str(Version.default()),
"new_version": "0.108.1",
"date": time.now().isoformat()
}
]
}
json.dump(data, f)
config_path = join(cleandir, "config.json")
with open(config_path, "w") as f:
data = {
Setting.IGNORE_OTHER_BACKUPS.value: True,
Setting.DATA_CACHE_FILE_PATH.value: config.get(Setting.DATA_CACHE_FILE_PATH)
}
json.dump(data, f)
cache = DataCache(Config.fromFile(config_path), time)
assert not cache.notifyForIgnoreUpgrades

View File

@@ -0,0 +1,142 @@
import pytest
from backup.config import Config, Setting
from backup.debugworker import DebugWorker
from backup.util import GlobalInfo
from backup.logger import getLogger
from dev.simulationserver import SimulationServer
from .helpers import skipForWindows
from backup.server import ErrorStore
from .conftest import FakeTime
@pytest.mark.asyncio
async def test_dns_info(debug_worker: DebugWorker, config: Config):
skipForWindows()
config.override(Setting.SEND_ERROR_REPORTS, True)
config.override(Setting.DRIVE_HOST_NAME, "localhost")
await debug_worker.doWork()
assert '127.0.0.1' in debug_worker.dns_info['localhost']
assert 'localhost' in debug_worker.dns_info['localhost']
@pytest.mark.asyncio
async def test_bad_host(debug_worker: DebugWorker, config: Config):
skipForWindows()
config.override(Setting.DRIVE_HOST_NAME, "dasdfdfgvxcvvsoejbr.com")
await debug_worker.doWork()
assert "Name or service not known" in debug_worker.dns_info['dasdfdfgvxcvvsoejbr.com']['dasdfdfgvxcvvsoejbr.com']
@pytest.mark.asyncio
async def test_send_error_report(time, debug_worker: DebugWorker, config: Config, global_info: GlobalInfo, server, error_store: ErrorStore):
config.override(Setting.SEND_ERROR_REPORTS, True)
config.override(Setting.DRIVE_HOST_NAME, "localhost")
global_info.sync()
global_info.success()
global_info.sync()
global_info.success()
global_info.sync()
global_info.failed(Exception())
await debug_worker.doWork()
report = error_store.last_error
assert report['report']['sync_success_count'] == 2
assert report['report']['sync_count'] == 3
assert report['report']['failure_count'] == 1
assert report['report']['sync_last_start'] == time.now().isoformat()
assert report['report']['failure_time'] == time.now().isoformat()
assert report['report']['error'] == getLogger("test").formatException(Exception())
@pytest.mark.asyncio
async def test_dont_send_error_report(time, debug_worker: DebugWorker, config: Config, global_info: GlobalInfo, server: SimulationServer, error_store: ErrorStore):
config.override(Setting.SEND_ERROR_REPORTS, False)
config.override(Setting.DRIVE_HOST_NAME, "localhost")
global_info.failed(Exception())
await debug_worker.doWork()
assert error_store.last_error is None
@pytest.mark.asyncio
async def test_only_send_duplicates(time, debug_worker: DebugWorker, config: Config, global_info: GlobalInfo, server, error_store: ErrorStore):
config.override(Setting.SEND_ERROR_REPORTS, True)
config.override(Setting.DRIVE_HOST_NAME, "localhost")
global_info.failed(Exception("boom1"))
firstExceptionTime = time.now()
await debug_worker.doWork()
report = error_store.last_error
assert report['report']["error"] == getLogger("test").formatException(Exception("boom1"))
assert report['report']["time"] == firstExceptionTime.isoformat()
# Same exception shouldn't cause us to send the error report again
time.advance(days=1)
global_info.failed(Exception("boom1"))
await debug_worker.doWork()
report = error_store.last_error
assert report['report']["error"] == getLogger("test").formatException(Exception("boom1"))
assert report['report']["time"] == firstExceptionTime.isoformat()
    # But a new one will send a new report
global_info.failed(Exception("boom2"))
await debug_worker.doWork()
report = error_store.last_error
assert report['report']["error"] == getLogger("test").formatException(Exception("boom2"))
assert report['report']["time"] == time.now().isoformat()
@pytest.mark.asyncio
async def test_send_clear(time, debug_worker: DebugWorker, config: Config, global_info: GlobalInfo, server, error_store: ErrorStore):
config.override(Setting.SEND_ERROR_REPORTS, True)
config.override(Setting.DRIVE_HOST_NAME, "localhost")
# Simulate failure
global_info.failed(Exception("boom"))
await debug_worker.doWork()
# And then success
global_info.success()
time.advance(days=1)
await debug_worker.doWork()
report = error_store.last_error
assert report['report'] == {
'duration': '1 day, 0:00:00'
}
@pytest.mark.asyncio
async def test_health_check_timing_success(server_url, time: FakeTime, debug_worker: DebugWorker, config: Config, server: SimulationServer):
    # Only do successful checks once a day
await debug_worker.doWork()
assert server.interceptor.urlWasCalled("/health")
server.interceptor.clear()
await debug_worker.doWork()
assert not server.interceptor.urlWasCalled("/health")
time.advance(hours=23)
await debug_worker.doWork()
assert not server.interceptor.urlWasCalled("/health")
time.advance(hours=2)
await debug_worker.doWork()
assert server.interceptor.urlWasCalled("/health")
@pytest.mark.asyncio
async def test_health_check_timing_failure(server_url, time: FakeTime, debug_worker: DebugWorker, config: Config, server: SimulationServer):
    # Failed health checks retry after a minute
server.interceptor.setError("/health", 500)
await debug_worker.doWork()
assert server.interceptor.urlWasCalled("/health")
server.interceptor.clear()
await debug_worker.doWork()
assert not server.interceptor.urlWasCalled("/health")
time.advance(seconds=59)
await debug_worker.doWork()
assert not server.interceptor.urlWasCalled("/health")
time.advance(seconds=2)
await debug_worker.doWork()
assert server.interceptor.urlWasCalled("/health")
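# A sketch of the scheduling rule both timing tests encode: successful health
# checks repeat daily, failed ones retry after a minute (constants inferred from
# the time advances above; the real DebugWorker may track this differently).
def _next_health_check_sketch(last_attempt, succeeded: bool):
    from datetime import timedelta
    return last_attempt + (timedelta(hours=24) if succeeded else timedelta(minutes=1))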

View File

@@ -0,0 +1,119 @@
from backup.model import DestinationPrecache, Model, Coordinator
from backup.config import Config, Setting
from tests.faketime import FakeTime
from dev.request_interceptor import RequestInterceptor
from dev.simulated_google import URL_MATCH_DRIVE_API
from backup.drive import DriveSource
from datetime import timedelta
import pytest
@pytest.mark.asyncio
async def test_no_caching_before_cache_time(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
interceptor.clear()
await precache.checkForSmoothing()
assert precache.getNextWarmDate() > time.now()
assert not interceptor.urlWasCalled(URL_MATCH_DRIVE_API)
assert precache.cached(drive.name(), time.now()) is None
@pytest.mark.asyncio
async def test_no_caching_after_sync_time(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
time.setNow(coord.nextSyncAttempt())
interceptor.clear()
await precache.checkForSmoothing()
assert precache.getNextWarmDate() < time.now()
assert not interceptor.urlWasCalled(URL_MATCH_DRIVE_API)
assert precache.cached(drive.name(), time.now()) is None
@pytest.mark.asyncio
async def test_cache_after_warm_date(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
interceptor.clear()
assert precache.getNextWarmDate() < coord.nextSyncAttempt()
time.setNow(precache.getNextWarmDate())
await precache.checkForSmoothing()
assert interceptor.urlWasCalled(URL_MATCH_DRIVE_API)
assert precache.cached(drive.name(), time.now()) is not None
@pytest.mark.asyncio
async def test_no_double_caching(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
interceptor.clear()
time.setNow(precache.getNextWarmDate())
await precache.checkForSmoothing()
assert precache.cached(drive.name(), time.now()) is not None
interceptor.clear()
time.setNow(precache.getNextWarmDate() + (coord.nextSyncAttempt() - precache.getNextWarmDate()) / 2)
await precache.checkForSmoothing()
assert not interceptor.urlWasCalled(URL_MATCH_DRIVE_API)
assert precache.cached(drive.name(), time.now()) is not None
@pytest.mark.asyncio
async def test_cache_expiration(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
time.setNow(precache.getNextWarmDate())
await precache.checkForSmoothing()
assert precache.cached(drive.name(), time.now()) is not None
time.setNow(coord.nextSyncAttempt() + timedelta(minutes=2))
assert precache.cached(drive.name(), time.now()) is None
@pytest.mark.asyncio
async def test_cache_clear(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
time.setNow(precache.getNextWarmDate())
await precache.checkForSmoothing()
assert precache.cached(drive.name(), time.now()) is not None
precache.clear()
assert precache.cached(drive.name(), time.now()) is None
@pytest.mark.asyncio
async def test_cache_error_backoff(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
time.setNow(precache.getNextWarmDate())
interceptor.setError(URL_MATCH_DRIVE_API, status=503)
await precache.checkForSmoothing()
assert precache.cached(drive.name(), time.now()) is None
delta = precache.getNextWarmDate() - time.now()
assert delta >= timedelta(days=1)
@pytest.mark.asyncio
async def test_cache_warm_date_stability(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime) -> None:
await coord.sync()
# The warm date shouldn't change
last_warm = precache.getNextWarmDate()
assert precache.getNextWarmDate() == last_warm
time.setNow(last_warm - timedelta(minutes=1))
assert precache.getNextWarmDate() == last_warm
    # Until the cache is warmed
time.setNow(last_warm)
await precache.checkForSmoothing()
assert precache.cached(drive.name(), time.now()) is not None
assert precache.getNextWarmDate() != last_warm
@pytest.mark.asyncio
async def test_disable_caching(server, precache: DestinationPrecache, model: Model, drive: DriveSource, interceptor: RequestInterceptor, coord: Coordinator, time: FakeTime, config: Config) -> None:
await coord.sync()
config.override(Setting.CACHE_WARMUP_MAX_SECONDS, 0)
time.setNow(precache.getNextWarmDate())
await precache.checkForSmoothing()
assert precache.cached(drive.name(), time.now()) is None
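# A sketch of the "smoothing" idea these tests circle around: pick a warm-up
# time somewhere inside the window before the next sync so that many installs
# don't all query Drive at the same moment. Purely illustrative; the real
# DestinationPrecache chooses its warm date internally.
def _choose_warm_date_sketch(now, next_sync):
    import random
    return now + (next_sync - now) * random.random()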

File diff suppressed because it is too large

View File

@@ -0,0 +1,89 @@
from backup.config.durationparser import DurationParser
from datetime import timedelta
def test_parse_days():
parser = DurationParser()
assert parser.parse("1 days") == timedelta(days=1)
assert parser.parse("5 days") == timedelta(days=5)
assert parser.parse("5 d") == timedelta(days=5)
assert parser.parse("5d") == timedelta(days=5)
assert parser.parse("5.0d") == timedelta(days=5)
assert parser.parse("5.0day") == timedelta(days=5)
assert parser.parse("5.0 day") == timedelta(days=5)
assert parser.parse("5.5 days") == timedelta(days=5, hours=12)
def test_parse_hours():
parser = DurationParser()
assert parser.parse("1 hours") == timedelta(hours=1)
assert parser.parse("5 hours") == timedelta(hours=5)
assert parser.parse("5 h") == timedelta(hours=5)
assert parser.parse("5hour") == timedelta(hours=5)
assert parser.parse("5.0h") == timedelta(hours=5)
assert parser.parse("5.0 hour") == timedelta(hours=5)
assert parser.parse("5.5 h") == timedelta(hours=5, minutes=30)
def test_parse_minutes():
parser = DurationParser()
assert parser.parse("1 minutes") == timedelta(minutes=1)
assert parser.parse("5 min") == timedelta(minutes=5)
assert parser.parse("5 m") == timedelta(minutes=5)
assert parser.parse("5mins") == timedelta(minutes=5)
assert parser.parse("5.0m") == timedelta(minutes=5)
assert parser.parse("5.0 min") == timedelta(minutes=5)
assert parser.parse("5.5 m") == timedelta(minutes=5, seconds=30)
def test_parse_seconds():
parser = DurationParser()
assert parser.parse("1 seconds") == timedelta(seconds=1)
assert parser.parse("5 sec") == timedelta(seconds=5)
assert parser.parse("5 s") == timedelta(seconds=5)
assert parser.parse("5secs") == timedelta(seconds=5)
assert parser.parse("5.0s") == timedelta(seconds=5)
assert parser.parse("5.0 secs") == timedelta(seconds=5)
assert parser.parse("5.5 s") == timedelta(seconds=5, milliseconds=500)
def test_parse_multiple():
parser = DurationParser()
assert parser.parse("1 day, 5 hours, 30 seconds") == timedelta(days=1, hours=5, seconds=30)
assert parser.parse("1 day 5 hours 30 seconds") == timedelta(days=1, hours=5, seconds=30)
assert parser.parse("1d 5 hours 30s") == timedelta(days=1, hours=5, seconds=30)
assert parser.parse("1d 5h 30s") == timedelta(days=1, hours=5, seconds=30)
assert parser.parse("5m 1d 5h 30s") == timedelta(days=1, hours=5, minutes=5, seconds=30)
def test_format():
parser = DurationParser()
assert parser.format(timedelta(days=1)) == "1 days"
assert parser.format(timedelta(seconds=86400)) == "1 days"
assert parser.format(timedelta(hours=1)) == "1 hours"
assert parser.format(timedelta(minutes=1)) == "1 minutes"
assert parser.format(timedelta(seconds=60)) == "1 minutes"
assert parser.format(timedelta(seconds=5)) == "5 seconds"
assert parser.format(timedelta(seconds=1)) == "1 seconds"
assert parser.format(timedelta(days=5, hours=6, minutes=7)) == "5 days, 6 hours, 7 minutes"
assert parser.format(timedelta(days=5, hours=6, minutes=7, seconds=8)) == "5 days, 6 hours, 7 minutes, 8 seconds"
def test_back_and_forth():
doTestConvert(timedelta(hours=5))
doTestConvert(timedelta(minutes=600))
doTestConvert(timedelta(days=30))
doTestConvert(timedelta(days=5, minutes=6, hours=10, seconds=20))
def doTestConvert(duration):
parser = DurationParser()
assert parser.parse(parser.format(duration)) == duration
def test_convert_empty_seconds():
parser = DurationParser()
assert parser.parse("") == timedelta(seconds=0)
assert parser.parse("0") == timedelta(seconds=0)
assert parser.parse("30") == timedelta(seconds=30)
assert parser.parse(str(60 * 60)) == timedelta(seconds=60 * 60)
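# A rough sketch of the parsing behavior the tests above pin down: day, hour,
# minute, and second suffixes in several spellings, plus bare numbers treated
# as seconds. The real DurationParser may differ in details.
def _parse_duration_sketch(text: str) -> timedelta:
    import re
    units = {"d": 86400, "h": 3600, "m": 60, "s": 1}
    total = 0.0
    for value, unit in re.findall(r"([\d.]+)\s*([a-z]*)", text.lower()):
        total += float(value) * units[unit[:1] or "s"]  # bare numbers are seconds
    return timedelta(seconds=total)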

View File

@@ -0,0 +1,28 @@
from backup.config import DurationAsStringValidator
from backup.exceptions import InvalidConfigurationValue
from datetime import timedelta
import pytest
def test_minimum():
parser = DurationAsStringValidator("test", minimum=10)
assert parser.validate("11 seconds") == 11
assert parser.validate(11) == 11
with pytest.raises(InvalidConfigurationValue):
parser.validate("9 seconds")
def test_maximum():
parser = DurationAsStringValidator("test", maximum=10)
assert parser.validate("9 seconds") == 9
assert parser.validate(9) == 9
with pytest.raises(InvalidConfigurationValue):
parser.validate("11 seconds")
assert parser.formatForUi(9) == "9 seconds"
def test_base():
parser = DurationAsStringValidator("test", base_seconds=60)
assert parser.validate("60 seconds") == 1
assert parser.validate(60) == 60
assert parser.formatForUi(1) == "1 minutes"
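# What test_base implies about base_seconds: string inputs are parsed to seconds
# and divided by the base, while plain numbers pass through unscaled. A sketch of
# that rule alone (an assumption drawn only from these assertions; the real
# validator also applies the minimum/maximum bounds tested above):
def _validate_sketch(value, base_seconds=1):
    from backup.config.durationparser import DurationParser
    if isinstance(value, str):
        return DurationParser().parse(value).total_seconds() / base_seconds
    return value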

View File

@@ -0,0 +1,13 @@
import pytest
from backup.util import Estimator
from backup.config import Config, Setting
from backup.exceptions import LowSpaceError
@pytest.mark.asyncio
async def test_check_space(estimator: Estimator, coord, config: Config):
estimator.refresh()
estimator.checkSpace(coord.backups())
config.override(Setting.LOW_SPACE_THRESHOLD, estimator.getBytesFree() + 1)
with pytest.raises(LowSpaceError):
estimator.checkSpace(coord.backups())

View File

@@ -0,0 +1,49 @@
from bs4 import BeautifulSoup
import backup.exceptions
import inspect
import pytest
from backup.exceptions import GoogleCredGenerateError, KnownError, KnownTransient, SimulatedError, GoogleDrivePermissionDenied, InvalidConfigurationValue, LogicError, ProtocolError, NoBackup, NotUploadable, PleaseWait, UploadFailed
from .conftest import ReaderHelper
@pytest.mark.asyncio
async def test_verify_coverage(ui_server, reader: ReaderHelper):
# Get the list of exception codes
ignore = [
KnownError,
KnownTransient,
SimulatedError,
GoogleDrivePermissionDenied,
InvalidConfigurationValue,
LogicError,
NoBackup,
NotUploadable,
PleaseWait,
ProtocolError,
UploadFailed,
GoogleCredGenerateError,
]
codes = {}
for name, obj in inspect.getmembers(backup.exceptions):
if inspect.isclass(obj) and (KnownError in obj.__bases__) and obj not in ignore:
codes[obj().code()] = obj
# Get the list of ui dialogs
document = await reader.get("", json=False)
page = BeautifulSoup(document, 'html.parser')
dialogs = {}
for div in page.find_all("div"):
cls = div.get("class")
if cls is None:
continue
if "error_card" in cls:
for specific_class in cls:
if specific_class in dialogs:
dialogs[specific_class] = dialogs[specific_class] + 1
else:
dialogs[specific_class] = 1
# Make sure exactly one dialog has the class
for code in codes.keys():
assert dialogs[code] == 1

View File

@@ -0,0 +1,186 @@
import pytest
from dev.simulationserver import SimulationServer, RequestInterceptor
from backup.time import Time
from backup.config import Config, Setting
from backup.drive import DriveRequests
from backup.exceptions import CredRefreshMyError, GoogleCredentialsExpired, CredRefreshGoogleError
from backup.tracing_session import TracingSession
from yarl import URL
@pytest.mark.asyncio
async def test_correct_host(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, server_url, interceptor: RequestInterceptor):
    # Verify the correct endpoints get called for a successful request
session.record = True
await drive_requests.exchanger.refresh(drive_requests.creds)
assert interceptor.urlWasCalled("/drive/refresh")
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_some_bad_hosts(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, server_url, interceptor: RequestInterceptor):
session.record = True
config.override(Setting.EXCHANGER_TIMEOUT_SECONDS, 1)
config.override(Setting.TOKEN_SERVER_HOSTS, "https://this.goes.nowhere.info," + str(server_url))
await drive_requests.exchanger.refresh(drive_requests.creds)
assert interceptor.urlWasCalled("/drive/refresh")
# Verify both hosts were checked
    assert session._records[0]['url'] == URL("https://this.goes.nowhere.info").with_path("/drive/refresh")
    assert session._records[1]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_all_bad_hosts(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor):
session.record = True
config.override(Setting.EXCHANGER_TIMEOUT_SECONDS, 1)
config.override(Setting.TOKEN_SERVER_HOSTS, "https://this.goes.nowhere.info,http://also.a.bad.host")
with pytest.raises(CredRefreshMyError) as e:
await drive_requests.exchanger.refresh(drive_requests.creds)
# Error should be about the last host name
assert e.value.reason.index("also.a.bad.host") >= 0
# Verify both hosts were checked
    assert session._records[0]['url'] == URL("https://this.goes.nowhere.info").with_path("/drive/refresh")
    assert session._records[1]['url'] == URL("http://also.a.bad.host").with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_exchange_timeout(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
interceptor.setSleep("/drive/refresh", sleep=10)
config.override(Setting.EXCHANGER_TIMEOUT_SECONDS, 0.1)
with pytest.raises(CredRefreshMyError) as e:
await drive_requests.exchanger.refresh(drive_requests.creds)
# Error should be about the last host name
assert e.value.reason == "Timed out communicating with localhost"
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_exchange_invalid_creds(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
drive_requests.creds._refresh_token = "fail"
with pytest.raises(GoogleCredentialsExpired):
await drive_requests.exchanger.refresh(drive_requests.creds)
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_fail_503_with_error(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
interceptor.setError("^/drive/refresh$", 503, response={'error': 'test_value'})
with pytest.raises(CredRefreshGoogleError) as e:
await drive_requests.exchanger.refresh(drive_requests.creds)
assert e.value.message() == "Couldn't refresh your credentials with Google because: 'test_value'"
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_fail_503_invalid_grant(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
interceptor.setError("^/drive/refresh$", 503, response={'error': 'invalid_grant'})
with pytest.raises(GoogleCredentialsExpired):
await drive_requests.exchanger.refresh(drive_requests.creds)
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_fail_503_with_invalid_json(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
interceptor.setError("^/drive/refresh$", 503, response={'ignored': 'nothing'})
with pytest.raises(CredRefreshMyError) as e:
await drive_requests.exchanger.refresh(drive_requests.creds)
assert e.value.message() == "Couldn't refresh Google Drive credentials because: HTTP 503 from localhost"
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_fail_503_with_no_data(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
interceptor.setError("^/drive/refresh$", 503)
with pytest.raises(CredRefreshMyError) as e:
await drive_requests.exchanger.refresh(drive_requests.creds)
assert e.value.message() == "Couldn't refresh Google Drive credentials because: HTTP 503 from localhost"
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_fail_401(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
interceptor.setError("^/drive/refresh$", 401)
with pytest.raises(GoogleCredentialsExpired):
await drive_requests.exchanger.refresh(drive_requests.creds)
    # Verify the expected host was called
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_fail_401_no_fall_through(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
config.override(Setting.TOKEN_SERVER_HOSTS, str(server_url) + "," + str(server_url))
interceptor.setError("^/drive/refresh$", 401)
with pytest.raises(GoogleCredentialsExpired):
await drive_requests.exchanger.refresh(drive_requests.creds)
    # Verify only one host was checked, with no fall-through to the second
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
assert len(session._records) == 1
@pytest.mark.asyncio
async def test_invalid_grant_no_fall_through(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
config.override(Setting.TOKEN_SERVER_HOSTS, str(server_url) + "," + str(server_url))
interceptor.setError("^/drive/refresh$", 503, response={'error': 'invalid_grant'})
with pytest.raises(GoogleCredentialsExpired):
await drive_requests.exchanger.refresh(drive_requests.creds)
    # Verify only one host was checked, with no fall-through to the second
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
assert len(session._records) == 1
@pytest.mark.asyncio
async def test_timeout_fall_through(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
config.override(Setting.EXCHANGER_TIMEOUT_SECONDS, 0.1)
config.override(Setting.TOKEN_SERVER_HOSTS, str(server_url) + "," + str(server_url))
interceptor.setSleep("^/drive/refresh$", sleep=10, wait_for=1)
await drive_requests.exchanger.refresh(drive_requests.creds)
# Verify both hosts were checked
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
    assert session._records[1]['url'] == server_url.with_path("/drive/refresh")
@pytest.mark.asyncio
async def test_anything_else_through(time: Time, session: TracingSession, config: Config, server: SimulationServer, drive_requests: DriveRequests, interceptor: RequestInterceptor, server_url: URL):
session.record = True
config.override(Setting.TOKEN_SERVER_HOSTS, str(server_url) + "," + str(server_url))
interceptor.setError("^/drive/refresh$", status=500, fail_for=1)
await drive_requests.exchanger.refresh(drive_requests.creds)
# Verify both hosts were checked
    assert session._records[0]['url'] == server_url.with_path("/drive/refresh")
    assert session._records[1]['url'] == server_url.with_path("/drive/refresh")

View File

@@ -0,0 +1,60 @@
from backup.file import File
from os.path import exists, join
from os import remove
import pytest
import json
TEST_DATA = "when you press my special key I play a little melody"
def readfile(path):
with open(path) as f:
return f.read()
@pytest.mark.asyncio
async def test_basic(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
backup_path = join(tmpdir, "test.json.backup")
assert not File.exists(path)
File.write(path, TEST_DATA)
assert File.exists(path)
assert readfile(path) == TEST_DATA
assert readfile(backup_path) == TEST_DATA
assert File.read(path) == TEST_DATA
File.delete(path)
assert not exists(path)
assert not exists(backup_path)
assert not File.exists(path)
@pytest.mark.asyncio
async def test_file_deleted(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
File.write(path, TEST_DATA)
remove(path)
assert File.read(path) == TEST_DATA
@pytest.mark.asyncio
async def test_backup_deleted(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
backup_path = join(tmpdir, "test.json.backup")
File.write(path, TEST_DATA)
remove(backup_path)
assert File.read(path) == TEST_DATA
@pytest.mark.asyncio
async def test_decode_error(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
File.write(path, TEST_DATA)
with open(path, "w"):
        # empties the file contents
pass
with open(path) as f:
assert len(f.read()) == 0
assert File.read(path) == TEST_DATA
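# A sketch of the double-write pattern these tests rely on: File.write appears to
# persist a ".backup" copy next to the primary file, and File.read falls back to
# whichever copy is still present and non-empty (behavior inferred from the
# assertions above, not from File's source).
def _write_with_backup_sketch(path: str, text: str) -> None:
    for target in (path, path + ".backup"):
        with open(target, "w") as f:
            f.write(text)

def _read_with_fallback_sketch(path: str) -> str:
    for target in (path, path + ".backup"):
        try:
            with open(target) as f:
                content = f.read()
            if content:
                return content
        except OSError:
            continue  # this copy is missing; try the other one
    raise FileNotFoundError(path)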

File diff suppressed because it is too large

View File

@@ -0,0 +1,416 @@
from datetime import timedelta
from backup.model.backups import Backup
import pytest
from backup.util import GlobalInfo
from backup.ha import HaUpdater
from backup.ha.haupdater import REASSURING_MESSAGE
from .faketime import FakeTime
from .helpers import HelperTestSource
from dev.simulationserver import SimulationServer
from backup.logger import getLast
from backup.util import Estimator
from dev.simulated_supervisor import SimulatedSupervisor, URL_MATCH_CORE_API
from dev.request_interceptor import RequestInterceptor
from backup.model import Coordinator
from backup.config import Config, Setting
STALE_ATTRIBUTES = {
"friendly_name": "Backups Stale",
"device_class": "problem"
}
@pytest.fixture
def source():
return HelperTestSource("Source")
@pytest.fixture
def dest():
return HelperTestSource("Dest")
@pytest.mark.asyncio
async def test_init(updater: HaUpdater, global_info, supervisor: SimulatedSupervisor, server, time: FakeTime):
await updater.update()
assert not updater._stale()
assert updater._state() == "waiting"
verifyEntity(supervisor, "binary_sensor.backups_stale",
"off", STALE_ATTRIBUTES)
verifyEntity(supervisor, "sensor.backup_state", "waiting", {
'friendly_name': 'Backup State',
'last_backup': 'Never',
'next_backup': time.now().isoformat(),
'last_uploaded': 'Never',
'backups': [],
'backups_in_google_drive': 0,
'free_space_in_google_drive': "",
'backups_in_home_assistant': 0,
'size_in_google_drive': "0.0 B",
'size_in_home_assistant': '0.0 B'
})
assert supervisor.getNotification() is None
global_info.success()
assert not updater._stale()
assert updater._state() == "backed_up"
@pytest.mark.asyncio
async def test_init_failure(updater: HaUpdater, global_info: GlobalInfo, time: FakeTime, server, supervisor: SimulatedSupervisor):
await updater.update()
assert not updater._stale()
assert updater._state() == "waiting"
global_info.failed(Exception())
assert not updater._stale()
assert updater._state() == "backed_up"
assert supervisor.getNotification() is None
time.advanceDay()
assert updater._stale()
assert updater._state() == "error"
await updater.update()
assert supervisor.getNotification() == {
'message': 'The add-on is having trouble making backups and needs attention. Please visit the add-on status page for details.',
'title': 'Home Assistant Google Drive Backup is Having Trouble',
'notification_id': 'backup_broken'
}
@pytest.mark.asyncio
async def test_failure_backoff_502(updater: HaUpdater, server, time: FakeTime, interceptor: RequestInterceptor):
interceptor.setError(URL_MATCH_CORE_API, 502)
for x in range(9):
await updater.update()
assert time.sleeps == [60, 120, 240, 300, 300, 300, 300, 300, 300]
interceptor.clear()
await updater.update()
assert time.sleeps == [60, 120, 240, 300, 300, 300, 300, 300, 300]
@pytest.mark.asyncio
async def test_failure_backoff_510(updater: HaUpdater, server, time: FakeTime, interceptor: RequestInterceptor):
    interceptor.setError(URL_MATCH_CORE_API, 510)
for x in range(9):
await updater.update()
assert time.sleeps == [60, 120, 240, 300, 300, 300, 300, 300, 300]
interceptor.clear()
await updater.update()
assert time.sleeps == [60, 120, 240, 300, 300, 300, 300, 300, 300]
@pytest.mark.asyncio
async def test_failure_backoff_other(updater: HaUpdater, server, time: FakeTime, interceptor: RequestInterceptor):
interceptor.setError(URL_MATCH_CORE_API, 400)
for x in range(9):
await updater.update()
assert time.sleeps == [60, 120, 240, 300, 300, 300, 300, 300, 300]
interceptor.clear()
await updater.update()
assert time.sleeps == [60, 120, 240, 300, 300, 300, 300, 300, 300]
@pytest.mark.asyncio
async def test_update_backups(updater: HaUpdater, server, time: FakeTime, supervisor: SimulatedSupervisor):
await updater.update()
assert not updater._stale()
assert updater._state() == "waiting"
verifyEntity(supervisor, "binary_sensor.backups_stale",
"off", STALE_ATTRIBUTES)
verifyEntity(supervisor, "sensor.backup_state", "waiting", {
'friendly_name': 'Backup State',
'last_backup': 'Never',
'next_backup': time.now().isoformat(),
'last_uploaded': 'Never',
'backups': [],
'backups_in_google_drive': 0,
'backups_in_home_assistant': 0,
'size_in_home_assistant': "0.0 B",
'size_in_google_drive': "0.0 B",
'free_space_in_google_drive': ''
})
@pytest.mark.asyncio
async def test_update_backups_no_next_backup(updater: HaUpdater, server, time: FakeTime, supervisor: SimulatedSupervisor, config: Config):
config.override(Setting.DAYS_BETWEEN_BACKUPS, 0)
await updater.update()
assert not updater._stale()
assert updater._state() == "waiting"
verifyEntity(supervisor, "binary_sensor.backups_stale",
"off", STALE_ATTRIBUTES)
verifyEntity(supervisor, "sensor.backup_state", "waiting", {
'friendly_name': 'Backup State',
'last_backup': 'Never',
'next_backup': None,
'last_uploaded': 'Never',
'backups': [],
'backups_in_google_drive': 0,
'backups_in_home_assistant': 0,
'size_in_home_assistant': "0.0 B",
'size_in_google_drive': "0.0 B",
'free_space_in_google_drive': ''
})
@pytest.mark.asyncio
async def test_update_backups_sync(updater: HaUpdater, server, time: FakeTime, backup: Backup, supervisor: SimulatedSupervisor, config: Config):
await updater.update()
assert not updater._stale()
assert updater._state() == "backed_up"
verifyEntity(supervisor, "binary_sensor.backups_stale",
"off", STALE_ATTRIBUTES)
date = '1985-12-06T05:00:00+00:00'
verifyEntity(supervisor, "sensor.backup_state", "backed_up", {
'friendly_name': 'Backup State',
'last_backup': date,
'last_uploaded': date,
'next_backup': (backup.date() + timedelta(days=config.get(Setting.DAYS_BETWEEN_BACKUPS))).isoformat(),
'backups': [{
'date': date,
'name': backup.name(),
'size': backup.sizeString(),
'state': backup.status(),
'slug': backup.slug()
}
],
'backups_in_google_drive': 1,
'backups_in_home_assistant': 1,
'size_in_home_assistant': Estimator.asSizeString(backup.size()),
'size_in_google_drive': Estimator.asSizeString(backup.size()),
'free_space_in_google_drive': '5.0 GB'
})
@pytest.mark.asyncio
async def test_notification_link(updater: HaUpdater, server, time: FakeTime, global_info, supervisor: SimulatedSupervisor):
await updater.update()
assert not updater._stale()
assert updater._state() == "waiting"
verifyEntity(supervisor, "binary_sensor.backups_stale",
"off", STALE_ATTRIBUTES)
verifyEntity(supervisor, "sensor.backup_state", "waiting", {
'friendly_name': 'Backup State',
'last_backup': 'Never',
'next_backup': time.now().isoformat(),
'last_uploaded': 'Never',
'backups': [],
'backups_in_google_drive': 0,
'backups_in_home_assistant': 0,
'size_in_home_assistant': "0.0 B",
'size_in_google_drive': "0.0 B",
'free_space_in_google_drive': ''
})
assert supervisor.getNotification() is None
global_info.failed(Exception())
global_info.url = "http://localhost/test"
time.advanceDay()
await updater.update()
assert supervisor.getNotification() == {
'message': 'The add-on is having trouble making backups and needs attention. Please visit the add-on [status page](http://localhost/test) for details.',
'title': 'Home Assistant Google Drive Backup is Having Trouble',
'notification_id': 'backup_broken'
}
@pytest.mark.asyncio
async def test_notification_clears(updater: HaUpdater, server, time: FakeTime, global_info, supervisor: SimulatedSupervisor):
await updater.update()
assert not updater._stale()
assert updater._state() == "waiting"
assert supervisor.getNotification() is None
global_info.failed(Exception())
time.advance(hours=8)
await updater.update()
assert supervisor.getNotification() is not None
global_info.success()
await updater.update()
assert supervisor.getNotification() is None
@pytest.mark.asyncio
async def test_publish_for_failure(updater: HaUpdater, server, time: FakeTime, global_info: GlobalInfo, supervisor: SimulatedSupervisor):
global_info.success()
await updater.update()
assert supervisor.getNotification() is None
time.advance(hours=8)
global_info.failed(Exception())
await updater.update()
assert supervisor.getNotification() is not None
time.advance(hours=8)
global_info.failed(Exception())
await updater.update()
assert supervisor.getNotification() is not None
global_info.success()
await updater.update()
assert supervisor.getNotification() is None
@pytest.mark.asyncio
async def test_failure_logging(updater: HaUpdater, server, time: FakeTime, interceptor: RequestInterceptor):
interceptor.setError(URL_MATCH_CORE_API, 501)
assert getLast() is None
await updater.update()
assert getLast() is None
time.advance(minutes=1)
await updater.update()
assert getLast() is None
time.advance(minutes=5)
await updater.update()
assert getLast().msg == REASSURING_MESSAGE.format(501)
last_log = getLast()
time.advance(minutes=5)
await updater.update()
assert getLast() is not last_log
assert getLast().msg == REASSURING_MESSAGE.format(501)
last_log = getLast()
interceptor.clear()
await updater.update()
assert getLast() is last_log
@pytest.mark.asyncio
async def test_publish_retries(updater: HaUpdater, server: SimulationServer, time: FakeTime, backup, drive, supervisor: SimulatedSupervisor):
await updater.update()
assert supervisor.getEntity("sensor.backup_state") is not None
    # Shouldn't update after 59 minutes
supervisor.clearEntities()
time.advance(minutes=59)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") is None
# after that it should
supervisor.clearEntities()
time.advance(minutes=2)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") is not None
supervisor.clearEntities()
await drive.delete(backup)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") is not None
@pytest.mark.asyncio
async def test_ignored_backups(updater: HaUpdater, time: FakeTime, server: SimulationServer, backup: Backup, supervisor: SimulatedSupervisor, coord: Coordinator, config: Config):
config.override(Setting.IGNORE_OTHER_BACKUPS, True)
time.advance(hours=1)
await supervisor.createBackup({'name': "test_backup"}, date=time.now())
await coord.sync()
await updater.update()
state = supervisor.getAttributes("sensor.backup_state")
assert state["backups_in_google_drive"] == 1
assert state["backups_in_home_assistant"] == 1
assert len(state["backups"]) == 1
assert state['last_backup'] == backup.date().isoformat()
@pytest.mark.asyncio
async def test_update_backups_old_names(updater: HaUpdater, server, backup: Backup, time: FakeTime, supervisor: SimulatedSupervisor, config: Config):
config.override(Setting.CALL_BACKUP_SNAPSHOT, True)
await updater.update()
assert not updater._stale()
assert updater._state() == "backed_up"
verifyEntity(supervisor, "binary_sensor.snapshots_stale",
"off", {"friendly_name": "Snapshots Stale",
"device_class": "problem"})
date = '1985-12-06T05:00:00+00:00'
verifyEntity(supervisor, "sensor.snapshot_backup", "backed_up", {
'friendly_name': 'Snapshot State',
'last_snapshot': date,
'snapshots': [{
'date': date,
'name': backup.name(),
'size': backup.sizeString(),
'state': backup.status(),
'slug': backup.slug()
}
],
'snapshots_in_google_drive': 1,
'snapshots_in_home_assistant': 1,
'snapshots_in_hassio': 1,
'size_in_home_assistant': Estimator.asSizeString(backup.size()),
'size_in_google_drive': Estimator.asSizeString(backup.size())
})
@pytest.mark.asyncio
async def test_drive_free_space(updater: HaUpdater, time: FakeTime, server: SimulationServer, supervisor: SimulatedSupervisor, coord: Coordinator, config: Config):
await updater.update()
state = supervisor.getAttributes("sensor.backup_state")
assert state["free_space_in_google_drive"] == ""
await coord.sync()
await updater.update()
state = supervisor.getAttributes("sensor.backup_state")
assert state["free_space_in_google_drive"] == "5.0 GB"
@pytest.mark.asyncio
async def test_stale_backup_is_error(updater: HaUpdater, server, backup: Backup, time: FakeTime, supervisor: SimulatedSupervisor, config: Config):
config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "backed_up"
time.advance(days=1)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "backed_up"
time.advance(days=1)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "error"
time.advance(days=1)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "error"
@pytest.mark.asyncio
async def test_stale_backup_ignores_pending(updater: HaUpdater, server, backup: Backup, time: FakeTime, supervisor: SimulatedSupervisor, config: Config, coord: Coordinator):
config.override(Setting.DAYS_BETWEEN_BACKUPS, 1)
config.override(Setting.NEW_BACKUP_TIMEOUT_SECONDS, 1)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "backed_up"
time.advance(days=2)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "error"
async with supervisor._backup_inner_lock:
await coord.sync()
assert coord.getBackup("pending") is not None
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "error"
@pytest.mark.asyncio
async def test_stale_backups_fine_for_no_creation(updater: HaUpdater, server, backup: Backup, time: FakeTime, supervisor: SimulatedSupervisor, config: Config, coord: Coordinator):
config.override(Setting.DAYS_BETWEEN_BACKUPS, 0)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "backed_up"
# backups shouldn't become stale because the addon doesn't create them.
time.advance(days=100)
await updater.update()
assert supervisor.getEntity("sensor.backup_state") == "backed_up"
def verifyEntity(backend: SimulatedSupervisor, name, state, attributes):
assert backend.getEntity(name) == state
assert backend.getAttributes(name) == attributes
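# Editorial note: the sleep sequences asserted in the backoff tests above
# (60, 120, 240, then a steady 300) are consistent with doubling backoff
# capped at five minutes. A sketch of that schedule (inferred from the
# asserts, not HaUpdater's actual code):
def next_backoff(previous, initial=60, maximum=300):
    if previous is None:
        return initial
    return min(previous * 2, maximum)
assert [next_backoff(p) for p in [None, 60, 120, 240, 300]] == [60, 120, 240, 300, 300]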

View File

@@ -0,0 +1,63 @@
from backup.file import JsonFileSaver
from os.path import exists, join
from os import remove
import pytest
import json
TEST_DATA = {
'info': "and the value",
'some': 3
}
def readfile(path):
with open(path) as f:
return json.load(f)
@pytest.mark.asyncio
async def test_basic(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
backup_path = join(tmpdir, "test.json.backup")
assert not JsonFileSaver.exists(path)
JsonFileSaver.write(path, TEST_DATA)
assert JsonFileSaver.exists(path)
assert readfile(path) == TEST_DATA
assert readfile(backup_path) == TEST_DATA
assert JsonFileSaver.read(path) == TEST_DATA
JsonFileSaver.delete(path)
assert not exists(path)
assert not exists(backup_path)
assert not JsonFileSaver.exists(path)
@pytest.mark.asyncio
async def test_file_deleted(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
JsonFileSaver.write(path, TEST_DATA)
remove(path)
assert JsonFileSaver.read(path) == TEST_DATA
@pytest.mark.asyncio
async def test_backup_deleted(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
backup_path = join(tmpdir, "test.json.backup")
JsonFileSaver.write(path, TEST_DATA)
remove(backup_path)
assert JsonFileSaver.read(path) == TEST_DATA
@pytest.mark.asyncio
async def test_decode_error(tmpdir: str) -> None:
path = join(tmpdir, "test.json")
JsonFileSaver.write(path, TEST_DATA)
with open(path, "w"):
        # empties the file contents
pass
with open(path) as f:
assert len(f.read()) == 0
assert JsonFileSaver.read(path) == TEST_DATA
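# Same fallback contract as the plain-text File tests earlier, specialized for
# JSON: a truncated or undecodable primary is recovered from the ".backup"
# copy. A sketch under that assumption (not JsonFileSaver's real code):
def read_json_with_fallback(path):
    try:
        with open(path) as f:
            return json.load(f)
    except (OSError, json.JSONDecodeError):
        # An emptied file raises JSONDecodeError, so the backup saves the day.
        with open(path + ".backup") as f:
            return json.load(f)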

File diff suppressed because it is too large

View File

@@ -0,0 +1,26 @@
from backup.util import RangeLookup
def test_lookup():
data = [1, 3, 5]
lookup = RangeLookup(data, lambda x: x)
assert list(lookup.matches(-1, 0)) == []
assert list(lookup.matches(6, 7)) == []
assert list(lookup.matches(2, 2)) == []
assert list(lookup.matches(4, 4)) == []
assert list(lookup.matches(6, 6)) == []
assert list(lookup.matches(0, 6)) == [1, 3, 5]
assert list(lookup.matches(1, 5)) == [1, 3, 5]
assert list(lookup.matches(1, 3)) == [1, 3]
assert list(lookup.matches(0, 4)) == [1, 3]
assert list(lookup.matches(3, 5)) == [3, 5]
assert list(lookup.matches(2, 6)) == [3, 5]
assert list(lookup.matches(0, 2)) == [1]
assert list(lookup.matches(1, 1)) == [1]
assert list(lookup.matches(3, 3)) == [3]
assert list(lookup.matches(2, 4)) == [3]
assert list(lookup.matches(5, 5)) == [5]
assert list(lookup.matches(4, 5)) == [5]
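# The matches() semantics exercised above (inclusive [low, high] bounds over a
# sorted sequence) can be expressed with the bisect module. A sketch that
# mirrors the tested behavior, assuming pre-sorted input as in the test (not
# necessarily RangeLookup's real implementation):
from bisect import bisect_left, bisect_right
class SimpleRangeLookup:
    def __init__(self, items, key):
        self._items = list(items)
        self._keys = [key(item) for item in self._items]
    def matches(self, low, high):
        # bisect_left/bisect_right bound the inclusive window [low, high].
        return iter(self._items[bisect_left(self._keys, low):bisect_right(self._keys, high)])
assert list(SimpleRangeLookup([1, 3, 5], lambda x: x).matches(2, 6)) == [3, 5]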

View File

@@ -0,0 +1,46 @@
import pytest
import socket
from backup.config import Config, Setting
from backup.util import Resolver
@pytest.mark.asyncio
async def test_empty_name_server(resolver: Resolver, config: Config):
assert resolver._alt_dns.nameservers == ["8.8.8.8", "8.8.4.4"]
assert resolver._resolver is resolver._original_dns
config.override(Setting.ALTERNATE_DNS_SERVERS, "")
resolver.updateConfig()
assert resolver._resolver is resolver._alt_dns
# make sure the value is cached
prev = resolver._alt_dns
resolver.updateConfig()
assert resolver._alt_dns is prev
@pytest.mark.asyncio
async def test_toggle(resolver: Resolver):
assert resolver._resolver is resolver._original_dns
resolver.toggle()
assert resolver._resolver is resolver._alt_dns
resolver.toggle()
assert resolver._resolver is resolver._original_dns
@pytest.mark.asyncio
async def test_hard_resolve(resolver: Resolver, config: Config):
expected = [{
'family': 0,
'flags': socket.AddressInfo.AI_NUMERICHOST,
'port': 1234,
'proto': 0,
'host': "1.2.3.4",
'hostname': "www.googleapis.com"
}]
config.override(Setting.DRIVE_IPV4, "1.2.3.4")
assert await resolver.resolve("www.googleapis.com", 1234, 0) == expected
resolver.toggle()
assert await resolver.resolve("www.googleapis.com", 1234, 0) == expected
resolver.toggle()
assert await resolver.resolve("www.googleapis.com", 1234, 0) == expected
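# The expected record above is aiohttp's resolver output format. A
# hypothetical hard-coded resolver in that shape (illustrative only; the
# add-on's Resolver wires its override logic differently):
from aiohttp.abc import AbstractResolver
class HardCodedResolver(AbstractResolver):
    def __init__(self, overrides):
        self._overrides = overrides  # e.g. {"www.googleapis.com": "1.2.3.4"}
    async def resolve(self, host, port=0, family=socket.AF_INET):
        return [{
            'family': 0,
            'flags': socket.AddressInfo.AI_NUMERICHOST,
            'port': port,
            'proto': 0,
            'host': self._overrides[host],
            'hostname': host,
        }]
    async def close(self):
        pass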

View File

@@ -0,0 +1,442 @@
from datetime import datetime, timedelta
import pytest
from dateutil.tz import tzutc
from pytest import fail
from backup.model import GenConfig, GenerationalScheme, DummyBackup, Backup
from backup.time import Time
def test_timezone(time) -> None:
assert time.local_tz is not None
def test_trivial(time) -> None:
config = GenConfig(days=1)
scheme = GenerationalScheme(time, config, count=0)
backups = [
makeBackup("single", time.local(1928, 12, 6))
]
assert scheme.getOldest(backups)[1].date() == time.local(1928, 12, 6)
def test_trivial_empty(time):
config = GenConfig(days=1)
scheme = GenerationalScheme(time, config, count=0)
assert scheme.getOldest([])[1] is None
def test_trivial_oldest(time: Time) -> None:
config = GenConfig(days=1)
scheme = GenerationalScheme(time, config, count=0)
backups = [
makeBackup("test", time.local(1985, 12, 6, 10)),
makeBackup("test", time.local(1985, 12, 6, 12)),
makeBackup("test", time.local(1985, 12, 6, 13))
]
assertRemovalOrder(scheme, backups, [
time.local(1985, 12, 6, 10),
time.local(1985, 12, 6, 12),
time.local(1985, 12, 6, 13)
])
def test_duplicate_weeks(time):
config = GenConfig(weeks=1, day_of_week='wed')
scheme = GenerationalScheme(time, config, count=0)
backups = [
makeBackup("test", time.local(1985, 12, 5)),
makeBackup("test", time.local(1985, 12, 4)),
makeBackup("test", time.local(1985, 12, 1)),
makeBackup("test", time.local(1985, 12, 2))
]
assertRemovalOrder(scheme, backups, [
time.local(1985, 12, 1),
time.local(1985, 12, 2),
time.local(1985, 12, 5),
time.local(1985, 12, 4)
])
def test_duplicate_months(time) -> None:
config = GenConfig(months=2, day_of_month=15)
scheme = GenerationalScheme(time, config, count=0)
backups = [
makeBackup("test", time.local(1985, 12, 6)),
makeBackup("test", time.local(1985, 12, 15)),
makeBackup("test", time.local(1985, 11, 20)),
makeBackup("test", time.local(1985, 11, 15))
]
assertRemovalOrder(scheme, backups, [
time.local(1985, 11, 20),
time.local(1985, 12, 6),
time.local(1985, 11, 15),
time.local(1985, 12, 15)
])
def test_duplicate_years(time):
config = GenConfig(years=2, day_of_year=1)
scheme = GenerationalScheme(time, config, count=0)
backups = [
makeBackup("test", time.local(1985, 12, 31)),
makeBackup("test", time.local(1985, 1, 1)),
makeBackup("test", time.local(1984, 12, 31)),
makeBackup("test", time.local(1984, 1, 1))
]
assertRemovalOrder(scheme, backups, [
time.local(1984, 12, 31),
time.local(1985, 12, 31),
time.local(1984, 1, 1),
time.local(1985, 1, 1)
])
def test_removal_order(time) -> None:
config = GenConfig(days=5, weeks=2, months=2, years=2,
day_of_week='mon', day_of_month=15, day_of_year=1)
scheme = GenerationalScheme(time, config, count=0)
backups = [
# 5 days, week 1
makeBackup("test", time.local(1985, 12, 7)), # day 1
makeBackup("test", time.local(1985, 12, 6)), # day 2
makeBackup("test", time.local(1985, 12, 5)), # day 3
makeBackup("test", time.local(1985, 12, 4)), # day 4
makeBackup("test", time.local(1985, 12, 3)), # day 5
makeBackup("test", time.local(1985, 12, 1)), # 1st week pref
# week 2
makeBackup("test", time.local(1985, 11, 25)), # 1st month pref
# month2
makeBackup("test", time.local(1985, 11, 15)), # 2nd month pref
# year 1
makeBackup("test", time.local(1985, 1, 1)), # 1st year preference
makeBackup("test", time.local(1985, 1, 2)),
# year 2
makeBackup("test", time.local(1984, 6, 1)), # 2nd year pref
makeBackup("test", time.local(1984, 7, 1)),
# year 3
makeBackup("test", time.local(1983, 1, 1)),
]
assertRemovalOrder(scheme, backups, [
time.local(1983, 1, 1),
time.local(1984, 7, 1),
time.local(1985, 1, 2),
time.local(1984, 6, 1),
time.local(1985, 1, 1),
time.local(1985, 11, 15),
time.local(1985, 11, 25),
time.local(1985, 12, 1),
time.local(1985, 12, 3),
time.local(1985, 12, 4),
time.local(1985, 12, 5),
time.local(1985, 12, 6),
time.local(1985, 12, 7)
])
@pytest.mark.timeout(60)
def test_simulate_daily_backup_for_4_years(time):
config = GenConfig(days=4, weeks=4, months=4, years=4,
day_of_week='mon', day_of_month=1, day_of_year=1)
scheme = GenerationalScheme(time, config, count=16)
backups = simulate(time.local(2019, 1, 1),
time.local(2022, 12, 31),
scheme)
assertRemovalOrder(GenerationalScheme(time, config, count=0), backups, [
# 4 years
time.local(2019, 1, 1),
time.local(2020, 1, 1),
time.local(2021, 1, 1),
time.local(2022, 1, 1),
# 4 months
time.local(2022, 9, 1),
time.local(2022, 10, 1),
time.local(2022, 11, 1),
time.local(2022, 12, 1),
# 4 weeks
time.local(2022, 12, 5),
time.local(2022, 12, 12),
time.local(2022, 12, 19),
time.local(2022, 12, 26),
# 4 days
time.local(2022, 12, 28),
time.local(2022, 12, 29),
time.local(2022, 12, 30),
time.local(2022, 12, 31)
])
@pytest.mark.timeout(60)
def test_simulate_aggressive_daily_backup_for_4_years(time):
config = GenConfig(days=4, weeks=4, months=4, years=4,
day_of_week='mon', day_of_month=1, day_of_year=1, aggressive=True)
scheme = GenerationalScheme(time, config, count=16)
backups = simulate(time.local(2019, 1, 1),
time.local(2022, 12, 31),
scheme)
assertRemovalOrder(GenerationalScheme(time, config, count=0), backups, [
# 4 years
time.local(2019, 1, 1),
time.local(2020, 1, 1),
time.local(2021, 1, 1),
time.local(2022, 1, 1),
# 4 months
time.local(2022, 9, 1),
time.local(2022, 10, 1),
time.local(2022, 11, 1),
time.local(2022, 12, 1),
# 4 weeks
time.local(2022, 12, 5),
time.local(2022, 12, 12),
time.local(2022, 12, 19),
time.local(2022, 12, 26),
# 4 days
time.local(2022, 12, 28),
time.local(2022, 12, 29),
time.local(2022, 12, 30),
time.local(2022, 12, 31),
])
def test_count_limit(time):
config = GenConfig(years=2, day_of_year=1)
scheme = GenerationalScheme(time, config, count=1)
backups = [
makeBackup("test", time.local(1985, 1, 1)),
makeBackup("test", time.local(1984, 1, 1))
]
assertRemovalOrder(scheme, backups, [
time.local(1984, 1, 1)
])
def test_aggressive_removal_below_limit(time):
config = GenConfig(years=2, day_of_year=1, aggressive=True)
scheme = GenerationalScheme(time, config, count=5)
backups = [
makeBackup("test", time.local(1985, 1, 1)),
makeBackup("test", time.local(1985, 1, 2))
]
assertRemovalOrder(scheme, backups, [
time.local(1985, 1, 2)
])
def test_aggressive_removal_at_limit_ok(time):
config = GenConfig(years=2, day_of_year=1, aggressive=True)
scheme = GenerationalScheme(time, config, count=2)
backups = [
makeBackup("test", time.local(1985, 1, 1)),
makeBackup("test", time.local(1984, 1, 1))
]
assertRemovalOrder(scheme, backups, [])
def test_aggressive_removal_over_limit(time):
config = GenConfig(years=2, day_of_year=1, aggressive=True)
scheme = GenerationalScheme(time, config, count=2)
backups = [
makeBackup("test", time.local(1985, 1, 1)),
makeBackup("test", time.local(1984, 1, 1)),
makeBackup("test", time.local(1983, 1, 1)),
makeBackup("test", time.local(1983, 1, 2))
]
assertRemovalOrder(scheme, backups, [
time.local(1983, 1, 1),
time.local(1983, 1, 2)
])
def test_removal_order_week(time: Time):
config = GenConfig(weeks=1, day_of_week='wed', aggressive=True)
scheme = GenerationalScheme(time, config, count=1)
backups = [
makeBackup("test", time.local(2019, 10, 28)),
makeBackup("test", time.local(2019, 10, 29)),
makeBackup("test", time.local(2019, 10, 30, 1)),
makeBackup("test", time.local(2019, 10, 30, 2)),
makeBackup("test", time.local(2019, 10, 31)),
makeBackup("test", time.local(2019, 11, 1)),
makeBackup("test", time.local(2019, 11, 2)),
makeBackup("test", time.local(2019, 11, 3)),
]
assertRemovalOrder(scheme, backups, [
time.local(2019, 10, 28),
time.local(2019, 10, 29),
time.local(2019, 10, 30, 1),
time.local(2019, 10, 31),
time.local(2019, 11, 1),
time.local(2019, 11, 2),
time.local(2019, 11, 3)
])
def test_removal_order_month(time):
config = GenConfig(months=1, day_of_month=20, aggressive=True)
scheme = GenerationalScheme(time, config, count=1)
backups = [
makeBackup("test", time.local(2019, 1, 1)),
makeBackup("test", time.local(2019, 1, 2)),
makeBackup("test", time.local(2019, 1, 20, 1)),
makeBackup("test", time.local(2019, 1, 20, 2)),
makeBackup("test", time.local(2019, 1, 21)),
makeBackup("test", time.local(2019, 1, 25)),
makeBackup("test", time.local(2019, 1, 26)),
makeBackup("test", time.local(2019, 1, 27)),
]
assertRemovalOrder(scheme, backups, [
time.local(2019, 1, 1),
time.local(2019, 1, 2),
time.local(2019, 1, 20, 1),
time.local(2019, 1, 21),
time.local(2019, 1, 25),
time.local(2019, 1, 26),
time.local(2019, 1, 27)
])
def test_removal_order_many_months(time):
config = GenConfig(months=70, day_of_month=20, aggressive=True)
scheme = GenerationalScheme(time, config, count=10)
backups = [
makeBackup("test", time.local(2019, 7, 20)), # preferred
makeBackup("test", time.local(2018, 7, 18)), # preferred
makeBackup("test", time.local(2018, 7, 21)),
makeBackup("test", time.local(2017, 1, 19)),
makeBackup("test", time.local(2017, 1, 20)), # preferred
makeBackup("test", time.local(2017, 1, 31)),
makeBackup("test", time.local(2016, 12, 1)), # preferred
makeBackup("test", time.local(2014, 1, 31)),
makeBackup("test", time.local(2014, 1, 1)), # preferred
]
assertRemovalOrder(scheme, backups, [
time.local(2014, 1, 31),
time.local(2017, 1, 19),
time.local(2017, 1, 31),
time.local(2018, 7, 21),
])
def test_removal_order_years(time):
config = GenConfig(years=2, day_of_year=15, aggressive=True)
scheme = GenerationalScheme(time, config, count=10)
backups = [
makeBackup("test", time.local(2019, 2, 15)),
makeBackup("test", time.local(2019, 1, 15)), # keep
makeBackup("test", time.local(2018, 1, 14)),
makeBackup("test", time.local(2018, 1, 15)), # keep
makeBackup("test", time.local(2018, 1, 16)),
makeBackup("test", time.local(2017, 1, 15)),
]
assertRemovalOrder(scheme, backups, [
time.local(2017, 1, 15),
time.local(2018, 1, 14),
time.local(2018, 1, 16),
time.local(2019, 2, 15),
])
@pytest.mark.asyncio
async def test_ignored_generational_labels(time):
config = GenConfig(days=2)
scheme = GenerationalScheme(time, config, count=10)
backup1 = makeBackup("test", time.local(2019, 2, 15))
backup2 = makeBackup("test", time.local(2019, 2, 14))
backup3 = makeBackup("test", time.local(2019, 2, 13), ignore=True)
backups = [backup1, backup2, backup3]
scheme.handleNaming(backups)
assert backup1.getStatusDetail() == ['Day 1 of 2']
assert backup2.getStatusDetail() == ['Day 2 of 2']
assert backup3.getStatusDetail() is None
def getRemovalOrder(scheme, toCheck):
    backups = list(toCheck)
    removed = []
    while True:
        # getOldest() returns a (reason, backup) tuple, as the helpers below
        # and the tests above rely on.
        reason, oldest = scheme.getOldest(backups)
        if oldest is None:
            break
        removed.append(oldest.date())
        backups.remove(oldest)
    return removed
def assertRemovalOrder(scheme, toCheck, expected):
backups = list(toCheck)
removed = []
index = 0
time = scheme.time
while True:
reason, oldest = scheme.getOldest(backups)
if index >= len(expected):
if oldest is not None:
fail("at index {0}, expected 'None' but got {1}".format(
index, time.toLocal(oldest.date())))
break
if oldest.date() != expected[index]:
fail("at index {0}, expected {1} but got {2}".format(
index, time.toLocal(expected[index]), time.toLocal(oldest.date())))
removed.append(oldest.date())
backups.remove(oldest)
index += 1
return removed
def makeBackup(slug, date, name=None, ignore=False) -> Backup:
if not name:
name = slug
return DummyBackup(name, date.astimezone(tzutc()), "src", slug, ignore=ignore)
def simulate(start: datetime, end: datetime, scheme: GenerationalScheme, backups=None):
    # Use None instead of a mutable default argument, which would leak
    # backups between the two simulation tests above.
    if backups is None:
        backups = []
    today = start
    while today <= end:
        backups.append(makeBackup("test", today))
        reason, oldest = scheme.getOldest(backups)
        while oldest is not None:
            backups.remove(oldest)
            reason, oldest = scheme.getOldest(backups)
        # Advance by 27 hours, then snap to local midnight, so the simulation
        # creates exactly one backup per simulated day.
        today = today + timedelta(hours=27)
        today = scheme.time.local(today.year, today.month, today.day)
    return backups
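# A toy illustration of the "preferred day" idea the weekly tests encode:
# within each period, the backup closest to the configured day survives
# longest. This is an editorial sketch of the concept, not
# GenerationalScheme's actual algorithm.
WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
def weekly_keeper(backups_in_week, day_of_week='wed'):
    target = WEEKDAYS.index(day_of_week)
    # min() selects the backup whose weekday is nearest the preferred day.
    return min(backups_in_week, key=lambda b: abs(b.date().weekday() - target))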

View File

@@ -0,0 +1,59 @@
import pytest
from yarl import URL
from dev.simulationserver import SimulationServer
from aiohttp import ClientSession, hdrs
from backup.config import Config
from .faketime import FakeTime
import json
@pytest.mark.asyncio
async def test_refresh_known_error(server: SimulationServer, session: ClientSession, config: Config, server_url: URL):
async with session.post(server_url.with_path("drive/refresh"), json={"blah": "blah"}) as r:
assert r.status == 503
assert await r.json() == {
'error': "Required key 'refresh_token' was missing from the request payload"
}
@pytest.mark.asyncio
async def test_refresh_unknown_error(server: SimulationServer, session: ClientSession, config: Config, server_url: URL):
async with session.post(server_url.with_path("drive/refresh"), data={}) as r:
assert r.status == 500
assert len((await r.json())["error"]) > 0
@pytest.mark.asyncio
async def test_old_auth_method(server: SimulationServer, session: ClientSession, server_url: URL):
start_auth = server_url.with_path("drive/authorize").with_query({
"redirectbacktoken": "http://example.com"
})
# Verify the redirect to Drive's oauthv2 endpoint
async with session.get(start_auth, data={}, allow_redirects=False) as r:
assert r.status == 303
redirect = URL(r.headers[hdrs.LOCATION])
assert redirect.path == "/o/oauth2/v2/auth"
assert redirect.host == "localhost"
# Verify the redirect back to the server's oauth page
async with session.get(redirect, data={}, allow_redirects=False) as r:
assert r.status == 303
redirect = URL(r.headers[hdrs.LOCATION])
assert redirect.path == "/drive/authorize"
assert redirect.host == "localhost"
    # Verify we get redirected back to the addon (example.com) with creds
async with session.get(redirect, data={}, allow_redirects=False) as r:
assert r.status == 303
redirect = URL(r.headers[hdrs.LOCATION])
assert redirect.query.get("creds") is not None
assert redirect.host == "example.com"
@pytest.mark.asyncio
async def test_log_to_firestore(time: FakeTime, server: SimulationServer, session: ClientSession, server_url: URL):
data = {"info": "testing"}
async with session.post(server_url.with_path("logerror"), data=json.dumps(data)) as r:
assert r.status == 200
assert server._authserver.error_store.last_error is not None
assert server._authserver.error_store.last_error['report'] == data

View File

@@ -0,0 +1,37 @@
from backup.config import Setting, addon_config, _CONFIG
def test_defaults():
# all settings should have a default
for setting in Setting:
if setting is not Setting.DEBUGGER_PORT:
assert setting.default() is not None, setting.value + " has no default"
def test_validators():
    # all settings should have a validator
for setting in Setting:
assert setting.validator() is not None, setting.value + " has no validator"
def test_defaults_are_valid():
# all defaults values should be valid and validate to their own value
for setting in Setting:
assert setting.validator().validate(setting.default()) == setting.default()
def test_setting_configuration():
# All settings in the default config should have the exact same parse expression
for setting in Setting:
if setting.value in addon_config["schema"]:
if setting != Setting.GENERATIONAL_DAY_OF_WEEK:
assert _CONFIG[setting] == addon_config["schema"][setting.value], setting.value
def test_settings_present():
    all_settings = set()
    for setting in Setting:
        all_settings.add(setting.value)
    for setting in addon_config["schema"]:
        assert setting in all_settings, setting + " not present in config.json"

View File

@@ -0,0 +1,22 @@
import pytest
import os
from backup.module import MainModule, BaseModule
from backup.starter import Starter
from backup.config import Config, Setting
from injector import Injector
@pytest.mark.asyncio
async def test_bootstrap_requirements(cleandir):
# This just verifies we're able to satisfy starter's injector requirements.
injector = Injector([BaseModule(), MainModule()])
config = injector.get(Config)
config.override(Setting.DATA_CACHE_FILE_PATH, os.path.join(cleandir, "data_cache.json"))
injector.get(Starter)
@pytest.mark.asyncio
async def test_start_and_stop(injector):
starter = injector.get(Starter)
await starter.start()
await starter.stop()

View File

@@ -0,0 +1,64 @@
import datetime
import os
from backup.time import Time, _infer_timezone_from_env, _infer_timezone_from_name, _infer_timezone_from_offset, _infer_timezone_from_system
from .faketime import FakeTime
def test_parse() -> None:
time = Time.parse("1985-12-06 01:01:01.0001")
assert str(time) == "1985-12-06 01:01:01.000100+00:00"
time = Time.parse("1985-12-06 01:01:01.0001+01:00")
assert str(time) == "1985-12-06 01:01:01.000100+01:00"
def test_parse_timezone(time) -> None:
assertUtc(Time.parse("1985-12-06"))
assertUtc(Time.parse("1985-12-06 21:21"))
assertUtc(Time.parse("1985-12-06 21:21+00:00"))
assertUtc(Time.parse("1985-12-06 21:21 UTC"))
assertUtc(Time.parse("1985-12-06 21:21 GGGR"))
assertOffset(Time.parse("1985-12-06 21:21+10"), 10)
assertOffset(Time.parse("1985-12-06 21:21-10"), -10)
def assertOffset(time, hours):
assert time.tzinfo.utcoffset(time) == datetime.timedelta(hours=hours)
def assertUtc(time):
assertOffset(time, 0)
def test_common_timezones(time: FakeTime):
assert _infer_timezone_from_system() is not None
assert _infer_timezone_from_name() is not None
assert _infer_timezone_from_offset() is not None
assert _infer_timezone_from_env() is None
os.environ["TZ"] = "America/Denver"
assert _infer_timezone_from_env().tzname(None) == "America/Denver"
os.environ["TZ"] = "Australia/Brisbane"
assert _infer_timezone_from_env().tzname(None) == "Australia/Brisbane"
tzs = {"SYSTEM": _infer_timezone_from_system(),
"ENV": _infer_timezone_from_env(),
"OFFSET": _infer_timezone_from_offset(),
"NAME": _infer_timezone_from_name()}
for name, tz in tzs.items():
print(name)
time.setTimeZone(tz)
time.now()
time.nowLocal()
time.localize(datetime.datetime(1985, 12, 6))
time.local(1985, 12, 6)
time.toLocal(time.now())
time.toUtc(time.nowLocal())
def test_system_timezone(time: FakeTime):
tz = _infer_timezone_from_system()
assert tz.tzname(time.now()) == "UTC"
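# The helpers above suggest a chain of inference strategies; a hypothetical
# composition (the add-on's real precedence may differ) falls through until
# one of them produces a timezone:
def infer_timezone():
    for strategy in (_infer_timezone_from_env,
                     _infer_timezone_from_system,
                     _infer_timezone_from_name,
                     _infer_timezone_from_offset):
        tz = strategy()
        if tz is not None:
            return tz
    return datetime.timezone.utc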

File diff suppressed because it is too large

View File

@@ -0,0 +1,47 @@
from backup.config import Version
def test_default():
assert Version.default() == Version.default()
assert not Version.default() > Version.default()
assert not Version.default() < Version.default()
assert not Version.default() != Version.default()
assert Version.default() >= Version.default()
assert Version.default() <= Version.default()
def test_version():
assert Version(1, 2, 3) == Version(1, 2, 3)
assert Version(1, 2, 3) >= Version(1, 2, 3)
assert Version(1, 2, 3) <= Version(1, 2, 3)
assert Version(1, 2, 3) > Version(1, 2)
assert Version(1) < Version(2)
assert Version(2) > Version(1)
assert Version(1) != Version(2)
assert Version(1, 2) > Version(1)
assert Version(1) < Version(1, 2)
def test_parse():
assert Version.parse("1.0") == Version(1, 0)
assert Version.parse("1.2.3") == Version(1, 2, 3)
def test_parse_staging():
assert Version.parse("1.0.staging.1") == Version(1, 0, 1)
assert Version.parse("1.0.staging.1").staging
assert Version.parse("1.0.staging.1") > Version(1.0)
assert Version.parse("1.2.3") == Version(1, 2, 3)
def test_junk_strings():
assert Version.parse("1-.2.3.1") == Version(1, 2, 3, 1)
assert Version.parse("ignore-1.2.3.1") == Version(1, 2, 3, 1)
assert Version.parse("1.2.ignore.this.text.3.and...andhere.too.1") == Version(1, 2, 3, 1)
def test_broken_versions():
assert Version.parse("") == Version.default()
assert Version.parse(".") == Version.default()
assert Version.parse("empty") == Version.default()
assert Version.parse("no.version.here") == Version.default()

View File

@@ -0,0 +1,119 @@
from backup.watcher import Watcher
from backup.config import Config, Setting, CreateOptions
from backup.ha import HaSource
from os.path import join
from .faketime import FakeTime
from asyncio import sleep
import pytest
import os
TEST_FILE_NAME = "test.tar"
@pytest.mark.asyncio
async def test_watcher_trigger_on_backup(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
await watcher.start()
assert not await watcher.check()
watcher.noticed_change_signal.clear()
await simulateBackup(config, TEST_FILE_NAME, ha, time)
await watcher.noticed_change_signal.wait()
time.advance(minutes=11)
assert await watcher.check()
@pytest.mark.asyncio
async def test_disable_watching(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
config.override(Setting.WATCH_BACKUP_DIRECTORY, False)
await watcher.start()
assert not await watcher.check()
await simulateBackup(config, TEST_FILE_NAME, ha, time)
await sleep(1)
time.advance(minutes=11)
assert not await watcher.check()
@pytest.mark.asyncio
async def test_watcher_doesnt_trigger_on_no_backup(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
await watcher.start()
assert not await watcher.check()
file = join(config.get(Setting.BACKUP_DIRECTORY_PATH), TEST_FILE_NAME)
watcher.noticed_change_signal.clear()
with open(file, "w"):
pass
await watcher.noticed_change_signal.wait()
time.advance(minutes=11)
assert not await watcher.check()
@pytest.mark.asyncio
async def test_watcher_below_wait_threshold(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
await watcher.start()
assert not await watcher.check()
for x in range(10):
watcher.noticed_change_signal.clear()
await simulateBackup(config, f"{TEST_FILE_NAME}.{x}", ha, time)
await watcher.noticed_change_signal.wait()
time.advance(seconds=9)
assert not await watcher.check()
time.advance(minutes=11)
assert await watcher.check()
@pytest.mark.asyncio
async def test_watcher_triggers_for_deletes(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
await simulateBackup(config, TEST_FILE_NAME, ha, time)
await watcher.start()
assert not await watcher.check()
watcher.noticed_change_signal.clear()
os.remove(join(config.get(Setting.BACKUP_DIRECTORY_PATH), TEST_FILE_NAME))
await watcher.noticed_change_signal.wait()
time.advance(seconds=30)
assert await watcher.check()
@pytest.mark.asyncio
async def test_moves_out_trigger(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
await simulateBackup(config, TEST_FILE_NAME, ha, time)
await watcher.start()
watcher.noticed_change_signal.clear()
os.mkdir(join(config.get(Setting.BACKUP_DIRECTORY_PATH), "subdir"))
os.rename(join(config.get(Setting.BACKUP_DIRECTORY_PATH), TEST_FILE_NAME), join(config.get(Setting.BACKUP_DIRECTORY_PATH), "subdir", TEST_FILE_NAME))
await watcher.noticed_change_signal.wait()
time.advance(minutes=11)
assert await watcher.check()
# Check if move ins are really necessary
# @pytest.mark.asyncio
# async def test_moves_in_trigger(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
# os.mkdir(join(config.get(Setting.BACKUP_DIRECTORY_PATH), "subdir"))
# await simulateBackup(config, "subdir/" + TEST_FILE_NAME, ha, time)
# await watcher.start()
# watcher.noticed_change_signal.clear()
# os.rename(join(config.get(Setting.BACKUP_DIRECTORY_PATH), "subdir", TEST_FILE_NAME), join(config.get(Setting.BACKUP_DIRECTORY_PATH), TEST_FILE_NAME))
# await watcher.noticed_change_signal.wait()
# time.advance(minutes=11)
# assert await watcher.check()
@pytest.mark.asyncio
async def test_subdirs_dont_trigger(server, watcher: Watcher, config: Config, time: FakeTime, ha: HaSource):
await simulateBackup(config, TEST_FILE_NAME, ha, time)
await watcher.start()
watcher.noticed_change_signal.clear()
os.mkdir(join(config.get(Setting.BACKUP_DIRECTORY_PATH), "subdir"))
with open(join(config.get(Setting.BACKUP_DIRECTORY_PATH), "subdir", "ignored.txt"), "w"):
pass
assert not await watcher.check()
time.advance(minutes=11)
assert not await watcher.check()
async def simulateBackup(config, file_name, ha, time):
file = join(config.get(Setting.BACKUP_DIRECTORY_PATH), file_name)
with open(file, "w"):
pass
await ha.create(CreateOptions(time.now(), file_name))

View File

@@ -0,0 +1,46 @@
import asyncio
import pytest
from backup.worker import StopWorkException, Worker
from .faketime import FakeTime
@pytest.mark.asyncio
async def test_worker(time: FakeTime):
data = {'count': 0}
async def work():
if data['count'] >= 5:
raise StopWorkException()
data['count'] += 1
worker = Worker("test", work, time, 1)
task = await worker.start()
await asyncio.wait([task])
assert not worker.isRunning()
assert data['count'] == 5
assert time.sleeps == [1, 1, 1, 1, 1]
# assert worker._task.name == "test"
assert worker.getLastError() is None
@pytest.mark.asyncio
async def test_worker_error(time: FakeTime):
data = {'count': 0}
async def work():
if data['count'] >= 5:
raise StopWorkException()
data['count'] += 1
raise OSError()
worker = Worker("test", work, time, 1)
task = await worker.start()
await asyncio.wait([task])
assert not worker.isRunning()
assert data['count'] == 5
assert time.sleeps == [1, 1, 1, 1, 1]
# assert worker.getName() == "test"
assert worker.getLastError() is not None
assert type(worker.getLastError()) is OSError
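# Both tests above are consistent with a simple run loop: call the work
# function, remember the last error, sleep the interval, and stop cleanly on
# StopWorkException. A sketch of that loop (assumed semantics, not the real
# Worker class; FakeTime.sleep is awaitable in these tests):
async def run_loop(method, time, interval):
    last_error = None
    while True:
        try:
            await method()
            last_error = None
        except StopWorkException:
            return last_error
        except Exception as error:
            # Record the failure but keep the loop alive.
            last_error = error
        await time.sleep(interval)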

View File

@@ -0,0 +1,51 @@
import pytest
from backup.util import TokenBucket
from ..faketime import FakeTime
@pytest.mark.asyncio
async def test_consume(time: FakeTime):
bucket = TokenBucket(time, 10, 1, 1)
assert bucket.consume(1)
assert not bucket.consume(1)
time.advance(seconds=1)
assert bucket.consume(1)
assert not bucket.consume(1)
@pytest.mark.asyncio
async def test_async_consume(time: FakeTime):
bucket = TokenBucket(time, 10, 1, 1)
assert await bucket.consumeWithWait(1, 2) == 1
assert len(time.sleeps) == 0
time.advance(seconds=2)
assert await bucket.consumeWithWait(1, 2) == 2
assert len(time.sleeps) == 0
assert await bucket.consumeWithWait(1, 2) == 1
assert len(time.sleeps) == 1
assert time.sleeps[0] == 1
@pytest.mark.asyncio
async def test_capacity(time: FakeTime):
bucket = TokenBucket(time, 10, 1)
assert await bucket.consumeWithWait(1, 10) == 10
assert len(time.sleeps) == 0
assert await bucket.consumeWithWait(5, 10) == 5
assert len(time.sleeps) == 1
assert time.sleeps[0] == 5
time.clearSleeps()
assert await bucket.consumeWithWait(20, 20) == 20
assert len(time.sleeps) == 1
assert time.sleeps[0] == 20
time.clearSleeps()
time.advance(seconds=5)
assert await bucket.consumeWithWait(1, 10) == 5
@pytest.mark.asyncio
async def test_higher_fill_rate(time: FakeTime):
bucket = TokenBucket(time, capacity=1000, fill_rate=100)
assert await bucket.consumeWithWait(1, 1000) == 1000
assert len(time.sleeps) == 0