update repository references and improve script handling
0
hassio-google-drive-backup/dev/__init__.py
Normal file
404
hassio-google-drive-backup/dev/apiingress.py
Normal file
@@ -0,0 +1,404 @@
from injector import singleton, inject
import asyncio
from ipaddress import ip_address
from typing import Any, Dict, Union, Optional

import aiohttp
from aiohttp import hdrs, web, ClientSession
from aiohttp.web_exceptions import (
    HTTPBadGateway,
    HTTPServiceUnavailable,
    HTTPUnauthorized,
    HTTPNotFound
)
from multidict import CIMultiDict, istr

from backup.logger import getLogger
from .ports import Ports
from .base_server import BaseServer
from .simulated_supervisor import SimulatedSupervisor

ATTR_ADMIN = "admin"
ATTR_ENABLE = "enable"
ATTR_ICON = "icon"
ATTR_PANELS = "panels"
ATTR_SESSION = "session"
ATTR_TITLE = "title"
COOKIE_INGRESS = "ingress_session"
HEADER_TOKEN = "X-Supervisor-Token"
HEADER_TOKEN_OLD = "X-Hassio-Key"
REQUEST_FROM = "HASSIO_FROM"
JSON_RESULT = "result"
JSON_DATA = "data"
JSON_MESSAGE = "message"
RESULT_ERROR = "error"
RESULT_OK = "ok"

_LOGGER = getLogger(__name__)


def api_return_error(message: Optional[str] = None) -> web.Response:
    """Return an API error message."""
    return web.json_response(
        {JSON_RESULT: RESULT_ERROR, JSON_MESSAGE: message}, status=400
    )


def api_return_ok(data: Optional[Dict[str, Any]] = None) -> web.Response:
    """Return an API ok answer."""
    return web.json_response({JSON_RESULT: RESULT_OK, JSON_DATA: data or {}})


def api_process(method):
    """Wrap function with true/false calls to rest api."""

    async def wrap_api(api, *args, **kwargs):
        """Return API information."""
        try:
            answer = await method(api, *args, **kwargs)
        except Exception as err:
            return api_return_error(message=str(err))

        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if isinstance(answer, web.Response):
            return answer
        elif isinstance(answer, bool) and not answer:
            return api_return_error()
        return api_return_ok()

    return wrap_api


class Addon():
    def __init__(self, ports: Ports, token: str):
        self.ports = ports
        self.ip_address = "127.0.0.1"
        self.ingress_port = ports.ingress
        self.token = token


class SysIngress():
    def __init__(self, ports: Ports, token: str, cookie_value: str):
        self.ports = ports
        self.token = token
        self.cookie_value = cookie_value

    def validate_session(self, session):
        return session == self.cookie_value

    def get(self, token):
        if token == self.token:
            return Addon(self.ports, self.token)
        return None


class CoreSysAttributes():
    def __init__(self, ports: Ports, session: ClientSession, token: str, cookie_value: str):
        self.sys_ingress = SysIngress(ports, token, cookie_value)
        self.sys_websession = session


@singleton
class APIIngress(CoreSysAttributes, BaseServer):
    @inject
    def __init__(self, ports: Ports, session: ClientSession, supervisor: SimulatedSupervisor):
        self.addon_token = self.generateId(10)
        self.cookie_value = self.generateId(10)
        super().__init__(ports, session, self.addon_token, self.cookie_value)
        self.ports = ports
        self.supervisor = supervisor

    def routes(self):
        return [
            web.get("/startingress", self.start_ingress),
            web.get("/hassio/ingress/{slug}", self.ingress_panel),
            web.view("/api/hassio_ingress/{token}/{path:.*}", self.handler),
        ]

    def start_ingress(self, request: web.Request):
        resp = web.Response(status=303)
        resp.headers[hdrs.LOCATION] = "/hassio/ingress/" + self.supervisor._addon_slug
        resp.set_cookie(name=COOKIE_INGRESS, value=self.cookie_value, expires="Session",
                        domain=request.url.host, path="/api/hassio_ingress/",
                        httponly=False, secure=False)
        return resp

    def ingress_panel(self, request: web.Request):
        slug = request.match_info.get("slug")
        if slug != self.supervisor._addon_slug:
            raise HTTPNotFound()
        body = """
            <html>
                <head>
                    <meta content="text/html;charset=utf-8" http-equiv="Content-Type">
                    <meta content="utf-8" http-equiv="encoding">
                    <title>Simulated Supervisor Ingress Panel</title>
                    <style type="text/css">
                        iframe {{
                            display: block;
                            width: 100%;
                            height: 100%;
                            border: 0;
                        }}
                    </style>
                </head>
                <body>
                    <div>
                        The Web-UI below is loaded through an iframe. <a href='startingress'>Start a new ingress session</a> if you get permission errors.
                    </div>
                    <iframe src="api/hassio_ingress/{0}/">
                        <html>
                            <head></head>
                            <body></body>
                        </html>
                    </iframe>
                </body>
            </html>
            """.format(self.addon_token)
        resp = web.Response(body=body, content_type="text/html")
        resp.set_cookie(name=COOKIE_INGRESS, value=self.cookie_value, expires="Session",
                        domain=request.url.host, path="/api/hassio_ingress/",
                        httponly=False, secure=False)
        return resp

    """
    The class body below here is copied from
    https://github.com/home-assistant/supervisor/blob/38b0aea8e2a3b9a9614bb5d94959235a0fae235e/supervisor/api/ingress.py#L35
    in order to correctly reproduce the supervisor's kooky ingress proxy behavior.
    """

    def _extract_addon(self, request: web.Request) -> Addon:
        """Return addon, throw an exception if it doesn't exist."""
        token = request.match_info.get("token")

        # Find correct add-on
        addon = self.sys_ingress.get(token)
        if not addon:
            _LOGGER.warning("Ingress for %s not available", token)
            raise HTTPServiceUnavailable()

        return addon

    def _check_ha_access(self, request: web.Request) -> None:
        # always allow
        pass

    def _create_url(self, addon: Addon, path: str) -> str:
        """Create URL to container."""
        return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"

    @api_process
    async def panels(self, request: web.Request) -> Dict[str, Any]:
        """Create a list of panel data."""
        addons = {}
        for addon in self.sys_ingress.addons:
            addons[addon.slug] = {
                ATTR_TITLE: addon.panel_title,
                ATTR_ICON: addon.panel_icon,
                ATTR_ADMIN: addon.panel_admin,
                ATTR_ENABLE: addon.ingress_panel,
            }

        return {ATTR_PANELS: addons}

    @api_process
    async def create_session(self, request: web.Request) -> Dict[str, Any]:
        """Create a new session."""
        self._check_ha_access(request)

        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
        """Route data to Supervisor ingress service."""
        self._check_ha_access(request)

        # Check Ingress Session
        session = request.cookies.get(COOKIE_INGRESS)
        if not self.sys_ingress.validate_session(session):
            _LOGGER.warning("No valid ingress session %s", session)
            raise HTTPUnauthorized()

        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
        try:
            # Websocket
            if _is_websocket(request):
                return await self._handle_websocket(request, addon, path)

            # Request
            return await self._handle_request(request, addon, path)

        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)

        raise HTTPBadGateway()

    async def _handle_websocket(
        self, request: web.Request, addon: Addon, path: str
    ) -> web.WebSocketResponse:
        """Ingress route for websocket."""
        if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
            req_protocols = [
                str(proto.strip())
                for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]
        else:
            req_protocols = ()

        ws_server = web.WebSocketResponse(
            protocols=req_protocols, autoclose=False, autoping=False
        )
        await ws_server.prepare(request)

        # Preparing
        url = self._create_url(addon, path)
        source_header = _init_header(request, addon)

        # Support GET query
        if request.query_string:
            url = f"{url}?{request.query_string}"

        # Start proxy
        async with self.sys_websession.ws_connect(
            url,
            headers=source_header,
            protocols=req_protocols,
            autoclose=False,
            autoping=False,
        ) as ws_client:
            # Proxy requests
            await asyncio.wait(
                [
                    _websocket_forward(ws_server, ws_client),
                    _websocket_forward(ws_client, ws_server),
                ],
                return_when=asyncio.FIRST_COMPLETED,
            )

        return ws_server

    async def _handle_request(
        self, request: web.Request, addon: Addon, path: str
    ) -> Union[web.Response, web.StreamResponse]:
        """Ingress route for request."""
        url = self._create_url(addon, path)
        data = await request.read()
        source_header = _init_header(request, addon)

        async with self.sys_websession.request(
            request.method,
            url,
            headers=source_header,
            params=request.query,
            allow_redirects=False,
            data=data,
        ) as result:
            headers = _response_header(result)

            # Simple request
            if (
                hdrs.CONTENT_LENGTH in result.headers
                and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
            ):
                # Return Response
                body = await result.read()
                return web.Response(
                    headers=headers,
                    status=result.status,
                    content_type=result.content_type,
                    body=body,
                )

            # Stream response
            response = web.StreamResponse(status=result.status, headers=headers)
            response.content_type = result.content_type

            try:
                await response.prepare(request)
                async for data in result.content.iter_chunked(4096):
                    await response.write(data)

            except (
                aiohttp.ClientError,
                aiohttp.ClientPayloadError,
                ConnectionResetError,
            ) as err:
                _LOGGER.error("Stream error with %s: %s", url, err)

            return response


def _init_header(
    request: web.Request, addon: str
) -> Union[CIMultiDict, Dict[str, str]]:
    """Create initial header."""
    headers = {}

    # filter flags
    for name, value in request.headers.items():
        if name in (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_ENCODING,
            hdrs.SEC_WEBSOCKET_EXTENSIONS,
            hdrs.SEC_WEBSOCKET_PROTOCOL,
            hdrs.SEC_WEBSOCKET_VERSION,
            hdrs.SEC_WEBSOCKET_KEY,
            istr(HEADER_TOKEN),
            istr(HEADER_TOKEN_OLD),
        ):
            continue
        headers[name] = value

    # Update X-Forwarded-For
    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

    return headers


def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
    """Create response header."""
    headers = {}

    for name, value in response.headers.items():
        if name in (
            hdrs.TRANSFER_ENCODING,
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_TYPE,
            hdrs.CONTENT_ENCODING,
        ):
            continue
        headers[name] = value

    return headers


def _is_websocket(request: web.Request) -> bool:
    """Return True if request is a websocket."""
    headers = request.headers

    if (
        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
    ):
        return True
    return False


async def _websocket_forward(ws_from, ws_to):
    """Handle websocket message directly."""
    try:
        async for msg in ws_from:
            if msg.type == aiohttp.WSMsgType.TEXT:
                await ws_to.send_str(msg.data)
            elif msg.type == aiohttp.WSMsgType.BINARY:
                await ws_to.send_bytes(msg.data)
            elif msg.type == aiohttp.WSMsgType.PING:
                await ws_to.ping()
            elif msg.type == aiohttp.WSMsgType.PONG:
                await ws_to.pong()
            elif ws_to.closed:
                await ws_to.close(code=ws_to.close_code, message=msg.extra)
    except RuntimeError:
        _LOGGER.warning("Ingress Websocket runtime error")
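A note on wiring: APIIngress exposes its endpoints through routes(), so it can be mounted on a plain aiohttp application next to the other simulators. A minimal sketch of that wiring, under the assumption that the injector bootstrap and port value come from elsewhere in the dev harness (they are not part of this commit):

    # Hypothetical harness: build the singleton via injector and serve its routes.
    from aiohttp import web
    from injector import Injector

    async def serve_ingress(injector: Injector, port: int) -> None:
        app = web.Application()
        ingress = injector.get(APIIngress)   # @singleton, built with Ports/ClientSession
        app.add_routes(ingress.routes())     # /startingress, /hassio/ingress/{slug}, ...
        runner = web.AppRunner(app)
        await runner.setup()
        await web.TCPSite(runner, "0.0.0.0", port).start()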
56
hassio-google-drive-backup/dev/base_server.py
Normal file
@@ -0,0 +1,56 @@
import random
import re
import io
from aiohttp.web import HTTPBadRequest, Request, Response
from typing import Any

rangePattern = re.compile("bytes=\\d+-\\d+")
bytesPattern = re.compile("^bytes \\d+-\\d+/\\d+$")
intPattern = re.compile("\\d+")


class BaseServer:
    def generateId(self, length: int = 30) -> str:
        random_int = random.randint(0, 1000000)
        ret = str(random_int)
        return ret + ''.join(map(lambda x: str(x), range(0, length - len(ret))))

    def timeToRfc3339String(self, time) -> str:
        return time.strftime("%Y-%m-%dT%H:%M:%SZ")

    def serve_bytes(self, request: Request, bytes: bytearray, include_length: bool = True) -> Any:
        if "Range" in request.headers:
            # Do range request
            if not rangePattern.match(request.headers['Range']):
                raise HTTPBadRequest()

            numbers = intPattern.findall(request.headers['Range'])
            start = int(numbers[0])
            end = int(numbers[1])

            if start < 0:
                raise HTTPBadRequest()
            if start > end:
                raise HTTPBadRequest()
            if end > len(bytes) - 1:
                raise HTTPBadRequest()
            resp = Response(body=bytes[start:end + 1], status=206)
            resp.headers['Content-Range'] = "bytes {0}-{1}/{2}".format(
                start, end, len(bytes))
            if include_length:
                resp.headers["Content-length"] = str(len(bytes))
            return resp
        else:
            resp = Response(body=io.BytesIO(bytes))
            resp.headers["Content-length"] = str(len(bytes))
            return resp

    async def readAll(self, request):
        data = bytearray()
        content = request.content
        while True:
            chunk, done = await content.readchunk()
            data.extend(chunk)
            if len(chunk) == 0:
                break
        return data
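serve_bytes implements just enough single-range semantics (inclusive start and end, a 206 status, and a Content-Range echo) for the simulators' download endpoints. A hedged client-side sketch of that contract, assuming a test server that exposes a 100-byte payload through serve_bytes:

    import aiohttp

    async def check_range(url: str) -> None:
        async with aiohttp.ClientSession() as session:
            # Matches rangePattern above; asks for bytes 10..19 inclusive.
            async with session.get(url, headers={"Range": "bytes=10-19"}) as resp:
                assert resp.status == 206
                assert resp.headers["Content-Range"] == "bytes 10-19/100"
                assert len(await resp.read()) == 10  # end index is inclusive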
@@ -0,0 +1,3 @@
authorization_host: "https://dev.habackup.io"
token_server_hosts: "https://token1.dev.habackup.io,https://dev.habackup.io"
default_drive_client_id: "795575624694-jcdhoh1jr1ngccfsbi2f44arr4jupl79.apps.googleusercontent.com"
27
hassio-google-drive-backup/dev/data/dev_options.json
Normal file
@@ -0,0 +1,27 @@
{
  "drive_url": "http://localhost:56153",
  "supervisor_url": "http://localhost:56153/",
  "hassio_header": "test_header",
  "retained_file_path": "hassio-google-drive-backup/dev/data/retained.json",
  "data_cache_file_path": "hassio-google-drive-backup/dev/data/data_cache.json",
  "backup_directory_path": "hassio-google-drive-backup/dev/backup",
  "certfile": "hassio-google-drive-backup/dev/ssl/fullchain.pem",
  "keyfile": "hassio-google-drive-backup/dev/ssl/privkey.pem",
  "secrets_file_path": "hassio-google-drive-backup/dev/data/secrets.yaml",
  "credentials_file_path": "hassio-google-drive-backup/dev/data/credentials.dat",
  "folder_file_path": "hassio-google-drive-backup/dev/data/folder.dat",
  "id_file_path": "hassio-google-drive-backup/dev/data/id.json",
  "stop_addon_state_path": "hassio-google-drive-backup/dev/data/stop_addon_state.json",
  "authorization_host": "http://localhost:56153",
  "token_server_hosts": "http://localhost:56153",
  "drive_refresh_url": "http://localhost:56153/oauth2/v4/token",
  "drive_authorize_url": "http://localhost:56153/o/oauth2/v2/auth",
  "drive_device_code_url": "http://localhost:56153/device/code",
  "drive_token_url": "http://localhost:56153/token",
  "ingress_token_file_path": "hassio-google-drive-backup/dev/data/ingress.dat",
  "log_level": "TRACE",
  "console_log_level": "TRACE",
  "ingress_port": 56152,
  "port": 56151,
  "cache_warmup_max_seconds": 300
}
20
hassio-google-drive-backup/dev/data/drive_dev_options.json
Normal file
@@ -0,0 +1,20 @@
{
  "supervisor_url": "http://localhost:56153/",
  "authorization_host": "https://dev.habackup.io",
  "token_server_hosts": "https://token1.dev.habackup.io,https://dev.habackup.io",
  "hassio_header": "test_header",
  "data_cache_file_path": "hassio-google-drive-backup/dev/data/data_cache.json",
  "retained_file_path": "hassio-google-drive-backup/dev/data/retained.json",
  "backup_directory_path": "hassio-google-drive-backup/dev/backup",
  "certfile": "hassio-google-drive-backup/dev/ssl/fullchain.pem",
  "keyfile": "hassio-google-drive-backup/dev/ssl/privkey.pem",
  "secrets_file_path": "hassio-google-drive-backup/dev/data/secrets.yaml",
  "credentials_file_path": "hassio-google-drive-backup/dev/data/credentials.dat",
  "folder_file_path": "hassio-google-drive-backup/dev/data/folder.dat",
  "id_file_path": "hassio-google-drive-backup/dev/data/id.json",
  "stop_addon_state_path": "hassio-google-drive-backup/dev/data/stop_addon_state.json",
  "ingress_token_file_path": "hassio-google-drive-backup/dev/data/ingress.dat",
  "default_drive_client_id": "795575624694-jcdhoh1jr1ngccfsbi2f44arr4jupl79.apps.googleusercontent.com",
  "ingress_port": 56152,
  "port": 56151
}
17
hassio-google-drive-backup/dev/data/drive_options.json
Normal file
@@ -0,0 +1,17 @@
{
  "supervisor_url": "http://localhost:56153/",
  "hassio_header": "test_header",
  "data_cache_file_path": "hassio-google-drive-backup/dev/data/data_cache.json",
  "retained_file_path": "hassio-google-drive-backup/dev/data/retained.json",
  "backup_directory_path": "hassio-google-drive-backup/dev/backup",
  "certfile": "hassio-google-drive-backup/dev/ssl/fullchain.pem",
  "keyfile": "hassio-google-drive-backup/dev/ssl/privkey.pem",
  "secrets_file_path": "hassio-google-drive-backup/dev/data/secrets.yaml",
  "credentials_file_path": "hassio-google-drive-backup/dev/data/credentials.dat",
  "folder_file_path": "hassio-google-drive-backup/dev/data/folder.dat",
  "ingress_token_file_path": "hassio-google-drive-backup/dev/data/ingress.dat",
  "id_file_path": "hassio-google-drive-backup/dev/data/id.json",
  "stop_addon_state_path": "hassio-google-drive-backup/dev/data/stop_addon_state.json",
  "ingress_port": 56155,
  "port": 56156
}
11
hassio-google-drive-backup/dev/data/options.json
Normal file
@@ -0,0 +1,11 @@
{
  "send_error_reports": true,
  "max_backups_in_ha": 4,
  "max_backups_in_google_drive": 3,
  "days_between_backups": 10,
  "use_ssl": false,
  "backup_name": "{type} Backup {year}-{month}-{day} {hr24}:{min}:{sec}",
  "backup_password": "!secret password1",
  "drive_experimental": true,
  "drive_ipv4": ""
}
2
hassio-google-drive-backup/dev/data/secrets.yaml
Normal file
@@ -0,0 +1,2 @@
password1: "Test value"
for_unit_tests: "password value"
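For context: options.json above sets "backup_password": "!secret password1", which, following the usual Home Assistant secrets convention, presumably resolves to "Test value" from this secrets.yaml via the configured secrets_file_path.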
6
hassio-google-drive-backup/dev/deploy.sh
Executable file
@@ -0,0 +1,6 @@
#!/bin/bash
# Note: docker bind mounts require an absolute host path, so the parent
# directory is resolved from the current working directory.
sudo docker run --rm --privileged \
    -v /home/coder/.docker:/root/.docker \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v "$(pwd)/..":/data \
    homeassistant/amd64-builder --all -t /data
19
hassio-google-drive-backup/dev/deploy_addon.py
Normal file
@@ -0,0 +1,19 @@
import subprocess
import os
import json
from os.path import abspath, join

with open(abspath(join(__file__, "..", "..", "config.json"))) as f:
    version = json.load(f)["version"]
print("Version will be: " + version)
subprocess.run("docker login", shell=True)


platforms = ["amd64", "armv7", "aarch64", "armhf", "i386"]

os.chdir("hassio-google-drive-backup")
for platform in platforms:
    subprocess.run("docker build -f Dockerfile-addon -t sabeechen/hassio-google-drive-backup-{0}:{1} --build-arg BUILD_FROM=homeassistant/{0}-base .".format(platform, version), shell=True)

for platform in platforms:
    subprocess.run("docker push sabeechen/hassio-google-drive-backup-{0}:{1}".format(platform, version), shell=True)
20
hassio-google-drive-backup/dev/deploy_dev_addon.py
Normal file
@@ -0,0 +1,20 @@
import getpass
import subprocess
import os
import json
from os.path import abspath, join

with open(abspath(join(__file__, "..", "..", "config.json"))) as f:
    version = json.load(f)["version"]

try:
    p = getpass.getpass("Enter DockerHub Password")
except Exception as error:
    print('ERROR', error)
    exit()

os.chdir("hassio-google-drive-backup")
print("Setting the appropriate gcloud project...")
subprocess.run("gcloud config set project hassio-drive-backup", shell=True)
print("Building and uploading dev container...")
subprocess.run("gcloud builds submit --config cloudbuild-dev.yaml --substitutions _DOCKERHUB_PASSWORD={0},_VERSION={1}".format(p, version), shell=True)
8
hassio-google-drive-backup/dev/deploy_dev_server.py
Normal file
@@ -0,0 +1,8 @@
import subprocess
import os

os.chdir("hassio-google-drive-backup")
print("Setting the appropriate gcloud project...")
subprocess.run("gcloud config set project hassio-drive-backup-dev", shell=True)
print("Building and uploading server container...")
subprocess.run("gcloud builds submit --config cloudbuild-server.yaml", shell=True)
8
hassio-google-drive-backup/dev/deploy_server.py
Normal file
@@ -0,0 +1,8 @@
import subprocess
import os

os.chdir("hassio-google-drive-backup")
print("Setting the appropriate gcloud project...")
subprocess.run("gcloud config set project hassio-drive-backup", shell=True)
print("Building and uploading server container...")
subprocess.run("gcloud builds submit --config cloudbuild-server.yaml", shell=True)
57
hassio-google-drive-backup/dev/error_tools.py
Normal file
@@ -0,0 +1,57 @@
import argparse
from google.cloud import firestore
from datetime import datetime, timedelta
DELETE_BATCH_SIZE = 200
STORE_NAME = "error_reports"


def delete_old_data():
    # Initialize Firestore
    db = firestore.Client()
    collection_ref = db.collection(STORE_NAME)

    # Define the datetime for one week ago
    week_ago = datetime.now() - timedelta(days=7)

    # Query to find all documents older than a week
    total_deleted = 0
    while True:
        to_delete = 0
        batch = db.batch()
        docs = collection_ref.where('server_time', '<', week_ago).stream()
        for doc in docs:
            to_delete += 1
            batch.delete(doc.reference)
            if to_delete >= DELETE_BATCH_SIZE:
                break
        if to_delete > 0:
            batch.commit()
            total_deleted += to_delete
            print(f"Deleted {to_delete} documents ({total_deleted} total)")
        else:
            break
    print(f"Success: All documents older than a week deleted ({total_deleted} total)")


def main():
    # Create command line argument parser
    parser = argparse.ArgumentParser()

    # Add purge argument
    parser.add_argument("--purge", help="Delete all documents older than a week.", action="store_true")

    # Add any other argument you want in future. For example:
    # parser.add_argument("--future_arg", help="Perform some future operation.")

    args = parser.parse_args()

    # Respond to arguments
    if args.purge:
        confirm = input('Are you sure you want to delete all documents older than a week? (y/n): ')
        if confirm.lower() == 'y':
            delete_old_data()
        else:
            print("Abort: No documents were deleted.")

if __name__ == "__main__":
    main()
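Usage note: the purge is interactive, so a run looks roughly like the following (document counts are illustrative; the prompt and progress lines come directly from the prints above):

    $ python hassio-google-drive-backup/dev/error_tools.py --purge
    Are you sure you want to delete all documents older than a week? (y/n): y
    Deleted 200 documents (200 total)
    Deleted 113 documents (313 total)
    Success: All documents older than a week deleted (313 total)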
6
hassio-google-drive-backup/dev/http_exception.py
Normal file
@@ -0,0 +1,6 @@
from aiohttp.web import HTTPClientError


class HttpMultiException(HTTPClientError):
    def __init__(self, code):
        # Set the status before calling the base initializer so the
        # underlying Response is built with the right code.
        self.status_code = code
        super().__init__()
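This shim exists so a handler can raise an error with an arbitrary status code instead of picking a concrete aiohttp exception class. A hypothetical use, e.g. relaying an upstream failure:

    from aiohttp import web

    async def relay(request: web.Request) -> web.Response:
        upstream_status = 418  # made-up status forwarded from another service
        raise HttpMultiException(upstream_status)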
5
hassio-google-drive-backup/dev/ports.py
Normal file
@@ -0,0 +1,5 @@
class Ports:
    def __init__(self, server, ui, ingress):
        self.server = server
        self.ui = ui
        self.ingress = ingress
136
hassio-google-drive-backup/dev/request_interceptor.py
Normal file
@@ -0,0 +1,136 @@
import re
from aiohttp.web import Request, Response
from asyncio import Event
from aiohttp.web_response import json_response
from injector import singleton, inject
from backup.time import Time
from typing import List


class UrlMatch():
    def __init__(self, time: Time, url, fail_after=None, status=None, response=None, wait=False, sleep=None, fail_for=None):
        self.time = time
        self.url: str = url
        self.fail_after: int = fail_after
        self.status: int = status
        self.wait_event: Event = Event()
        self.trigger_event: Event = Event()
        self.response = response
        self.wait: bool = wait
        self.trigger_event.clear()
        self.wait_event.clear()
        self.sleep = sleep
        self.fail_for = fail_for
        self.responses = []
        self._calls = 0

    def addResponse(self, response):
        self.responses.append(response)

    def stop(self):
        self.wait_event.set()
        self.trigger_event.set()

    def isMatch(self, request):
        return re.match(self.url, request.url.path) or re.match(self.url, str(request.url))

    async def waitForCall(self):
        await self.trigger_event.wait()

    def clear(self):
        self.wait_event.set()

    def callCount(self):
        return self._calls

    async def _doAction(self, request: Request):
        self._calls += 1
        if len(self.responses) > 0:
            return self.responses.pop(0)
        if self.status is not None:
            await self._readAll(request)
            if self.response:
                return json_response(self.response, status=self.status)
            else:
                return Response(status=self.status)
        elif self.wait:
            self.trigger_event.set()
            await self.wait_event.wait()
        elif self.sleep is not None:
            await self.time.sleepAsync(self.sleep, early_exit=self.wait_event)

    async def called(self, request: Request):
        if self.fail_after is None or self.fail_after <= 0:
            if self.fail_for is not None and self.fail_for > 0:
                self.fail_for -= 1
                return await self._doAction(request)
            elif self.fail_for is not None:
                return None

            return await self._doAction(request)
        elif self.fail_after is not None:
            self.fail_after -= 1

    async def _readAll(self, request: Request):
        data = bytearray()
        content = request.content
        while True:
            chunk, done = await content.readchunk()
            data.extend(chunk)
            if len(chunk) == 0:
                break
        return data


@singleton
class RequestInterceptor:
    @inject
    def __init__(self):
        self._matchers: List[UrlMatch] = []
        self._history = []
        self.time = Time()

    def stop(self):
        for matcher in self._matchers:
            matcher.stop()

    def setError(self, url, status=None, fail_after=None, fail_for=None, response=None) -> UrlMatch:
        matcher = UrlMatch(self.time, url, fail_after, status=status, response=response, fail_for=fail_for)
        self._matchers.append(matcher)
        return matcher

    def clear(self):
        self._matchers.clear()
        self._history.clear()

    def setWaiter(self, url, attempts=None):
        matcher = UrlMatch(self.time, url, attempts, wait=True)
        self._matchers.append(matcher)
        return matcher

    def setSleep(self, url, attempts=None, sleep=None, wait_for=None):
        matcher = UrlMatch(self.time, url, attempts, sleep=sleep, fail_for=wait_for)
        self._matchers.append(matcher)
        return matcher

    async def checkUrl(self, request):
        ret = None
        self.record(request)
        for match in self._matchers:
            if match.isMatch(request):
                ret = await match.called(request)
        return ret

    def record(self, request: Request):
        record = str(request.url.path)
        if len(request.url.query_string) > 0:
            record += "?" + str(request.url.query_string)
        self._history.append(record)

    def urlWasCalled(self, url) -> bool:
        for called_url in self._history:
            if url == called_url or re.match(url, called_url):
                return True
        return False
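A sketch of the intended test usage: make a URL fail a fixed number of times, run the operation under test, then assert on what was called. The injected interceptor and the do_sync() operation here are assumptions, not names from this commit:

    # Fail the Drive upload endpoint twice with 503, then pass requests through.
    matcher = interceptor.setError("^/upload/drive/v3/files/$", status=503, fail_for=2)
    await do_sync()                      # whatever operation hits the simulated server
    assert matcher.callCount() == 2      # _doAction only runs for the two failures
    assert interceptor.urlWasCalled("^/upload/drive/v3/files/$")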
522
hassio-google-drive-backup/dev/simulated_google.py
Normal file
@@ -0,0 +1,522 @@
import re

from yarl import URL
from datetime import timedelta
from backup.logger import getLogger
from backup.config import Setting, Config
from backup.time import Time
from backup.creds import KEY_CLIENT_SECRET, KEY_CLIENT_ID, KEY_ACCESS_TOKEN, KEY_TOKEN_EXPIRY
from aiohttp.web import (HTTPBadRequest, HTTPNotFound,
                         HTTPUnauthorized, Request, Response, delete, get,
                         json_response, patch, post, put, HTTPSeeOther)
from injector import inject, singleton
from .base_server import BaseServer, bytesPattern, intPattern
from .ports import Ports
from typing import Any, Dict
from asyncio import Event
from backup.creds import Creds

logger = getLogger(__name__)

mimeTypeQueryPattern = re.compile("^mimeType='.*'$")
parentsQueryPattern = re.compile("^'.*' in parents$")
resumeBytesPattern = re.compile("^bytes \\*/\\d+$")

URL_MATCH_DRIVE_API = "^.*drive.*$"
URL_MATCH_UPLOAD = "^/upload/drive/v3/files/$"
URL_MATCH_UPLOAD_PROGRESS = "^/upload/drive/v3/files/progress/.*$"
URL_MATCH_CREATE = "^/upload/drive/v3/files/progress/.*$"
URL_MATCH_FILE = "^/drive/v3/files/.*$"
URL_MATCH_DEVICE_CODE = "^/device/code$"
URL_MATCH_TOKEN = "^/token$"


@singleton
class SimulatedGoogle(BaseServer):
    @inject
    def __init__(self, config: Config, time: Time, ports: Ports):
        self._time = time
        self.config = config

        # auth state
        self._custom_drive_client_id = self.generateId(5)
        self._custom_drive_client_secret = self.generateId(5)
        self._custom_drive_client_expiration = None
        self._drive_auth_code = "drive_auth_code"
        self._port = ports.server
        self._auth_token = ""
        self._refresh_token = "test_refresh_token"
        self._client_id_hack = None

        # Drive item states
        self.items = {}
        self.lostPermission = []
        self.space_available = 5 * 1024 * 1024 * 1024
        self.usage = 0

        # Upload state information
        self._upload_info: Dict[str, Any] = {}
        self.chunks = []
        self._upload_chunk_wait = Event()
        self._upload_chunk_trigger = Event()
        self._current_chunk = 1
        self._waitOnChunk = 0
        self.device_auth_params = {}
        self._device_code_accepted = None

    def setDriveSpaceAvailable(self, bytes_available):
        self.space_available = bytes_available

    def generateNewAccessToken(self):
        new_token = self.generateId(20)
        self._auth_token = new_token

    def generateNewRefreshToken(self):
        new_token = self.generateId(20)
        self._refresh_token = new_token

    def expireCreds(self):
        self.generateNewAccessToken()
        self.generateNewRefreshToken()

    def expireRefreshToken(self):
        self.generateNewRefreshToken()

    def resetDriveAuth(self):
        self.expireCreds()
        self.config.override(Setting.DEFAULT_DRIVE_CLIENT_ID, self.generateId(5))
        self.config.override(Setting.DEFAULT_DRIVE_CLIENT_SECRET, self.generateId(5))

    def creds(self):
        return Creds(self._time,
                     id=self.config.get(Setting.DEFAULT_DRIVE_CLIENT_ID),
                     expiration=self._time.now() + timedelta(hours=1),
                     access_token=self._auth_token,
                     refresh_token=self._refresh_token)

    def routes(self):
        return [
            put('/upload/drive/v3/files/progress/{id}', self._uploadProgress),
            post('/upload/drive/v3/files/', self._upload),
            post('/drive/v3/files/', self._create),
            get('/drive/v3/files/', self._query),
            delete('/drive/v3/files/{id}/', self._delete),
            patch('/drive/v3/files/{id}/', self._update),
            get('/drive/v3/files/{id}/', self._get),
            post('/oauth2/v4/token', self._oauth2Token),
            get('/o/oauth2/v2/auth', self._oAuth2Authorize),
            get('/drive/customcreds', self._getCustomCred),
            get('/drive/v3/about', self._driveAbout),
            post('/device/code', self._deviceCode),
            get('/device', self._device),
            get('/debug/google', self._debug),
            post('/token', self._driveToken),
        ]

    async def _debug(self, request: Request):
        return json_response({
            "custom_drive_client_id": self._custom_drive_client_id,
            "custom_drive_client_secret": self._custom_drive_client_secret,
            "device_auth_params": self.device_auth_params
        })

    async def _checkDriveHeaders(self, request: Request):
        if request.headers.get("Authorization", "") != "Bearer " + self._auth_token:
            raise HTTPUnauthorized()

    async def _deviceCode(self, request: Request):
        params = await request.post()
        client_id = params['client_id']
        scope = params['scope']
        if client_id != self._custom_drive_client_id or scope != 'https://www.googleapis.com/auth/drive.file':
            raise HTTPUnauthorized()

        self.device_auth_params = {
            'device_code': self.generateId(10),
            'expires_in': 60,
            'interval': 1,
            'user_code': self.generateId(8),
            'verification_url': str(URL("http://localhost").with_port(self._port).with_path("device"))
        }
        self._device_code_accepted = None
        return json_response(self.device_auth_params)

    async def _device(self, request: Request):
        code = request.query.get('code')
        if code:
            if self.device_auth_params.get('user_code', "dfsdfsdfsdfs") == code:
                body = "Accepted"
                self._device_code_accepted = True
                self.generateNewRefreshToken()
                self.generateNewAccessToken()
            else:
                body = "Wrong code"
        else:
            body = """
                <html>
                    <head>
                        <meta content="text/html;charset=utf-8" http-equiv="Content-Type">
                        <meta content="utf-8" http-equiv="encoding">
                        <title>Simulated Drive Device Authorization</title>
                    </head>
                    <body>
                        <div>
                            Enter the device code provided below
                        </div>
                        <form>
                            <label for="code">Device Code:</label><br>
                            <input type="text" value="Device Code" id="code" name="code">
                            <input type="submit" value="Submit">
                        </form>
                    </body>
                </html>
                """
        resp = Response(body=body, content_type="text/html")
        return resp

    async def _oAuth2Authorize(self, request: Request):
        query = request.query
        if query.get('client_id') != self.config.get(Setting.DEFAULT_DRIVE_CLIENT_ID) and query.get('client_id') != self._custom_drive_client_id:
            raise HTTPUnauthorized()
        if query.get('scope') != 'https://www.googleapis.com/auth/drive.file':
            raise HTTPUnauthorized()
        if query.get('response_type') != 'code':
            raise HTTPUnauthorized()
        if query.get('include_granted_scopes') != 'true':
            raise HTTPUnauthorized()
        if query.get('access_type') != 'offline':
            raise HTTPUnauthorized()
        if 'state' not in query:
            raise HTTPUnauthorized()
        if 'redirect_uri' not in query:
            raise HTTPUnauthorized()
        if query.get('prompt') != 'consent':
            raise HTTPUnauthorized()
        if query.get('redirect_uri') == 'urn:ietf:wg:oauth:2.0:oob':
            return json_response({"code": self._drive_auth_code})
        url = URL(query.get('redirect_uri')).with_query({'code': self._drive_auth_code, 'state': query.get('state')})
        raise HTTPSeeOther(str(url))

    async def _getCustomCred(self, request: Request):
        return json_response({
            "client_id": self._custom_drive_client_id,
            "client_secret": self._custom_drive_client_secret
        })

    async def _driveToken(self, request: Request):
        data = await request.post()
        if not self._checkClientIdandSecret(data.get('client_id'), data.get('client_secret')):
            raise HTTPUnauthorized()
        if data.get('grant_type') == 'authorization_code':
            if data.get('redirect_uri') not in ["http://localhost:{}/drive/authorize".format(self._port), 'urn:ietf:wg:oauth:2.0:oob']:
                raise HTTPUnauthorized()
            if data.get('code') != self._drive_auth_code:
                raise HTTPUnauthorized()
        elif data.get('grant_type') == 'urn:ietf:params:oauth:grant-type:device_code':
            if data.get('device_code') != self.device_auth_params['device_code']:
                raise HTTPUnauthorized()
            if self._device_code_accepted is None:
                return json_response({
                    "error": "authorization_pending",
                    "error_description": "Precondition Required"
                }, status=428)
            elif self._device_code_accepted is False:
                raise HTTPUnauthorized()
        else:
            raise HTTPBadRequest()
        self.generateNewRefreshToken()
        resp = {
            'access_token': self._auth_token,
            'refresh_token': self._refresh_token,
            KEY_CLIENT_ID: data.get('client_id'),
            KEY_CLIENT_SECRET: self.config.get(Setting.DEFAULT_DRIVE_CLIENT_SECRET),
            KEY_TOKEN_EXPIRY: self.timeToRfc3339String(self._time.now()),
        }
        if self._custom_drive_client_expiration is not None:
            resp[KEY_TOKEN_EXPIRY] = self.timeToRfc3339String(self._custom_drive_client_expiration)
        return json_response(resp)

    def _checkClientIdandSecret(self, client_id: str, client_secret: str) -> bool:
        if self._custom_drive_client_id == client_id and self._custom_drive_client_secret == client_secret:
            return True
        if client_id == self.config.get(Setting.DEFAULT_DRIVE_CLIENT_ID) and client_secret == self.config.get(Setting.DEFAULT_DRIVE_CLIENT_SECRET):
            return True

        if self._client_id_hack is not None:
            if client_id == self._client_id_hack and client_secret == self.config.get(Setting.DEFAULT_DRIVE_CLIENT_SECRET):
                return True
        return False

    async def _oauth2Token(self, request: Request):
        params = await request.post()
        if not self._checkClientIdandSecret(params['client_id'], params['client_secret']):
            raise HTTPUnauthorized()
        if params['refresh_token'] != self._refresh_token:
            raise HTTPUnauthorized()
        if params['grant_type'] == 'refresh_token':
            self.generateNewAccessToken()
            return json_response({
                'access_token': self._auth_token,
                'expires_in': 3600,
                'token_type': 'doesn\'t matter'
            })
        elif params['grant_type'] == 'urn:ietf:params:oauth:grant-type:device_code':
            if params['device_code'] != self.device_auth_params['device_code']:
                raise HTTPUnauthorized()
            if not self._device_code_accepted:
                return json_response({
                    "error": "authorization_pending",
                    "error_description": "Precondition Required"
                }, status=428)
            return json_response({
                'access_token': self._auth_token,
                'expires_in': 3600,
                'token_type': 'doesn\'t matter'
            })
        else:
            raise HTTPUnauthorized()

    def filter_fields(self, item: Dict[str, Any], fields) -> Dict[str, Any]:
        ret = {}
        for field in fields:
            if field in item:
                ret[field] = item[field]
        return ret

    def parseFields(self, source: str):
        fields = []
        for field in source.split(","):
            if field.startswith("files("):
                fields.append(field[6:])
            elif field.endswith(")"):
                fields.append(field[:-1])
            else:
                fields.append(field)
        return fields

    def formatItem(self, base, id):
        caps = base.get('capabilities', {})
        if 'capabilities' not in base:
            base['capabilities'] = caps
        if 'canAddChildren' not in caps:
            caps['canAddChildren'] = True
        if 'canListChildren' not in caps:
            caps['canListChildren'] = True
        if 'canDeleteChildren' not in caps:
            caps['canDeleteChildren'] = True
        if 'canTrashChildren' not in caps:
            caps['canTrashChildren'] = True
        if 'canTrash' not in caps:
            caps['canTrash'] = True
        if 'canDelete' not in caps:
            caps['canDelete'] = True

        for parent in base.get("parents", []):
            parent_item = self.items[parent]
            # This simulates a very simple shared drive permissions structure
            if parent_item.get("driveId", None) is not None:
                base["driveId"] = parent_item["driveId"]
                base["capabilities"] = parent_item["capabilities"]
        base['trashed'] = False
        base['id'] = id
        base['modifiedTime'] = self.timeToRfc3339String(self._time.now())
        return base

    async def _get(self, request: Request):
        id = request.match_info.get('id')
        await self._checkDriveHeaders(request)
        if id not in self.items:
            raise HTTPNotFound()
        if id in self.lostPermission:
            return Response(
                status=403,
                content_type="application/json",
                text='{"error": {"errors": [{"reason": "forbidden"}]}}')
        request_type = request.query.get("alt", "metadata")
        if request_type == "media":
            # return bytes
            item = self.items[id]
            if 'bytes' not in item:
                raise HTTPBadRequest()
            return self.serve_bytes(request, item['bytes'], include_length=False)
        else:
            fields = request.query.get("fields", "id").split(",")
            return json_response(self.filter_fields(self.items[id], fields))

    async def _update(self, request: Request):
        id = request.match_info.get('id')
        await self._checkDriveHeaders(request)
        if id not in self.items:
            raise HTTPNotFound()
        update = await request.json()
        for key in update:
            if key in self.items[id] and isinstance(self.items[id][key], dict):
                self.items[id][key].update(update[key])
            else:
                self.items[id][key] = update[key]
        return Response()

    async def _driveAbout(self, request: Request):
        return json_response({
            'storageQuota': {
                'usage': self.usage,
                'limit': self.space_available
            },
            'user': {
                'emailAddress': "testing@no.where"
            }
        })

    async def _delete(self, request: Request):
        id = request.match_info.get('id')
        await self._checkDriveHeaders(request)
        if id not in self.items:
            raise HTTPNotFound()
        del self.items[id]
        return Response()

    async def _query(self, request: Request):
        await self._checkDriveHeaders(request)
        query: str = request.query.get("q", "")
        fields = self.parseFields(request.query.get('fields', 'id'))
        if mimeTypeQueryPattern.match(query):
            ret = []
            mimeType = query[len("mimeType='"):-1]
            for item in self.items.values():
                if item.get('mimeType', '') == mimeType:
                    ret.append(self.filter_fields(item, fields))
            return json_response({'files': ret})
        elif parentsQueryPattern.match(query):
            ret = []
            parent = query[1:-len("' in parents")]
            if parent not in self.items:
                raise HTTPNotFound()
            if parent in self.lostPermission:
                return Response(
                    status=403,
                    content_type="application/json",
                    text='{"error": {"errors": [{"reason": "forbidden"}]}}')
            for item in self.items.values():
                if parent in item.get('parents', []):
                    ret.append(self.filter_fields(item, fields))
            return json_response({'files': ret})
        elif len(query) == 0:
            ret = []
            for item in self.items.values():
                ret.append(self.filter_fields(item, fields))
            return json_response({'files': ret})
        else:
            raise HTTPBadRequest()

    async def _create(self, request: Request):
        await self._checkDriveHeaders(request)
        item = self.formatItem(await request.json(), self.generateId(30))
        self.items[item['id']] = item
        return json_response({'id': item['id']})

    async def _upload(self, request: Request):
        logger.info("Drive start upload request")
        await self._checkDriveHeaders(request)
        if request.query.get('uploadType') != 'resumable':
            raise HTTPBadRequest()
        mimeType = request.headers.get('X-Upload-Content-Type', None)
        if mimeType is None:
            raise HTTPBadRequest()
        size = int(request.headers.get('X-Upload-Content-Length', -1))
        if size < 0:
            raise HTTPBadRequest()
        total_size = 0
        for item in self.items.values():
            total_size += item.get('size', 0)
        total_size += size
        if total_size > self.space_available:
            return json_response({
                "error": {
                    "errors": [
                        {"reason": "storageQuotaExceeded"}
                    ]
                }
            }, status=400)
        metadata = await request.json()
        id = self.generateId()

        # Validate parents
        if 'parents' in metadata:
            for parent in metadata['parents']:
                if parent not in self.items:
                    raise HTTPNotFound()
                if parent in self.lostPermission:
                    return Response(status=403, content_type="application/json", text='{"error": {"errors": [{"reason": "forbidden"}]}}')
        self._upload_info['size'] = size
        self._upload_info['mime'] = mimeType
        self._upload_info['item'] = self.formatItem(metadata, id)
        self._upload_info['id'] = id
        self._upload_info['next_start'] = 0
        metadata['bytes'] = bytearray()
        metadata['size'] = size
        resp = Response()
        resp.headers['Location'] = "http://localhost:" + \
            str(self._port) + "/upload/drive/v3/files/progress/" + id
        return resp

    async def _uploadProgress(self, request: Request):
        if self._waitOnChunk > 0:
            if self._current_chunk == self._waitOnChunk:
                self._upload_chunk_trigger.set()
                await self._upload_chunk_wait.wait()
            else:
                self._current_chunk += 1
        id = request.match_info.get('id')
        await self._checkDriveHeaders(request)
        if self._upload_info.get('id', "") != id:
            raise HTTPBadRequest()
        chunk_size = int(request.headers['Content-Length'])
        info = request.headers['Content-Range']
        if resumeBytesPattern.match(info):
            resp = Response(status=308)
            if self._upload_info['next_start'] != 0:
                resp.headers['Range'] = "bytes=0-{0}".format(self._upload_info['next_start'] - 1)
            return resp
        if not bytesPattern.match(info):
            raise HTTPBadRequest()
        numbers = intPattern.findall(info)
        start = int(numbers[0])
        end = int(numbers[1])
        total = int(numbers[2])
        if total != self._upload_info['size']:
            raise HTTPBadRequest()
        if start != self._upload_info['next_start']:
            raise HTTPBadRequest()
        if not (end == total - 1 or chunk_size % (256 * 1024) == 0):
            raise HTTPBadRequest()
        if end > total - 1:
            raise HTTPBadRequest()

        # get the chunk
        received_bytes = await self.readAll(request)

        # validate the chunk
        if len(received_bytes) != chunk_size:
            raise HTTPBadRequest()

        if len(received_bytes) != end - start + 1:
            raise HTTPBadRequest()

        self._upload_info['item']['bytes'].extend(received_bytes)

        if len(self._upload_info['item']['bytes']) != end + 1:
            raise HTTPBadRequest()
        self.usage += len(received_bytes)
        self.chunks.append(len(received_bytes))
        if end == total - 1:
            # upload is complete, so create the item
            completed = self.formatItem(self._upload_info['item'], self._upload_info['id'])
            self.items[completed['id']] = completed
            return json_response({"id": completed['id']})
        else:
            # Return an incomplete response
            # For some reason, the tests like to stop right here
            resp = Response(status=308)
            self._upload_info['next_start'] = end + 1
            resp.headers['Range'] = "bytes=0-{0}".format(end)
            return resp
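Together, _upload and _uploadProgress mimic Google Drive's resumable upload protocol: the initial POST returns a Location header, each PUT carries "Content-Range: bytes start-end/total", and incomplete uploads get a 308 whose Range header echoes what has been stored so far. A hedged client-side sketch of that handshake against the simulator (the base URL, token, and payload are made up; every chunk except the last must be a multiple of 256 KiB, matching the check above):

    import aiohttp

    async def resumable_upload(base: str, payload: bytes):
        chunk = 256 * 1024
        headers = {"Authorization": "Bearer <token>"}  # token from the simulated oauth flow
        async with aiohttp.ClientSession() as session:
            start = await session.post(
                f"{base}/upload/drive/v3/files/?uploadType=resumable",
                headers={**headers,
                         "X-Upload-Content-Type": "application/tar",
                         "X-Upload-Content-Length": str(len(payload))},
                json={"name": "backup.tar", "parents": []})
            location = start.headers["Location"]
            sent = 0
            while sent < len(payload):
                end = min(sent + chunk, len(payload)) - 1
                resp = await session.put(
                    location, data=payload[sent:end + 1],
                    headers={**headers,
                             "Content-Range": f"bytes {sent}-{end}/{len(payload)}"})
                if resp.status == 308:        # more to send; server echoes progress
                    sent = int(resp.headers["Range"].split("-")[1]) + 1
                else:
                    return await resp.json()  # {'id': ...} once the upload completes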
459
hassio-google-drive-backup/dev/simulated_supervisor.py
Normal file
@@ -0,0 +1,459 @@
|
||||
import asyncio
|
||||
from asyncio.tasks import sleep
|
||||
from datetime import timedelta
|
||||
import random
|
||||
import string
|
||||
import io
|
||||
|
||||
from backup.config import Config, Version
|
||||
from backup.time import Time
|
||||
from aiohttp.web import (HTTPBadRequest, HTTPNotFound,
|
||||
HTTPUnauthorized, Request, Response, get,
|
||||
json_response, post, delete, FileResponse)
|
||||
from injector import inject, singleton
|
||||
from .base_server import BaseServer
|
||||
from .ports import Ports
|
||||
from typing import Any, Dict
|
||||
from tests.helpers import all_addons, createBackupTar, parseBackupInfo
|
||||
|
||||
URL_MATCH_BACKUP_FULL = "^/backups/new/full$"
|
||||
URL_MATCH_BACKUP_DELETE = "^/backups/.*$"
|
||||
URL_MATCH_BACKUP_DOWNLOAD = "^/backups/.*/download$"
|
||||
URL_MATCH_MISC_INFO = "^/info$"
|
||||
URL_MATCH_CORE_API = "^/core/api.*$"
|
||||
URL_MATCH_START_ADDON = "^/addons/.*/start$"
|
||||
URL_MATCH_STOP_ADDON = "^/addons/.*/stop$"
|
||||
URL_MATCH_ADDON_INFO = "^/addons/.*/info$"
|
||||
URL_MATCH_SELF_OPTIONS = "^/addons/self/options$"
|
||||
|
||||
URL_MATCH_SNAPSHOT = "^/snapshots.*$"
|
||||
URL_MATCH_BACKUPS = "^/backups.*$"
|
||||
URL_MATCH_MOUNT = "^/mounts*$"
|
||||
|
||||
|
||||
@singleton
|
||||
class SimulatedSupervisor(BaseServer):
|
||||
@inject
|
||||
def __init__(self, config: Config, ports: Ports, time: Time):
|
||||
self._config = config
|
||||
self._time = time
|
||||
self._ports = ports
|
||||
self._auth_token = "test_header"
|
||||
self._backups: Dict[str, Any] = {}
|
||||
self._backup_data: Dict[str, bytearray] = {}
|
||||
self._backup_lock = asyncio.Lock()
|
||||
self._backup_inner_lock = asyncio.Lock()
|
||||
self._entities = {}
|
||||
self._events = []
|
||||
self._attributes = {}
|
||||
self._notification = None
|
||||
self._min_backup_size = 1024 * 1024 * 5
|
||||
self._max_backup_size = 1024 * 1024 * 5
|
||||
self._addon_slug = "self_slug"
|
||||
self._options = self.defaultOptions()
|
||||
self._username = "user"
|
||||
self._password = "pass"
|
||||
self._addons = all_addons.copy()
|
||||
self._super_version = Version(2023, 7)
|
||||
self._mounts = {
|
||||
'default_backup_mount': None,
|
||||
'mounts': [
|
||||
{
|
||||
"name": "my_media_share",
|
||||
"usage": "media",
|
||||
"type": "cifs",
|
||||
"server": "server.local",
|
||||
"share": "media",
|
||||
"state": "active"
|
||||
},
|
||||
{
|
||||
"name": "my_backup_share",
|
||||
"usage": "backup",
|
||||
"type": "nfs",
|
||||
"server": "server.local",
|
||||
"share": "media",
|
||||
"state": "active"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
self.installAddon(self._addon_slug, "Home Assistant Google drive Backup")
|
||||
self.installAddon("42", "The answer")
|
||||
self.installAddon("sgadg", "sdgsagsdgsggsd")
|
||||
|
||||
def defaultOptions(self):
|
||||
return {
|
||||
"max_backups_in_ha": 4,
|
||||
"max_backups_in_google_drive": 4,
|
||||
"days_between_backups": 3
|
||||
}
|
||||
|
||||
    def routes(self):
        return [
            post('/addons/{slug}/options', self._updateOptions),
            post("/core/api/services/persistent_notification/dismiss", self._dismissNotification),
            post("/core/api/services/persistent_notification/create", self._createNotification),
            post("/core/api/events/{name}", self._haEventUpdate),
            post("/core/api/states/{entity}", self._haStateUpdate),
            post('/auth', self._authenticate),
            get('/auth', self._authenticate),
            get('/info', self._miscInfo),
            get('/addons/self/info', self._selfInfo),
            get('/addons', self._allAddons),
            get('/addons/{slug}/info', self._addonInfo),

            post('/addons/{slug}/start', self._startAddon),
            post('/addons/{slug}/stop', self._stopAddon),
            get('/addons/{slug}/logo', self._logoAddon),
            get('/addons/{slug}/icon', self._logoAddon),

            get('/core/info', self._coreInfo),
            get('/supervisor/info', self._supervisorInfo),
            get('/supervisor/logs', self._supervisorLogs),
            get('/core/logs', self._coreLogs),
            get('/debug/insert/backup', self._debug_insert_backup),
            get('/debug/info', self._debugInfo),
            post("/debug/mounts", self._setMounts),

            get('/backups', self._getBackups),
            get('/mounts', self._getMounts),
            delete('/backups/{slug}', self._deletebackup),
            post('/backups/new/upload', self._uploadbackup),
            post('/backups/new/partial', self._newbackup),
            post('/backups/new/full', self._newbackup),
            get('/backups/new/full', self._newbackup),
            get('/backups/{slug}/download', self._backupDownload),
            get('/backups/{slug}/info', self._backupDetail),
            get('/debug/backups/lock', self._lock_backups),

            # TODO: remove once the api path is fully deprecated
            get('/snapshots', self._getSnapshots),
            post('/snapshots/{slug}/remove', self._deletebackup),
            post('/snapshots/new/upload', self._uploadbackup),
            post('/snapshots/new/partial', self._newbackup),
            post('/snapshots/new/full', self._newbackup),
            get('/snapshots/new/full', self._newbackup),
            get('/snapshots/{slug}/download', self._backupDownload),
            get('/snapshots/{slug}/info', self._backupDetail),
        ]

    def getEvents(self):
        return self._events.copy()

    def getEntity(self, entity):
        return self._entities.get(entity)

    def clearEntities(self):
        self._entities = {}

    def addon(self, slug):
        for addon in self._addons:
            if addon["slug"] == slug:
                return addon
        return None

    def getAttributes(self, attribute):
        return self._attributes.get(attribute)

    def getNotification(self):
        return self._notification

    def _formatErrorResponse(self, error: str) -> Response:
        return json_response({'result': error})

    def _formatDataResponse(self, data: Any) -> Response:
        return json_response({'result': 'ok', 'data': data})

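    # Tests flip this lock to simulate a supervisor that is busy with another
    # backup: while _backup_lock is held, _newbackup() answers with HTTP 400,
    # mimicking a supervisor that refuses concurrent backups.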
    async def toggleBlockBackup(self):
        if self._backup_lock.locked():
            self._backup_lock.release()
        else:
            await self._backup_lock.acquire()

    async def _verifyHeader(self, request) -> None:
        if request.headers.get("Authorization", None) == "Bearer " + self._auth_token:
            return
        if request.headers.get("X-Supervisor-Token", None) == self._auth_token:
            return
        raise HTTPUnauthorized()

    async def _getSnapshots(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse({'snapshots': list(self._backups.values())})

    async def _getBackups(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse({'backups': list(self._backups.values())})

    async def _getMounts(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse(self._mounts)

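    # _setMounts is a debug-only hook (registered under /debug/mounts), so it
    # performs no token verification, unlike the handlers above.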
    async def _setMounts(self, request: Request):
        self._mounts = await request.json()
        return self._formatDataResponse({})

    async def _stopAddon(self, request: Request):
        await self._verifyHeader(request)
        slug = request.match_info.get('slug')
        for addon in self._addons:
            if addon.get("slug", "") == slug:
                if addon.get("state") == "started":
                    addon["state"] = "stopped"
                    return self._formatDataResponse({})
        raise HTTPBadRequest()

    async def _logoAddon(self, request: Request):
        await self._verifyHeader(request)
        return FileResponse('hassio-google-drive-backup/backup/static/images/logo.png')

    async def _startAddon(self, request: Request):
        await self._verifyHeader(request)
        slug = request.match_info.get('slug')
        for addon in self._addons:
            if addon.get("slug", "") == slug:
                if addon.get("state") != "started":
                    addon["state"] = "started"
                    return self._formatDataResponse({})
        raise HTTPBadRequest()

    async def _addonInfo(self, request: Request):
        await self._verifyHeader(request)
        slug = request.match_info.get('slug')
        for addon in self._addons:
            if addon.get("slug", "") == slug:
                return self._formatDataResponse({
                    'boot': addon.get("boot"),
                    'watchdog': addon.get("watchdog"),
                    'state': addon.get("state"),
                })
        raise HTTPBadRequest()

    async def _supervisorInfo(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse(
            {
                'version': str(self._super_version)
            }
        )

    async def _allAddons(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse(
            {
                "addons": list(self._addons).copy()
            }
        )

    async def _supervisorLogs(self, request: Request):
        await self._verifyHeader(request)
        return Response(body=self.generate_random_text(20, 10, 20))

    def generate_random_text(self, line_count, min_words=5, max_words=10):
        # Produce plausible-looking log lines: a random log level followed by
        # a random number of short alphanumeric "words".
        lines = []
        log_levels = ["WARN", "WARNING", "INFO", "ERROR", "DEBUG"]
        for _ in range(line_count):
            level = random.choice(log_levels)
            word_count = random.randint(min_words, max_words)
            words = (''.join(random.choices(string.ascii_lowercase + string.digits, k=random.randint(3, 10))) for _ in range(word_count))
            lines.append(level + " " + ' '.join(words))
        return '\n'.join(lines)

    async def _coreLogs(self, request: Request):
        await self._verifyHeader(request)
        return Response(body="Core Log line 1\nCore Log Line 2")

    async def _coreInfo(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse(
            {
                "version": "1.3.3.7",
                "last_version": "1.3.3.8",
                "machine": "VS Dev",
                "ip_address": "127.0.0.1",
                "arch": "x86",
                "image": "image",
                "custom": "false",
                "boot": "true",
                "port": self._ports.server,
                "ssl": "false",
                "watchdog": "what is this",
                "wait_boot": "so many arguments"
            }
        )

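    # Backup creation takes two locks: _backup_lock models the supervisor-wide
    # "one backup at a time" rule (and can be held open via /debug/backups/lock),
    # while _backup_inner_lock serializes creation itself, so a request carrying
    # a 'wait' delay can stall later callers without racing on the backup dicts.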
    async def _internalNewBackup(self, request: Request, input_json, date=None, verify_header=True) -> str:
        async with self._backup_lock:
            async with self._backup_inner_lock:
                if 'wait' in input_json:
                    await sleep(input_json['wait'])
                if verify_header:
                    await self._verifyHeader(request)
                slug = self.generateId(8)
                password = input_json.get('password', None)
                data = createBackupTar(
                    slug,
                    input_json.get('name', "Default name"),
                    date=date or self._time.now(),
                    padSize=int(random.uniform(self._min_backup_size, self._max_backup_size)),
                    included_folders=input_json.get('folders', None),
                    included_addons=input_json.get('addons', None),
                    password=password)
                backup_info = parseBackupInfo(data)
                self._backups[slug] = backup_info
                self._backup_data[slug] = bytearray(data.getbuffer())
                return slug

    async def createBackup(self, input_json, date=None):
        return await self._internalNewBackup(None, input_json, date=date, verify_header=False)

    async def _newbackup(self, request: Request):
        if self._backup_lock.locked():
            raise HTTPBadRequest()
        input_json = await request.json()
        task = asyncio.shield(asyncio.create_task(self._internalNewBackup(request, input_json)))
        return self._formatDataResponse({"slug": await task})

    async def _lock_backups(self, request: Request):
        await self._backup_lock.acquire()
        return self._formatDataResponse({"message": "locked"})

    async def _uploadbackup(self, request: Request):
        await self._verifyHeader(request)
        try:
            reader = await request.multipart()
            contents = await reader.next()
            received_bytes = bytearray()
            while True:
                chunk = await contents.read_chunk()
                if not chunk:
                    break
                received_bytes.extend(chunk)
            info = parseBackupInfo(io.BytesIO(received_bytes))
            self._backups[info['slug']] = info
            self._backup_data[info['slug']] = received_bytes
            return self._formatDataResponse({"slug": info['slug']})
        except Exception as e:
            print(str(e))
            return self._formatErrorResponse("Bad backup")

    async def _deletebackup(self, request: Request):
        await self._verifyHeader(request)
        slug = request.match_info.get('slug')
        if slug not in self._backups:
            raise HTTPNotFound()
        del self._backups[slug]
        del self._backup_data[slug]
        return self._formatDataResponse("deleted")

    async def _backupDetail(self, request: Request):
        await self._verifyHeader(request)
        slug = request.match_info.get('slug')
        if slug not in self._backups:
            raise HTTPNotFound()
        return self._formatDataResponse(self._backups[slug])

    async def _backupDownload(self, request: Request):
        await self._verifyHeader(request)
        slug = request.match_info.get('slug')
        if slug not in self._backup_data:
            raise HTTPNotFound()
        return self.serve_bytes(request, self._backup_data[slug])

    async def _selfInfo(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse({
            "webui": "http://some/address",
            'ingress_url': "fill me in later",
            "slug": self._addon_slug,
            "options": self._options
        })

    async def _debugInfo(self, request: Request):
        return self._formatDataResponse({
            "config": {
                "webui": "http://some/address",
                'ingress_url': "fill me in later",
                "slug": self._addon_slug,
                "options": self._options
            }
        })

    async def _miscInfo(self, request: Request):
        await self._verifyHeader(request)
        return self._formatDataResponse({
            "supervisor": "super version",
            "homeassistant": "ha version",
            "hassos": "hassos version",
            "hostname": "hostname",
            "machine": "machine",
            "arch": "Arch",
            "supported_arch": "supported arch",
            "channel": "channel"
        })

    def installAddon(self, slug, name, version="v1.0", boot=True, started=True):
        self._addons.append({
            "name": 'Name for ' + name,
            "slug": slug,
            "description": slug + " description",
            "version": version,
            "watchdog": False,
            "boot": "auto" if boot else "manual",
            "logo": True,
            "ingress_entry": "/api/hassio_ingress/" + slug,
            "state": "started" if started else "stopped"
        })

    async def _authenticate(self, request: Request):
        await self._verifyHeader(request)
        input_json = await request.json()
        if input_json.get("username") != self._username or input_json.get("password") != self._password:
            raise HTTPBadRequest()
        return self._formatDataResponse({})

    async def _updateOptions(self, request: Request):
        slug = request.match_info.get('slug')

        if slug == "self":
            await self._verifyHeader(request)
            self._options = (await request.json())['options'].copy()
        else:
            self.addon(slug).update(await request.json())
        return self._formatDataResponse({})

    async def _haStateUpdate(self, request: Request):
        await self._verifyHeader(request)
        entity = request.match_info.get('entity')
        json = await request.json()
        self._entities[entity] = json['state']
        self._attributes[entity] = json['attributes']
        return Response()

    async def _haEventUpdate(self, request: Request):
        await self._verifyHeader(request)
        name = request.match_info.get('name')
        self._events.append((name, await request.json()))
        return Response()

    async def _createNotification(self, request: Request):
        await self._verifyHeader(request)
        notification = await request.json()
        print("Created notification with: {}".format(notification))
        self._notification = notification.copy()
        return Response()

    async def _dismissNotification(self, request: Request):
        await self._verifyHeader(request)
        print("Dismissed notification with: {}".format(await request.json()))
        self._notification = None
        return Response()

    async def _debug_insert_backup(self, request: Request) -> Response:
        # Default to 0 days back if the "days" query parameter is omitted.
        days_back = int(request.query.get("days", 0))
        date = self._time.now() - timedelta(days=days_back)
        name = date.strftime("Full Backup %Y-%m-%d %H:%M-%S")
        wait = int(request.query.get("wait", 0))
        slug = await self._internalNewBackup(request, {'name': name, 'wait': wait}, date=date, verify_header=False)
        return self._formatDataResponse({'slug': slug})

165
hassio-google-drive-backup/dev/simulationserver.py
Normal file
@@ -0,0 +1,165 @@
import re
from typing import Dict

from yarl import URL
import aiohttp
from aiohttp.web import (Application,
                         HTTPException,
                         Request, Response, get,
                         json_response, middleware, post, HTTPSeeOther)
from aiohttp.client import ClientSession
from injector import inject, singleton, Injector, provider

from backup.time import Time
from backup.logger import getLogger
from backup.server import Server
from tests.faketime import FakeTime
from backup.module import BaseModule
from backup.config import Config, Setting
from .http_exception import HttpMultiException
from .simulated_google import SimulatedGoogle
from .base_server import BaseServer
from .ports import Ports
from .request_interceptor import RequestInterceptor
from .simulated_supervisor import SimulatedSupervisor
from .apiingress import APIIngress
import aiorun

logger = getLogger(__name__)

mimeTypeQueryPattern = re.compile("^mimeType='.*'$")
parentsQueryPattern = re.compile("^'.*' in parents$")
bytesPattern = re.compile("^bytes \\d+-\\d+/\\d+$")
resumeBytesPattern = re.compile("^bytes \\*/\\d+$")
intPattern = re.compile("\\d+")
rangePattern = re.compile("bytes=\\d+-\\d+")

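# SimulationServer glues the simulated Google Drive, the simulated supervisor,
# and the ingress API into one aiohttp application, so the addon can be run
# end-to-end locally without real Google credentials or a real supervisor.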
@singleton
class SimulationServer(BaseServer):
    @inject
    def __init__(self, ports: Ports, time: Time, session: ClientSession, authserver: Server, config: Config, google: SimulatedGoogle, supervisor: SimulatedSupervisor, api_ingress: APIIngress, interceptor: RequestInterceptor):
        self.interceptor = interceptor
        self.google = google
        self.supervisor = supervisor
        self.config = config
        self.id_counter = 0
        self.files: Dict[str, bytearray] = {}
        self._port = ports.server
        self._time: FakeTime = time
        self.urls = []
        self.relative = True
        self._authserver = authserver
        self._api_ingress = api_ingress
        # Initialized here so the flag exists before blockBackups()/unBlockBackups() run.
        self.block_backups = False

    def wasUrlRequested(self, pattern):
        for url in self.urls:
            if pattern in url:
                return True
        return False

    def blockBackups(self):
        self.block_backups = True

    def unBlockBackups(self):
        self.block_backups = False

    async def uploadfile(self, request: Request):
        name: str = str(request.query.get("name", "test"))
        self.files[name] = await self.readAll(request)
        return Response(text="")

    async def readFile(self, request: Request):
        return self.serve_bytes(request, self.files[request.query.get("name", "test")])

    async def slugRedirect(self, request: Request):
        raise HTTPSeeOther("https://localhost:" + str(self.config.get(Setting.INGRESS_PORT)))

    @middleware
    async def error_middleware(self, request: Request, handler):
        self.urls.append(str(request.url))
        resp = await self.interceptor.checkUrl(request)
        if resp is not None:
            return resp
        try:
            resp = await handler(request)
            return resp
        except Exception as ex:
            await self.readAll(request)
            if isinstance(ex, HttpMultiException):
                return Response(status=ex.status_code)
            elif isinstance(ex, HTTPException):
                raise
            else:
                logger.printException(ex)
                return json_response(str(ex), status=500)

    def createApp(self):
        app = Application(middlewares=[self.error_middleware])
        app.add_routes(self.routes())
        self._authserver.buildApp(app)
        return app

    async def start(self, port):
        self.runner = aiohttp.web.AppRunner(self.createApp())
        await self.runner.setup()
        site = aiohttp.web.TCPSite(self.runner, "0.0.0.0", port=port)
        await site.start()

    async def stop(self):
        self.interceptor.stop()
        await self.runner.shutdown()
        await self.runner.cleanup()

    def routes(self):
        return [
            get('/readfile', self.readFile),
            post('/uploadfile', self.uploadfile),
            get('/ingress/self_slug', self.slugRedirect),
            get('/debug/config', self.debug_config)
        ] + self.google.routes() + self.supervisor.routes() + self._api_ingress.routes()

    async def debug_config(self, request: Request):
        return json_response(self.supervisor._options)

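# SimServerModule overrides the addon's OAuth and token endpoints so they all
# point back at this simulator instead of Google's real servers.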
class SimServerModule(BaseModule):
    def __init__(self, base_url: URL):
        super().__init__(override_dns=False)
        self._base_url = base_url

    @provider
    @singleton
    def getConfig(self) -> Config:
        return Config.withOverrides({
            Setting.DRIVE_AUTHORIZE_URL: str(self._base_url.with_path("o/oauth2/v2/auth")),
            Setting.AUTHORIZATION_HOST: str(self._base_url),
            Setting.TOKEN_SERVER_HOSTS: str(self._base_url),
            Setting.DRIVE_TOKEN_URL: str(self._base_url.with_path("token")),
            Setting.DRIVE_DEVICE_CODE_URL: str(self._base_url.with_path("device/code")),
            Setting.DRIVE_REFRESH_URL: str(self._base_url.with_path("oauth2/v4/token")),
            Setting.INGRESS_PORT: 56152
        })

    @provider
    @singleton
    def getPorts(self) -> Ports:
        return Ports(56153, 56151, 56152)

async def main():
    port = 56153
    base = URL("http://localhost").with_port(port)
    injector = Injector(SimServerModule(base))
    server = injector.get(SimulationServer)

    # start the server
    runner = aiohttp.web.AppRunner(server.createApp())
    await runner.setup()
    site = aiohttp.web.TCPSite(runner, "0.0.0.0", port=port)
    await site.start()
    print("Server started on port " + str(port))
    print("Open a browser at http://localhost:" + str(port))


if __name__ == '__main__':
    aiorun.run(main())

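# A minimal smoke test against the running simulator (a sketch; it assumes the
# defaults above, i.e. the server listening on http://localhost:56153):
#
#   import asyncio
#   import aiohttp
#
#   async def check():
#       async with aiohttp.ClientSession() as session:
#           async with session.get("http://localhost:56153/debug/config") as resp:
#               print(await resp.json())  # dumps the simulated addon options
#
#   asyncio.run(check())
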
18
hassio-google-drive-backup/dev/ssl/fullchain.pem
Normal file
@@ -0,0 +1,18 @@
-----BEGIN CERTIFICATE-----
MIIC5TCCAc2gAwIBAgIJAN+M1w1AVtigMA0GCSqGSIb3DQEBCwUAMBQxEjAQBgNV
BAMMCWxvY2FsaG9zdDAeFw0xOTAzMjYwMzI2MDJaFw0xOTA0MjUwMzI2MDJaMBQx
EjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
ggEBANAa2QE9uHexG6b/ggk7muXB4AhEcpPU+eqGmp4kFx/cKTYe+rPfui4FbARa
QyajXrVRMukEs0wZpUJ11LeGOmuTJ1Cu6mKtk4ub35ZrTfY0W0YdTW0ASYifDNQZ
pt4S0HAcY9A6wlorADxqDkqBt3cSuXdDaR6wFhc4x2kN7xMcKgX5Exv6AS04ksLm
fu0JNSvY1PcLQOA8bFc8tm4eEQcF51xBJBchCcXwpsr5OXt33govGcgxEPLZIueO
nmzzbF0jWBzBhwmjGGnEVsHnxgTG59QshFuB2xf6uWuZolLaPg32b2CV4gomFbn1
7j4JMFTlxw80OkWILLR6pMr1gy0CAwEAAaM6MDgwFAYDVR0RBA0wC4IJbG9jYWxo
b3N0MAsGA1UdDwQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0B
AQsFAAOCAQEAeK7VMbYO1lQmQcNIG/X42sS5Dm/YFSKgXG0VNMwjEa0xOPS54a6P
a3n7Lb6cVgwSstCSkQa0/Paqy/OvoJlvvgSrV8ZkqwU7100d7gohrReMAhWbRRDK
GkiJDUUQLAT8DXLRry2r5zRDaHX8OzzQuF8dPbFVkjXv9EMpBISY0hmodQFxBmiK
hxiYQWDcNQOTLwRk/x/b61AFLSXduonWM3r+29e8ej7LEHh9UJeLFF7S0+8t+7W4
F8j8rGWFjYa2KCUFgTOWSg1cUnKYqFaakcMQAlfcXCzuDOso/gwuVFeZZ1hY7gEQ
OHJt0Tu+PWE4CQ3118AIajj2pxTuEHc6Ow==
-----END CERTIFICATE-----
19
hassio-google-drive-backup/dev/ssl/localhost-ca-bundle.csr
Normal file
@@ -0,0 +1,19 @@
-----BEGIN CERTIFICATE REQUEST-----
MIIDAjCCAeoCAQAwgaIxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDTzETMBEGA1UE
BwwKU291dGggUGFyazEYMBYGA1UECgwPVW5pdCBUZXN0cyBJbmMuMR4wHAYDVQQL
DBVUZXN0aW5nIERlcHQuIEkgZ3Vlc3MxEjAQBgNVBAMMCWxvY2FsaG9zdDEjMCEG
CSqGSIb3DQEJARYUc3RlcGhlbkBiZWVjaGVucy5jb20wggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQDCu0+68ol5a9ShDmeg41INbwR0QdG0khlzA54Yhu3t
yhEYv7H1XE5JKwSENc1YkBTMlnmbEySW+YMpRXy6R/GoCaNU2wnz6UCdkJQQf6l+
xIAkaRB+tj7uPpz65olC6tx5CFD+je/A6ZrHzAoEhiKTsQhI5uxexnl191BIQvcj
u7qKaN+TXmvKGlixPrYp4T30EWMDsbONyNjcZr/C4Xs1SzicfscDKt8qiINP8Fgd
tBDxyPIa4deYVKHG/1le9L1ccPFy1+wSQQG3d4YED7h94ajc5chmjMkJnTTYlRKL
XwMZxcsqX9ngHhPvoB5ZahGOLtjyYpxrvduY4kQ8XSaxAgMBAAGgGjAYBgkqhkiG
9w0BCQcxCwwJY2hhbGxlbmdlMA0GCSqGSIb3DQEBCwUAA4IBAQCT+ZSEvz9mJhMA
v71WWd+QjTyT4+9SItLVK3EAcpPbbJWayCuD+mKCGQr5plixC3w+tjy4coIG8lUo
pCX8sXi7TKMVKw6LYvBJeaRRAJ2+exeAQWJvGtRBBohXzm2+SxJ5Zp5+XEY7L3o8
Apk++px7kLQTSRZxFAQ/irL/cUrp5Sn33ago+bzGA2AGryrqfBbe/nCwlCGF6cV2
2w9oqY38tPeHQK9+MLOWDE0mBZvu+ab1mpTR7hxFVaVIKOBf8BifSVc4qJ8CDS+l
N4vEnxHIGdTXVp6yjpWN86qidjbLBqS6ZvY1dw6uFuXWSZP7gRixJi4/NUCf0NSO
yd+jFL0b
-----END CERTIFICATE REQUEST-----
18
hassio-google-drive-backup/dev/ssl/localhost.crt
Normal file
@@ -0,0 +1,18 @@
-----BEGIN CERTIFICATE-----
MIIC8DCCAdigAwIBAgIUUOqXw4hsjBcEzJwlO1o9TYw+f+wwDQYJKoZIhvcNAQEL
BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIwMDIwMzA4MDYyNVoXDTIwMDMw
NDA4MDYyNVowFDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEF
AAOCAQ8AMIIBCgKCAQEAwrtPuvKJeWvUoQ5noONSDW8EdEHRtJIZcwOeGIbt7coR
GL+x9VxOSSsEhDXNWJAUzJZ5mxMklvmDKUV8ukfxqAmjVNsJ8+lAnZCUEH+pfsSA
JGkQfrY+7j6c+uaJQurceQhQ/o3vwOmax8wKBIYik7EISObsXsZ5dfdQSEL3I7u6
imjfk15ryhpYsT62KeE99BFjA7GzjcjY3Ga/wuF7NUs4nH7HAyrfKoiDT/BYHbQQ
8cjyGuHXmFShxv9ZXvS9XHDxctfsEkEBt3eGBA+4feGo3OXIZozJCZ002JUSi18D
GcXLKl/Z4B4T76AeWWoRji7Y8mKca73bmOJEPF0msQIDAQABozowODAUBgNVHREE
DTALgglsb2NhbGhvc3QwCwYDVR0PBAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMB
MA0GCSqGSIb3DQEBCwUAA4IBAQBsZ29ZHTO6yNGPKWpxfOG38Z+mk6eh6TpbIVze
b7L2cFr/ONEFyz9hnS3kf23S9VsoX0AMdqYZbGmUT/4+d9+Q8hRXv7W3zenUk4KY
SkMfvB3J27w2l9Zx7oYfonBC7SSbfYrCBHgZwsINzdP5aC2q6eFTOadIdcF2bxf9
FU/4aUyOeCkHAtYkVyxM3F33Qmf7ym7OZYKLn4SrPLFRSYiWRd8w+ww75uinnS5W
bG96OojPYzIZu8rb3b5ISR2BMWP0JVQRdmV+8TG1ekaA6EB5gAven55OxCmIUAJm
UEOLPRtVvJN0SE1S6jZBXBHler7IRDKpxATXbdFBK01s4rDz
-----END CERTIFICATE-----
28
hassio-google-drive-backup/dev/ssl/localhost.key
Normal file
@@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDCu0+68ol5a9Sh
Dmeg41INbwR0QdG0khlzA54Yhu3tyhEYv7H1XE5JKwSENc1YkBTMlnmbEySW+YMp
RXy6R/GoCaNU2wnz6UCdkJQQf6l+xIAkaRB+tj7uPpz65olC6tx5CFD+je/A6ZrH
zAoEhiKTsQhI5uxexnl191BIQvcju7qKaN+TXmvKGlixPrYp4T30EWMDsbONyNjc
Zr/C4Xs1SzicfscDKt8qiINP8FgdtBDxyPIa4deYVKHG/1le9L1ccPFy1+wSQQG3
d4YED7h94ajc5chmjMkJnTTYlRKLXwMZxcsqX9ngHhPvoB5ZahGOLtjyYpxrvduY
4kQ8XSaxAgMBAAECggEAJ1rt0S2FRSnazjX4EZb/lUFzl/9ZX3ILfKglgnV6jo1B
CUxsrdba54SvI/0vpA9ydKqQpxumUHDa5jNp8sfpefmArfyatVXVvkJi+jaizcDu
2Oz27XTtoP68gSSoZwLKThe1Ls0GwGk1491DxQhK4qhrsTgiW0EneQTjj8cg5XKH
/2l0WDslZDwW8XkJ1iqGi/OPs/X4SHggzX3xEFS2SpDK0e6GovyTfijpaql3MLMR
jnEeF69hUKKN7ADxhWvQ8d5C0CICYUzryGScVUs5312Zl83iOoeaixxfh6UaNOmE
jjdM6Hc7VbYEcfQTdZXyIPrzcz+Tc0DSDW+QsktLMQKBgQDn7j/oCNqLhxa1XnA8
HgQqUUTav/OWlWpieTmcyZ2LkRRw9MJTnP1FIfIvOXplWFSpbSSArAEzsjpjRt0n
2+7VxwN3qNirNGAk3PZiRXXHq7sE3z39PhLPthpNisYTDTIx8fcYK032uEPHsSSj
i13yKeYqeGOmfnu0nrlmZ9+ThQKBgQDW8MnvhqjMxZDdVdxZKlY/8ihnubVBlp59
s2SFIrWD1/QcKawCzagJHe/YR865k3ti7XIBghmKwLSMa6ENdTxTSSLHbBXlXJtH
tlWFgfVb8eDi7zo9178W8TrWEB7dSC2F6qMN17wOKWRkyo/c4cYBiAUaNQ1inJjk
ACOvHesAPQKBgHXEttKd3EtJNzC1WYxNOZQ7XBkvqwLlr/V81NJWVhdOffC1eA95
AeoeyJlOOGZJqgO2Ffj4XkvfzmIm05mvxeDrg0k5hXu5xrAxOzK/ToUrIHXi3dk/
sdGjCEwjkVyPMNPHp86v/pCvFEvMGWyqEfQrbmJWa1NZmnsmtcHYMOD5AoGAD1AW
Qt9IFVaZ7HraeOvAO0wIPuOHG0Ycwn3OUoHXhq4S8RKy83wtVYDxfmoXOzdbmf+q
mJrpMO5rrnlYfvn0M0bJmIWFxdJkKaa+zwUkMsm3qNM8Rf2h2oOTGn8Jg+BJhfni
ZfERr7yZL2kS+LyI+8DyBBz1eCoJ5mxwHmC2Rk0CgYBcrhxANSpikw07XLRFcvk9
m79qiEThhmiBf1WVZdtWNi9hR+zs6mWrTk8N8jfLzNLLNMPdAAybF8feeMTa9xpS
zXF9Gqlayzx/+wyPts7ocrdJKikDVdVZauoxG+mNE87VcVEx87ZiboirQVoKSsxe
OmwKminJ/E4GHJCY7RLQAw==
-----END PRIVATE KEY-----
28
hassio-google-drive-backup/dev/ssl/privkey.pem
Normal file
@@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDQGtkBPbh3sRum
/4IJO5rlweAIRHKT1PnqhpqeJBcf3Ck2Hvqz37ouBWwEWkMmo161UTLpBLNMGaVC
ddS3hjprkydQrupirZOLm9+Wa032NFtGHU1tAEmInwzUGabeEtBwHGPQOsJaKwA8
ag5Kgbd3Erl3Q2kesBYXOMdpDe8THCoF+RMb+gEtOJLC5n7tCTUr2NT3C0DgPGxX
PLZuHhEHBedcQSQXIQnF8KbK+Tl7d94KLxnIMRDy2SLnjp5s82xdI1gcwYcJoxhp
xFbB58YExufULIRbgdsX+rlrmaJS2j4N9m9gleIKJhW59e4+CTBU5ccPNDpFiCy0
eqTK9YMtAgMBAAECggEADlvr4UQK+GdGCy3SIST1uSi5dpiSd1TYsa/79zFyTwZ3
6X4VuleTlx1UqLA5te7L2CL0KlPiszuJxZ4vwUIHwehzbAPFtG1ZouZsdQqOZJCU
Q7A96Wl9qWmgDvp+IxCVRUcQNAv54RLaf1CqD8YHjLXEClCibjWkMJIAYGVPu7ez
44sbXenPi+4OfI5IHhhBm+RmXv6QpP/A4OyIg/X35NoIp+z+J/aajFsb6AMvFejU
kMCj23PUv4MGA0zrc09UDzM/d7qwCeOMCW0QqKidbkZ+UtY3lsSj7b0l50TTEYsf
2sB/xjkUVHg9sJc8ieuf8LaHedvmiQPfECjZU9VhmQKBgQDx0h359EJSvil/iQ4o
OrsmxMz40mi/9pwznF0SUuRyKOsmJsSx7zL3rVFo/YLHOE5Ju4PSDm1OL4drUE0z
2l/0S6tlN4teHU6x969Xqm2vpwKP3jFXpD0zEi4QRGXgqtY1sVFO4ZIKfTa3KKMu
wqNmAB1KczvIkU71ClzqaVUULwKBgQDcTqI1SkwmIGP4PnGbLQTRI8pmw4xx/d7X
bpgAeCegSwfCy94nX7TdDYujhxa1rp3ya5YSnkTTN7oGCXIsZkLjmfFmjiIh3uEk
YX0obydQvVUfnPTPXQP3QhZG2dQtFdUUJOsu1bJKC7a/jcLGqbJzeBUg/Sb0/gXP
KCPCCr5bYwKBgHrbVX94KXoAQvUYnKizrgG0Wq7Pt4hPsmxGNMLqekXFpDJt3+DG
tg4/b+z3X0n3wU6UhhRiYAYo/5P16EM/3yAukZWK8rOOED06qUrQu4lSQGr3Z/ou
5yjbQ6vgFCJgqRP+UmDRGXFazEGh08Yd/QYFaNw6T1VG/eZgrXQqr57hAoGBALcb
qFiQm0ApNc4T4IrwXQuTKtxE9guczUXTxwTE2XKySg4PMmMZehMs+f39/tMdAmyG
HWL2JxBDRhtUaJAcosXXorvxsM7kF88MNGGSGWRTKVgwNY3QqsYtKKTU0jRy6/pl
QRBZT2mZ2NfXdKd4TjkI+s7DekiwhZWLsETMdzEvAoGARDyJNOpPPm/VpDgV08uU
P1yPOT6j8qhQ2dN1mEab0NeyY6HGriUg8y6HJ81Obt4YyVPlEplDJe8TkphWNsby
B93FpH56WF4g8ivKD4oC2JghlWf4c0MgxiWyoNvlHSM7Dmq2UfPDyV+1UhnNH1ty
CUMs7Fjk4BeJbrYmJf3VxYU=
-----END PRIVATE KEY-----