Skip to content

Commit

Permalink
Merge pull request #936 from sabeechen/heroku
Browse files Browse the repository at this point in the history
Add another instance of the addon token server to heroku
  • Loading branch information
sabeechen authored Nov 12, 2023
2 parents e54d33f + 8238079 commit b2c274c
Show file tree
Hide file tree
Showing 7 changed files with 103 additions and 18 deletions.
19 changes: 18 additions & 1 deletion .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,23 @@ RUN apt-get update
RUN apt-get install fping
# install gcloud api
RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-cli -y


# Install Docker CE CLI
RUN apt-get update \
&& apt-get install -y apt-transport-https ca-certificates curl gnupg2 lsb-release \
&& curl -fsSL https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/gpg | apt-key add - 2>/dev/null \
&& echo "deb [arch=amd64] https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]') $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list \
&& apt-get update \
&& apt-get install -y docker-ce-cli

# Install Docker Compose
RUN LATEST_COMPOSE_VERSION=$(curl -sSL "https://api.github.com/repos/docker/compose/releases/latest" | grep -o -P '(?<="tag_name": ").+(?=")') \
&& curl -sSL "https://github.com/docker/compose/releases/download/${LATEST_COMPOSE_VERSION}/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose \
&& chmod +x /usr/local/bin/docker-compose

# Install Heroku CLI
RUN curl https://cli-assets.heroku.com/install-ubuntu.sh | sh

# Install app dependencies
COPY requirements-dev.txt ./
RUN pip install --no-cache-dir -r requirements-dev.txt
4 changes: 3 additions & 1 deletion .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
{
"build": { "dockerfile": "Dockerfile" },
"runArgs": ["--init", "--privileged"],
"extensions": ["ms-python.python", "wholroyd.jinja","ms-python.vscode-pylance"],
"forwardPorts": [3000]
"forwardPorts": [3000],
"mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ]
}
2 changes: 1 addition & 1 deletion hassio-google-drive-backup/backup/config/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ def key(self):

# Remote endpoints
Setting.AUTHORIZATION_HOST: "https://habackup.io",
Setting.TOKEN_SERVER_HOSTS: "https://token1.habackup.io,https://habackup.io",
Setting.TOKEN_SERVER_HOSTS: "https://token2.habackup.io,https://token1.habackup.io,https://habackup.io",
Setting.SUPERVISOR_URL: "",
Setting.SUPERVISOR_TOKEN: "",
Setting.DRIVE_URL: "https://www.googleapis.com",
Expand Down
25 changes: 11 additions & 14 deletions hassio-google-drive-backup/backup/server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from yarl import URL
from backup.config import Version
from urllib.parse import unquote
from backup.time import Time

NEW_AUTH_MINIMUM = Version(0, 101, 3)

Expand All @@ -26,7 +27,9 @@ def __init__(self,
config: Config,
exchanger_builder: ClassAssistedBuilder[Exchanger],
logger: CloudLogger,
error_store: ErrorStore):
error_store: ErrorStore,
time: Time):
self._time = time
self.exchanger = exchanger_builder.build(
client_id=config.get(Setting.DEFAULT_DRIVE_CLIENT_ID),
client_secret=config.get(Setting.DEFAULT_DRIVE_CLIENT_SECRET),
Expand Down Expand Up @@ -128,14 +131,6 @@ async def refresh(self, request: Request):
return json_response({
"error": "Couldn't connect to Google's servers"
}, status=503)
except ServerDisconnectedError:
return json_response({
"error": "Couldn't connect to Google's servers"
}, status=503)
except ServerTimeoutError:
return json_response({
"error": "Google's servers timed out"
}, status=503)
except GoogleCredentialsExpired:
return json_response({
"error": "expired"
Expand Down Expand Up @@ -203,16 +198,18 @@ def logError(self, request: Request, exception: Exception):
self.logger.log_struct(data)

def logReport(self, request, report):
data = self.getRequestInfo(request)
data = self.getRequestInfo(request, include_timestamp=True)
data['report'] = report
self.logger.log_struct(data)
self.error_store.store(data)

def getRequestInfo(self, request: Request):
return {
def getRequestInfo(self, request: Request, include_timestamp=False):
data = {
'client': request.headers.get('client', "unknown"),
'version': request.headers.get('addon_version', "unknown"),
'address': request.remote,
'url': str(request.url),
'length': request.content_length
'length': request.content_length,
}
if include_timestamp:
data['server_time'] = self._time.now()
return data
57 changes: 57 additions & 0 deletions hassio-google-drive-backup/dev/error_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import argparse
from google.cloud import firestore
from datetime import datetime, timedelta
DELETE_BATCH_SIZE = 200
STORE_NAME = "error_reports"


def delete_old_data():
    """Delete every document in the error report collection older than one week.

    Documents are removed in batches of DELETE_BATCH_SIZE to stay within
    Firestore's batched-write limits; the loop repeats until a query for
    stale documents comes back empty.
    """
    # Initialize Firestore (uses application-default credentials)
    db = firestore.Client()
    collection_ref = db.collection(STORE_NAME)

    # Cutoff: anything whose 'server_time' sorts before this is deleted.
    # NOTE(review): datetime.now() is timezone-naive; if 'server_time' is
    # stored as a tz-aware Firestore timestamp this comparison may not
    # behave as intended — confirm against what the server writes.
    week_ago = datetime.now() - timedelta(days=7)

    total_deleted = 0
    while True:
        batch = db.batch()
        # Fetch at most one batch worth of stale documents per pass instead
        # of streaming the full stale result set and breaking out early.
        docs = collection_ref.where('server_time', '<', week_ago).limit(DELETE_BATCH_SIZE).stream()
        to_delete = 0
        for doc in docs:
            batch.delete(doc.reference)
            to_delete += 1
        if to_delete == 0:
            # No stale documents remain.
            break
        batch.commit()
        total_deleted += to_delete
        print(f"Deleted {to_delete} documents ({total_deleted} total)")
    print(f"Success: All documents older than a week deleted ({total_deleted} total)")


def main():
    """Command-line entry point for error-store maintenance tasks."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "--purge",
        help="Delete all documents older than a week.",
        action="store_true")
    # Future maintenance operations can be registered here, for example:
    # arg_parser.add_argument("--future_arg", help="Perform some future operation.")
    options = arg_parser.parse_args()

    # Guard clause: nothing to do unless a purge was requested.
    if not options.purge:
        return
    answer = input('Are you sure you want to delete all documents older than a week? (y/n): ')
    if answer.lower() == 'y':
        delete_old_data()
    else:
        print("Abort: No documents were deleted.")


if __name__ == "__main__":
    main()
11 changes: 10 additions & 1 deletion hassio-google-drive-backup/tests/test_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
from dev.simulationserver import SimulationServer
from aiohttp import ClientSession, hdrs
from backup.config import Config

from .faketime import FakeTime
import json

@pytest.mark.asyncio
async def test_refresh_known_error(server: SimulationServer, session: ClientSession, config: Config, server_url: URL):
Expand Down Expand Up @@ -48,3 +49,11 @@ async def test_old_auth_method(server: SimulationServer, session: ClientSession,
redirect = URL(r.headers[hdrs.LOCATION])
assert redirect.query.get("creds") is not None
assert redirect.host == "example.com"


@pytest.mark.asyncio
async def test_log_to_firestore(time: FakeTime, server: SimulationServer, session: ClientSession, server_url: URL):
    """Posting an error report to /logerror stores it in the server's error store.

    Adds the @pytest.mark.asyncio marker for consistency with the other async
    test in this module; without it, strict-mode pytest-asyncio never awaits
    this coroutine and the test silently does nothing.
    """
    data = {"info": "testing"}
    async with session.post(server_url.with_path("logerror"), data=json.dumps(data)) as r:
        assert r.status == 200
    # The simulated auth server should have captured the report verbatim.
    assert server._authserver.error_store.last_error is not None
    assert server._authserver.error_store.last_error['report'] == data
3 changes: 3 additions & 0 deletions heroku.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
build:
docker:
web: hassio-google-drive-backup/Dockerfile-server

0 comments on commit b2c274c

Please sign in to comment.